Build.PL100644000765000024 320113603257356 15240 0ustar00goccystaff000000000000Compiler-Lexer-0.23# ========================================================================= # THIS FILE IS AUTOMATICALLY GENERATED BY MINILLA. # DO NOT EDIT DIRECTLY. # ========================================================================= use 5.008_001; use strict; use warnings; use utf8; BEGIN { push @INC, '.' } use builder::MyBuilder; use File::Basename; use File::Spec; my %args = ( license => 'perl_5', dynamic_config => 0, configure_requires => { 'Module::Build' => '0.4005', }, requires => { 'XSLoader' => '0.02', 'perl' => '5.008001', }, recommends => { }, suggests => { }, build_requires => { 'Devel::PPPort' => '3.19', 'ExtUtils::MakeMaker' => '6.59', 'ExtUtils::ParseXS' => '2.21', 'Test::More' => '0', }, test_requires => { 'Test::More' => '0.95', }, name => 'Compiler-Lexer', module_name => 'Compiler::Lexer', allow_pureperl => 0, script_files => [glob('script/*'), glob('bin/*')], PL_files => {}, test_files => ((-d '.git' || $ENV{RELEASE_TESTING}) && -d 'xt') ? 't/ xt/' : 't/', recursive_test_files => 1, ); if (-d 'share') { $args{share_dir} = 'share'; } my $builder = builder::MyBuilder->subclass( class => 'MyBuilder', code => q{ sub ACTION_distmeta { die "Do not run distmeta. Install Minilla and `minil install` instead.\n"; } sub ACTION_installdeps { die "Do not run installdeps. Run `cpanm --installdeps .` instead.\n"; } } )->new(%args); $builder->create_build_script(); Changes100644000765000024 406313603257356 15246 0ustar00goccystaff000000000000Compiler-Lexer-0.23Revision history for Perl extension Compiler-Lexer. 0.23 2020-01-02T03:09:25Z - support tokenize for Perl 5.24.1 0.22 2015-02-22T02:59:18Z - supported newly syntax from 5.20.0 - fixed parsing bugs (format/v-string/variable) 0.21 2015-01-26T05:59:24Z - added authority 0.20 2015-01-26T03:17:35Z - support HereDocumentBareTag (e.g. <' 0.14 2013-09-28T11:37:15Z - supported glob and array_size operator - supported simple recursive_tokenize - added destructor for Lexer 0.13 : Tue Jun 25 16:30:00 2013 - changed type of return value from reference of array reference to simple array reference - optimized by gperf - improved memory allocation for speedup - removed memory leaks during tokenize 0.12 : Mon May 27 16:40:00 2013 - added META.yml and optimized annotation methods 0.10 : Mon May 27 11:50:00 2013 - supported perl-5.8.1 and fixed some bugs 0.09 : Fri May 24 18:35:00 2013 - fixed example/bigdata.pl and removed example/lib/BigData.pm 0.08 : Fri May 24 18:21:00 2013 - removed unknown headers from tarball 0.07 : Fri May 24 17:00:00 2013 - optimize function call 0.06 : Thu May 23 17:44:00 2013 - support version string like 'v123.456.7890.00' 0.05 : Wed May 22 12:50:00 2013 - fix segv of Compiler::Lexer->new->tokenize->('package Foo;') 0.04 : Wed May 22 11:53:00 2013 - add simple accessor to Compiler::Lexer::Token 0.03 : Wed May 22 10:20:00 2013 - modify pod and fix package name of 'Compiler::Lexer::Token' 0.02 : Tue May 21 20:19:00 2013 - modify MANIFEST. 0.01 : Tue May 21 19:33:00 2013 - First Version. LICENSE100644000765000024 4400713603257356 15002 0ustar00goccystaff000000000000Compiler-Lexer-0.23This software is copyright (c) 2013 by Masaaki Goshima (goccy) . This is free software; you can redistribute it and/or modify it under the same terms as the Perl 5 programming language system itself. 
Terms of the Perl programming language system itself a) the GNU General Public License as published by the Free Software Foundation; either version 1, or (at your option) any later version, or b) the "Artistic License" --- The GNU General Public License, Version 1, February 1989 --- This software is Copyright (c) 2013 by Masaaki Goshima (goccy) . This is free software, licensed under: The GNU General Public License, Version 1, February 1989 GNU GENERAL PUBLIC LICENSE Version 1, February 1989 Copyright (C) 1989 Free Software Foundation, Inc. 51 Franklin St, Suite 500, Boston, MA 02110-1335 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The license agreements of most software companies try to keep users at the mercy of those companies. By contrast, our General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. The General Public License applies to the Free Software Foundation's software and to any other program whose authors commit to using it. You can use it for your programs, too. When we speak of free software, we are referring to freedom, not price. Specifically, the General Public License is designed to make sure that you have the freedom to give away or sell copies of free software, that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of a such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must tell them their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License Agreement applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any work containing the Program or a portion of it, either verbatim or with modifications. Each licensee is addressed as "you". 1. 
You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this General Public License and to the absence of any warranty; and give any other recipients of the Program a copy of this General Public License along with the Program. You may charge a fee for the physical act of transferring a copy. 2. You may modify your copy or copies of the Program or any portion of it, and copy and distribute such modifications under the terms of Paragraph 1 above, provided that you also do the following: a) cause the modified files to carry prominent notices stating that you changed the files and the date of any change; and b) cause the whole of any work that you distribute or publish, that in whole or in part contains the Program or any part thereof, either with or without modifications, to be licensed at no charge to all third parties under the terms of this General Public License (except that you may choose to grant warranty protection to some or all third parties, at your option). c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the simplest and most usual way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this General Public License. d) You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. Mere aggregation of another independent work with the Program (or its derivative) on a volume of a storage or distribution medium does not bring the other work under the scope of these terms. 3. You may copy and distribute the Program (or a portion or derivative of it, under Paragraph 2) in object code or executable form under the terms of Paragraphs 1 and 2 above provided that you also do one of the following: a) accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Paragraphs 1 and 2 above; or, b) accompany it with a written offer, valid for at least three years, to give any third party free (except for a nominal charge for the cost of distribution) a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Paragraphs 1 and 2 above; or, c) accompany it with the information you received as to where the corresponding source code may be obtained. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form alone.) Source code for a work means the preferred form of the work for making modifications to it. For an executable file, complete source code means all the source code for all modules it contains; but, as a special exception, it need not include source code for modules which are standard libraries that accompany the operating system on which the executable file runs, or for standard header files or definitions files that accompany that operating system. 4. You may not copy, modify, sublicense, distribute or transfer the Program except as expressly provided under this General Public License. 
Any attempt otherwise to copy, modify, sublicense, distribute or transfer the Program is void, and will automatically terminate your rights to use the Program under this License. However, parties who have received copies, or rights to use copies, from you under this General Public License will not have their licenses terminated so long as such parties remain in full compliance. 5. By copying, distributing or modifying the Program (or any work based on the Program) you indicate your acceptance of this license to do so, and all its terms and conditions. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. 7. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of the license which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the license, you may choose any version ever published by the Free Software Foundation. 8. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 9. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 10. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 
END OF TERMS AND CONDITIONS Appendix: How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to humanity, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) 19yy This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 1, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA Also add information on how to contact you by electronic and paper mail. If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) 19xx name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (a program to direct compilers to make passes at assemblers) written by James Hacker. , 1 April 1989 Ty Coon, President of Vice That's all there is to it! --- The Artistic License 1.0 --- This software is Copyright (c) 2013 by Masaaki Goshima (goccy) . This is free software, licensed under: The Artistic License 1.0 The Artistic License Preamble The intent of this document is to state the conditions under which a Package may be copied, such that the Copyright Holder maintains some semblance of artistic control over the development of the package, while giving the users of the package the right to use and distribute the Package in a more-or-less customary fashion, plus the right to make reasonable modifications. Definitions: - "Package" refers to the collection of files distributed by the Copyright Holder, and derivatives of that collection of files created through textual modification. - "Standard Version" refers to such a Package if it has not been modified, or has been modified in accordance with the wishes of the Copyright Holder. - "Copyright Holder" is whoever is named in the copyright or copyrights for the package. - "You" is you, if you're thinking about copying or distributing this Package. 
- "Reasonable copying fee" is whatever you can justify on the basis of media cost, duplication charges, time of people involved, and so on. (You will not be required to justify it to the Copyright Holder, but only to the computing community at large as a market that must bear the fee.) - "Freely Available" means that no fee is charged for the item itself, though there may be fees involved in handling the item. It also means that recipients of the item may redistribute it under the same conditions they received it. 1. You may make and give away verbatim copies of the source form of the Standard Version of this Package without restriction, provided that you duplicate all of the original copyright notices and associated disclaimers. 2. You may apply bug fixes, portability fixes and other modifications derived from the Public Domain or from the Copyright Holder. A Package modified in such a way shall still be considered the Standard Version. 3. You may otherwise modify your copy of this Package in any way, provided that you insert a prominent notice in each changed file stating how and when you changed that file, and provided that you do at least ONE of the following: a) place your modifications in the Public Domain or otherwise make them Freely Available, such as by posting said modifications to Usenet or an equivalent medium, or placing the modifications on a major archive site such as ftp.uu.net, or by allowing the Copyright Holder to include your modifications in the Standard Version of the Package. b) use the modified Package only within your corporation or organization. c) rename any non-standard executables so the names do not conflict with standard executables, which must also be provided, and provide a separate manual page for each non-standard executable that clearly documents how it differs from the Standard Version. d) make other distribution arrangements with the Copyright Holder. 4. You may distribute the programs of this Package in object code or executable form, provided that you do at least ONE of the following: a) distribute a Standard Version of the executables and library files, together with instructions (in the manual page or equivalent) on where to get the Standard Version. b) accompany the distribution with the machine-readable source of the Package with your modifications. c) accompany any non-standard executables with their corresponding Standard Version executables, giving the non-standard executables non-standard names, and clearly documenting the differences in manual pages (or equivalent), together with instructions on where to get the Standard Version. d) make other distribution arrangements with the Copyright Holder. 5. You may charge a reasonable copying fee for any distribution of this Package. You may charge any fee you choose for support of this Package. You may not charge a fee for this Package itself. However, you may distribute this Package in aggregate with other (possibly commercial) programs as part of a larger (possibly commercial) software distribution provided that you do not advertise this Package as a product of your own. 6. The scripts and library files supplied as input to or produced as output from the programs of this Package do not automatically fall under the copyright of this Package, but belong to whomever generated them, and may be sold commercially, and may be aggregated with this Package. 7. C or perl subroutines supplied by you and linked into this Package shall not be considered part of this Package. 8. 
The name of the Copyright Holder may not be used to endorse or promote products derived from this software without specific prior written permission. 9. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE. The End META.json100644000765000024 550013603257356 15371 0ustar00goccystaff000000000000Compiler-Lexer-0.23{ "abstract" : "Lexical Analyzer for Perl5", "author" : [ "Masaaki Goshima (goccy) " ], "dynamic_config" : 0, "generated_by" : "Minilla/v3.1.8", "license" : [ "perl_5" ], "meta-spec" : { "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec", "version" : "2" }, "name" : "Compiler-Lexer", "no_index" : { "directory" : [ "t", "examples", "builder", "experiments" ] }, "prereqs" : { "build" : { "requires" : { "Devel::PPPort" : "3.19", "ExtUtils::MakeMaker" : "6.59", "ExtUtils::ParseXS" : "2.21", "Test::More" : "0" } }, "configure" : { "requires" : { "Module::Build" : "0.4005", "Module::Build::XSUtil" : "0.02" } }, "develop" : { "requires" : { "Test::CPAN::Meta" : "0", "Test::MinimumVersion::Fast" : "0.04", "Test::PAUSE::Permissions" : "0.07", "Test::Pod" : "1.41", "Test::Spellunker" : "v0.2.7" } }, "runtime" : { "requires" : { "XSLoader" : "0.02", "perl" : "5.008001" } }, "test" : { "requires" : { "Test::More" : "0.95" } } }, "provides" : { "Compiler::Lexer" : { "file" : "lib/Compiler/Lexer.pm", "version" : "0.23" }, "Compiler::Lexer::Kind" : { "file" : "lib/Compiler/Lexer/Constants.pm" }, "Compiler::Lexer::SyntaxType" : { "file" : "lib/Compiler/Lexer/Constants.pm" }, "Compiler::Lexer::Token" : { "file" : "lib/Compiler/Lexer/Token.pm" }, "Compiler::Lexer::TokenType" : { "file" : "lib/Compiler/Lexer/Constants.pm" } }, "release_status" : "stable", "resources" : { "bugtracker" : { "web" : "https://github.com/goccy/p5-Compiler-Lexer/issues" }, "homepage" : "https://github.com/goccy/p5-Compiler-Lexer", "repository" : { "url" : "git://github.com/goccy/p5-Compiler-Lexer.git", "web" : "https://github.com/goccy/p5-Compiler-Lexer" } }, "version" : "0.23", "x_authority" : "cpan:GOCCY", "x_contributors" : [ "Fumihiro Itoh ", "K ", "Masaaki Goshima ", "Masaaki Goshima ", "Olivier Mengué ", "Reini Urban ", "Syohei YOSHIDA ", "brian d foy ", "moznion ", "tokuhirom " ], "x_serialization_backend" : "JSON::PP version 4.04", "x_static_install" : 0 } README.md100644000765000024 354413603257356 15235 0ustar00goccystaff000000000000Compiler-Lexer-0.23[![Build Status](https://travis-ci.org/goccy/p5-Compiler-Lexer.svg?branch=master)](https://travis-ci.org/goccy/p5-Compiler-Lexer) [![Coverage Status](https://img.shields.io/coveralls/goccy/p5-Compiler-Lexer/master.svg?style=flat)](https://coveralls.io/r/goccy/p5-Compiler-Lexer?branch=master) # NAME Compiler::Lexer - Lexical Analyzer for Perl5 # SYNOPSIS use Compiler::Lexer; use Data::Dumper; my $filename = $ARGV[0]; open my $fh, '<', $filename or die "Cannot open $filename: $!"; my $script = do { local $/; <$fh> }; my $lexer = Compiler::Lexer->new($filename); my $tokens = $lexer->tokenize($script); print Dumper $tokens; my $modules = $lexer->get_used_modules($script); print Dumper $modules; # METHODS - my $lexer = Compiler::Lexer->new($options); creates a new instance. You can pass $options as a hash reference. **options list** - filename - verbose : also include tokens for Pod, Comment and WhiteSpace - $lexer->tokenize($script); returns token objects, each of which includes parameters such as 'name', 'type' and 'line'.
This method requires perl source code as a string. - $lexer->set\_library\_path(\['path1', 'path2' ...\]) sets the library paths used to find modules when tokenizing recursively. The default paths are @INC. - $lexer->recursive\_tokenize($script) returns a hash reference like { 'module\_nameA' => \[\], 'module\_nameB' => \[\] ... }. This method requires perl source code as a string. - $lexer->get\_used\_modules($script); returns the names of the used modules. This method requires perl source code as a string. # AUTHOR Masaaki Goshima (goccy) <goccy(at)cpan.org> # CONTRIBUTORS tokuhirom: Tokuhiro Matsuno # LICENSE AND COPYRIGHT Copyright (c) 2013, Masaaki Goshima (goccy). All rights reserved. This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. MyBuilder.pm100644000765000024 153013603257356 17627 0ustar00goccystaff000000000000Compiler-Lexer-0.23/builderpackage builder::MyBuilder; use strict; use warnings FATAL => 'all'; use 5.008005; use base 'Module::Build::XSUtil'; use constant DEBUG => 0; sub new { my ( $class, %args ) = @_; my @ignore_warnings_options = map { "-Wno-$_" } qw(missing-field-initializers); my $self = $class->SUPER::new( %args, generate_ppport_h => 'include/ppport.h', needs_compiler_cpp => 1, c_source => [qw/src/], xs_files => { 'src/Compiler-Lexer.xs' => 'lib/Compiler/Lexer.xs' }, cc_warnings => 0, # TODO extra_compiler_flags => ['-Iinclude', @ignore_warnings_options, '-g3'], add_to_cleanup => [ 'lib/Compiler/Lexer/*.o', 'lib/Compiler/Lexer/*.c', 'lib/Compiler/Lexer/*.xs', ], ); $self->{config}->set('optimize' => '-O0') if (DEBUG); return $self; } 1; cpanfile100644000765000024 56113603257356 15436 0ustar00goccystaff000000000000Compiler-Lexer-0.23requires 'XSLoader', '0.02'; requires 'perl', '5.008001'; on 'configure' => sub { requires 'Module::Build::XSUtil' => '>=0.02'; }; on 'build' => sub { requires 'Devel::PPPort', '3.19'; requires 'ExtUtils::MakeMaker', '6.59'; requires 'ExtUtils::ParseXS', '2.21'; requires 'Test::More'; }; on 'test' => sub { requires 'Test::More', '0.95'; }; cpanfile.snapshot100644000765000024 570413603257356 17320 0ustar00goccystaff000000000000Compiler-Lexer-0.23# carton snapshot format: version 1.0 DISTRIBUTIONS Devel-CheckCompiler-0.02 pathname: T/TO/TOKUHIROM/Devel-CheckCompiler-0.02.tar.gz provides: Devel::AssertC99 undef Devel::CheckCompiler 0.02 requirements: Exporter 0 ExtUtils::CBuilder 0 File::Temp 0 Module::Build 0.38 Test::More 0.98 Test::Requires 0 parent 0 perl 5.008001 Module-Build-0.4007 pathname: L/LE/LEONT/Module-Build-0.4007.tar.gz provides: Module::Build 0.4007 Module::Build::Base 0.4007 Module::Build::Compat 0.4007 Module::Build::Config 0.4007 Module::Build::Cookbook 0.4007 Module::Build::Dumper 0.4007 Module::Build::ModuleInfo 0.4007 Module::Build::Notes 0.4007 Module::Build::PPMMaker 0.4007 Module::Build::Platform::Default 0.4007 Module::Build::Platform::MacOS 0.4007 Module::Build::Platform::Unix 0.4007 Module::Build::Platform::VMS 0.4007 Module::Build::Platform::VOS 0.4007 Module::Build::Platform::Windows 0.4007 Module::Build::Platform::aix 0.4007 Module::Build::Platform::cygwin 0.4007 Module::Build::Platform::darwin 0.4007 Module::Build::Platform::os2 0.4007 Module::Build::PodParser 0.4007 Module::Build::Version 0.87 Module::Build::YAML 1.41 inc::latest 0.4007 inc::latest::private 0.4007 requirements: CPAN::Meta 2.110420 CPAN::Meta::YAML 0.003 Cwd 0 Data::Dumper 0 ExtUtils::CBuilder 0.27 ExtUtils::Install 0 ExtUtils::Manifest 0 ExtUtils::Mkbootstrap 0 ExtUtils::ParseXS 2.21 File::Basename 0 File::Compare 0 File::Copy 0 File::Find
0 File::Path 0 File::Spec 0.82 File::Temp 0.15 Getopt::Long 0 Module::Metadata 1.000002 Parse::CPAN::Meta 1.4401 Perl::OSType 1 Pod::Man 2.17 Test::Harness 3.16 Test::More 0.49 Text::Abbrev 0 Text::ParseWords 0 perl 5.006001 version 0.87 Module-Build-XSUtil-0.03 pathname: H/HI/HIDEAKIO/Module-Build-XSUtil-0.03.tar.gz provides: Module::Build::XSUtil 0.03 requirements: CPAN::Meta 0 CPAN::Meta::Prereqs 0 Devel::CheckCompiler 0.02 Devel::PPPort 3.19 Exporter 0 ExtUtils::CBuilder 0 File::Basename 0 File::Path 0 Module::Build 0.4005 XSLoader 0.02 parent 0 perl 5.008005 Test-Requires-0.07 pathname: T/TO/TOKUHIROM/Test-Requires-0.07.tar.gz provides: Test::Requires 0.07 requirements: CPAN::Meta 0 CPAN::Meta::Prereqs 0 ExtUtils::MakeMaker 6.59 Module::Build 0.38 Test::Builder::Module 0 Test::More 0.61 perl 5.008_001 YAML-LibYAML-0.41 pathname: I/IN/INGY/YAML-LibYAML-0.41.tar.gz provides: YAML::LibYAML 0.18 YAML::XS 0.41 YAML::XS::LibYAML undef requirements: ExtUtils::MakeMaker 6.59 perl 5.008001 benchmark.pl100644000765000024 114313603257356 17671 0ustar00goccystaff000000000000Compiler-Lexer-0.23/exampleuse strict; use warnings; use Benchmark qw/timethese cmpthese/; use PPI::Tokenizer; use Compiler::Lexer; use Data::Dumper; use constant { LOOP_COUNT => 1000 }; sub ppi { my $filename = $ARGV[0]; my $tokenizer = PPI::Tokenizer->new($filename); $tokenizer->all_tokens; } sub compiler_lexer { my $filename = $ARGV[0]; open my $fh, '<', $filename; my $script = do { local $/; <$fh> }; my $lexer = Compiler::Lexer->new($filename); $lexer->tokenize($script); } my $result = timethese(LOOP_COUNT, { PPI => \&ppi, COMPILER_LEXER => \&compiler_lexer }); cmpthese $result; sample.pl100644000765000024 63013603257356 17200 0ustar00goccystaff000000000000Compiler-Lexer-0.23/exampleuse strict; use warnings; use Compiler::Lexer; use Data::Dumper; my $filename = $ARGV[0]; my $lexer = Compiler::Lexer->new($filename); open my $fh, '<', $filename; my $script = do { local $/; <$fh> }; my $tokens = $lexer->tokenize($script); print Dumper $tokens; print Dumper $lexer->get_groups_by_syntax_level($$tokens, Compiler::Lexer::SyntaxType::T_Stmt); print Dumper $lexer->get_used_modules($script); double_charactor_operator.gperf100644000765000024 54113603257356 22741 0ustar00goccystaff000000000000Compiler-Lexer-0.23/gen%{ typedef struct _KeywordTable { const char *name; int value; } KeywordTable; %} KeywordTable; %% <= >= .= x= != == += -= *= "%=" |= &= ^= << >> ++ -- ** // && || :: .. => -> &{ @{ "%{" ${ @$ "%$" "%-" "%+" @- @+ &$ $# <> !~ ~~ =~ $0 $1 $2 $3 $4 $5 $6 $7 $8 $9 $& $` $' $+ $. $/ $| "$," $\ $" $% $= $- $~ $^ $* $: $; $? $! 
$@ $< $> $( $) $[ $] "%%" gen_constants.yaml100644000765000024 1144613603257356 20300 0ustar00goccystaff000000000000Compiler-Lexer-0.23/gen--- syntax_type: T_BlockStmt: 4 T_Expr: 2 T_Stmt: 3 T_Term: 1 T_Value: 0 token_kind: T_AUTOLOAD: 10 T_Annotation: 28 T_Assign: 2 T_CORE: 11 T_Class: 27 T_Colon: 20 T_Comma: 19 T_Control: 14 T_DESTROY: 12 T_DataWord: 8 T_Decl: 3 T_DefaultStmt: 18 T_Do: 15 T_Function: 4 T_Get: 33 T_Handle: 13 T_Import: 6 T_ModWord: 9 T_Modifier: 23 T_Module: 16 T_Namespace: 25 T_Operator: 1 T_Package: 26 T_Ref: 32 T_RegOpt: 29 T_RegPrefix: 30 T_RegReplacePrefix: 31 T_Return: 0 T_Set: 34 T_SingleTerm: 5 T_SpecificKeyword: 7 T_Stmt: 17 T_StmtEnd: 21 T_Symbol: 22 T_Term: 24 T_Undefined: 36 T_Verbose: 35 token_type: T_AUTOLOAD: 76 T_Add: 1 T_AddEqual: 20 T_AlphabetAnd: 16 T_AlphabetNot: 69 T_AlphabetOr: 14 T_AlphabetXOr: 18 T_And: 52 T_AndBitEqual: 54 T_AndEqual: 58 T_Annotation: 135 T_Argument: 205 T_ArgumentArray: 136 T_Array: 188 T_ArrayAt: 199 T_ArrayDereference: 113 T_ArrayRef: 197 T_ArraySet: 201 T_ArraySize: 66 T_ArraySizeDereference: 121 T_ArrayVar: 168 T_Arrow: 124 T_Assign: 65 T_BareWord: 123 T_BitAnd: 15 T_BitNot: 12 T_BitOr: 13 T_BitXOr: 17 T_Break: 88 T_BuiltinFunc: 70 T_CORE: 77 T_Call: 204 T_CallDecl: 130 T_Class: 129 T_CodeDereference: 116 T_CodeRef: 131 T_CodeVar: 167 T_Colon: 105 T_Comma: 104 T_Comment: 209 T_Compare: 33 T_ConstValue: 138 T_Continue: 86 T_DESTROY: 78 T_DataWord: 74 T_Dec: 46 T_Default: 207 T_DefaultEqual: 49 T_DefaultOperator: 60 T_DefaultStmt: 103 T_Diamond: 32 T_Div: 4 T_DivEqual: 23 T_Do: 87 T_Double: 171 T_ElseStmt: 97 T_ElsifStmt: 98 T_Environment: 141 T_EqualEqual: 31 T_ExecString: 174 T_Exp: 47 T_FieldDecl: 158 T_ForStmt: 133 T_ForeachStmt: 134 T_Format: 184 T_FormatDecl: 183 T_FormatEnd: 185 T_Function: 203 T_FunctionDecl: 63 T_GivenStmt: 102 T_Glob: 11 T_GlobalArrayVar: 195 T_GlobalHashVar: 196 T_GlobalVar: 194 T_GlobalVarDecl: 162 T_Goto: 85 T_Greater: 7 T_GreaterEqual: 29 T_Handle: 89 T_HandleDelim: 152 T_Hash: 189 T_HashAt: 200 T_HashDereference: 114 T_HashRef: 198 T_HashSet: 202 T_HashVar: 169 T_HereDocument: 181 T_HereDocumentBareTag: 179 T_HereDocumentEnd: 182 T_HereDocumentExecTag: 178 T_HereDocumentRawTag: 177 T_HereDocumentTag: 176 T_IfStmt: 96 T_Import: 72 T_Inc: 45 T_Include: 142 T_Int: 170 T_Is: 67 T_Key: 122 T_LabelRef: 160 T_Last: 84 T_LeftBrace: 109 T_LeftBracket: 111 T_LeftParenthesis: 107 T_LeftShift: 50 T_LeftShiftEqual: 26 T_Less: 8 T_LessEqual: 30 T_LibraryDirectories: 140 T_List: 206 T_LocalArrayVar: 192 T_LocalDecl: 90 T_LocalHashVar: 193 T_LocalVar: 191 T_LocalVarDecl: 161 T_Method: 64 T_Mod: 5 T_ModEqual: 24 T_ModWord: 75 T_Mul: 3 T_MulEqual: 22 T_MultiGlobalVarDecl: 164 T_MultiLocalVarDecl: 163 T_Namespace: 127 T_NamespaceResolver: 126 T_Next: 83 T_Not: 68 T_NotBitEqual: 56 T_NotEqual: 37 T_Object: 186 T_Operator: 190 T_Or: 53 T_OrBitEqual: 55 T_OrEqual: 57 T_OurDecl: 91 T_Package: 128 T_Pod: 208 T_Pointer: 125 T_PolymorphicCompare: 34 T_PostDeref: 212 T_PostDerefArraySliceCloseBracket: 215 T_PostDerefArraySliceOpenBracket: 214 T_PostDerefCodeCloseParen: 219 T_PostDerefCodeOpenParen: 218 T_PostDerefHashSliceCloseBrace: 217 T_PostDerefHashSliceOpenBrace: 216 T_PostDerefStar: 213 T_PowerEqual: 48 T_ProgramArgument: 139 T_Prototype: 165 T_RawHereDocument: 180 T_RawString: 173 T_Redo: 82 T_Ref: 10 T_RegAllReplace: 154 T_RegDecl: 149 T_RegDelim: 151 T_RegDoubleQuote: 146 T_RegExec: 148 T_RegExp: 187 T_RegList: 147 T_RegMatch: 150 T_RegMiddleDelim: 153 T_RegNot: 36 T_RegOK: 35 T_RegOpt: 144 T_RegQuote: 145 
T_RegReplace: 155 T_RegReplaceFrom: 156 T_RegReplaceTo: 157 T_RequireDecl: 71 T_RequiredName: 95 T_Return: 0 T_RightBrace: 110 T_RightBracket: 112 T_RightParenthesis: 108 T_RightShift: 51 T_RightShiftEqual: 27 T_STDERR: 81 T_STDIN: 79 T_STDOUT: 80 T_ScalarDereference: 115 T_SemiColon: 106 T_ShortArrayDereference: 118 T_ShortCodeDereference: 120 T_ShortHashDereference: 119 T_ShortScalarDereference: 117 T_Signal: 143 T_Slice: 59 T_SpecificKeyword: 73 T_SpecificValue: 137 T_StateDecl: 92 T_String: 172 T_StringAdd: 9 T_StringAddEqual: 25 T_StringCompare: 44 T_StringEqual: 42 T_StringGreater: 40 T_StringGreaterEqual: 41 T_StringLess: 38 T_StringLessEqual: 39 T_StringMul: 19 T_StringMulEqual: 28 T_StringNotEqual: 43 T_Sub: 2 T_SubEqual: 21 T_ThreeTermOperator: 6 T_ToDo: 61 T_TypeRef: 159 T_Undefined: 211 T_UnlessStmt: 99 T_UntilStmt: 100 T_UseDecl: 93 T_UsedName: 94 T_Var: 166 T_VarDecl: 62 T_VersionString: 175 T_WhenStmt: 101 T_WhileStmt: 132 T_WhiteSpace: 210 gen_decl.pl100644000765000024 5737413603257356 16656 0ustar00goccystaff000000000000Compiler-Lexer-0.23/gen#!/usr/bin/perl use strict; use warnings; use YAML::XS qw/Dump/; my (@info, @token_enum, @kind_enum, @syntax_enum, @type_to_info); foreach () { my ($kind, $type, $data) = split /\s+/; my $info = { type => "$type", kind => "$kind", data => "$data" }; push @info, $info; unless (grep { $_ eq $type } @token_enum) { push @token_enum, $type; $type_to_info[scalar @token_enum - 1] = $info; } unless (grep { $_ eq $kind } @kind_enum) { push @kind_enum, $kind if ($kind); } } @syntax_enum = qw/Value Term Expr Stmt BlockStmt/; my $token_type = join ",\n", map { "\t$_" } @token_enum; my $token_kind = join ",\n", map { "\t$_" } @kind_enum; my $token_info = join ",\n", map { sprintf(qq|\t{Enum::Token::Type::%s, Enum::Token::Kind::%s, "%s", "%s"}|, $_->{type}, $_->{kind}, $_->{type}, $_->{data}); } @info; my $type_to_info = join ",\n", map { sprintf(qq|\t{Enum::Token::Type::%s, Enum::Token::Kind::%s, "%s", "%s"}|, $_->{type}, $_->{kind}, $_->{type}, $_->{data}); } @type_to_info; my %keyword_map; $keyword_map{$_->{data}}++ foreach @info; my $keywords = join "\n", map { sprintf(qq|"%s", {Enum::Token::Type::%s, Enum::Token::Kind::%s, "%s", "%s"}|, $_->{data}, $_->{type}, $_->{kind}, $_->{type}, $_->{data}); } grep { $keyword_map{$_->{data}} == 1 } grep { $_->{data} } @info; my $count = 0; my $token_type_enums = join ",\n", map { ' ' x 4 . "T_$_ => " . $count++; } @token_enum; $count = 0; my $syntax_type_enums = join ",\n", map { ' ' x 4 . "T_$_ => " . $count++; } @syntax_enum; $count = 0; my $token_kind_enums = join ",\n", map { ' ' x 4 . "T_$_ => " . 
$count++; } @kind_enum; my %token_type_constants_map; my %token_kind_constants_map; my %syntax_type_constants_map; $count = 0; $token_type_constants_map{"T_$_"} = $count++ foreach @token_enum; $count = 0; $token_kind_constants_map{"T_$_"} = $count++ foreach @kind_enum; $count = 0; $syntax_type_constants_map{"T_$_"} = $count++ foreach @syntax_enum; my $constants = +{ token_type => \%token_type_constants_map, syntax_type => \%syntax_type_constants_map, token_kind => \%token_kind_constants_map }; open(my $fh, '>', 'include/gen_token.hpp'); print $fh <<"CODE"; namespace Enum { namespace Token { namespace Type { typedef enum { $token_type } Type; } namespace Kind { typedef enum { $token_kind } Kind; } } } CODE open($fh, '>', 'src/compiler/util/Compiler_gen_token_decl.cpp'); print $fh <<"CODE"; #include TokenInfo decl_tokens[] = { $token_info }; TokenInfo type_to_info[] = { $type_to_info }; CODE open($fh, '>', 'lib/Compiler/Lexer/Constants.pm'); print $fh <', 'gen/gen_constants.yaml'); print $fh Dump $constants; open($fh, '>', 'gen/reserved_keywords.gperf'); print $fh < typedef struct _ReservedKeyword { const char *name; TokenInfo info; } ReservedKeyword; %} ReservedKeyword; %% $keywords %% CODE close($fh); __DATA__ Return Return return Operator Add + Operator Sub - Operator Mul * Operator Div / Operator Mod % Operator ThreeTermOperator ? Operator Greater > Operator Less < Operator StringAdd . Operator Ref \\ Operator Glob * Operator BitNot ~ Operator BitOr | Operator AlphabetOr or Operator BitAnd & Operator AlphabetAnd and Operator BitXOr ^ Operator AlphabetXOr xor Operator StringMul x Assign AddEqual += Assign SubEqual -= Assign MulEqual *= Assign DivEqual /= Assign ModEqual %= Assign StringAddEqual .= Assign LeftShiftEqual <<= Assign RightShiftEqual >>= Assign StringMulEqual x= Operator GreaterEqual >= Operator LessEqual <= Operator EqualEqual == Operator Diamond <> Operator Compare <=> Operator PolymorphicCompare ~~ Operator RegOK =~ Operator RegNot !~ Operator NotEqual != Operator StringLess lt Operator StringLessEqual le Operator StringGreater gt Operator StringGreaterEqual ge Operator StringEqual eq Operator StringNotEqual ne Operator StringCompare cmp Operator Inc ++ Operator Dec -- Operator Exp ** Assign PowerEqual **= Assign DefaultEqual //= Operator LeftShift << Operator RightShift >> Operator And && Operator Or || Assign AndBitEqual &= Assign OrBitEqual |= Assign NotBitEqual ^= Assign OrEqual ||= Assign AndEqual &&= Operator Slice .. Operator DefaultOperator // Operator ToDo ... Decl VarDecl my Decl FunctionDecl sub Function Method Assign Assign = SingleTerm ArraySize $# SingleTerm Is SingleTerm Not ! 
SingleTerm AlphabetNot not Function BuiltinFunc chomp Function BuiltinFunc chop Function BuiltinFunc chr Function BuiltinFunc crypt Function BuiltinFunc index Function BuiltinFunc lc Function BuiltinFunc lcfirst Function BuiltinFunc length Function BuiltinFunc ord Function BuiltinFunc pack Function BuiltinFunc unpack Function BuiltinFunc sort Function BuiltinFunc reverse Function BuiltinFunc rindex Function BuiltinFunc sprintf Function BuiltinFunc substr Function BuiltinFunc uc Function BuiltinFunc ucfirst Function BuiltinFunc pos Function BuiltinFunc quotemeta Function BuiltinFunc split Function BuiltinFunc study Function BuiltinFunc pop Function BuiltinFunc push Function BuiltinFunc splice Function BuiltinFunc shift Function BuiltinFunc unshift Function BuiltinFunc grep Function BuiltinFunc join Function BuiltinFunc map Function BuiltinFunc delete Function BuiltinFunc each Function BuiltinFunc exists Function BuiltinFunc keys Function BuiltinFunc values Function BuiltinFunc binmode Function BuiltinFunc close Function BuiltinFunc closedir Function BuiltinFunc dbmclose Function BuiltinFunc dbmopen Function BuiltinFunc die Function BuiltinFunc eof Function BuiltinFunc fileno Function BuiltinFunc flock Function BuiltinFunc format Function BuiltinFunc getc Function BuiltinFunc print Function BuiltinFunc say Function BuiltinFunc printf Function BuiltinFunc read Function BuiltinFunc readdir Function BuiltinFunc rewinddir Function BuiltinFunc seek Function BuiltinFunc seekdir Function BuiltinFunc select Function BuiltinFunc syscall Function BuiltinFunc sysread Function BuiltinFunc sysseek Function BuiltinFunc syswrite Function BuiltinFunc tell Function BuiltinFunc telldir Function BuiltinFunc truncate Function BuiltinFunc warn Function BuiltinFunc write Function BuiltinFunc vec Function BuiltinFunc chdir Function BuiltinFunc chmod Function BuiltinFunc chown Function BuiltinFunc chroot Function BuiltinFunc fcntl Function BuiltinFunc glob Function BuiltinFunc ioctl Function BuiltinFunc link Function BuiltinFunc lstat Function BuiltinFunc mkdir Function BuiltinFunc open Function BuiltinFunc opendir Function BuiltinFunc readlink Function BuiltinFunc rename Function BuiltinFunc rmdir Function BuiltinFunc stat Function BuiltinFunc symlink Function BuiltinFunc umask Function BuiltinFunc unlink Function BuiltinFunc utime Function BuiltinFunc caller Function BuiltinFunc dump Function BuiltinFunc eval Function BuiltinFunc exit Function BuiltinFunc wantarray Function BuiltinFunc formline Function BuiltinFunc reset Function BuiltinFunc scalar Function BuiltinFunc alarm Function BuiltinFunc exec Function BuiltinFunc fork Function BuiltinFunc getpgrp Function BuiltinFunc getppid Function BuiltinFunc getpriority Function BuiltinFunc kill Function BuiltinFunc pipe Function BuiltinFunc setpgrp Function BuiltinFunc setpriority Function BuiltinFunc sleep Function BuiltinFunc system Function BuiltinFunc times Function BuiltinFunc wait Function BuiltinFunc waitpid Function BuiltinFunc no Function BuiltinFunc tie Function BuiltinFunc tied Function BuiltinFunc untie Function BuiltinFunc accept Function BuiltinFunc bind Function BuiltinFunc connect Function BuiltinFunc getpeername Function BuiltinFunc getsockname Function BuiltinFunc getsockopt Function BuiltinFunc listen Function BuiltinFunc recv Function BuiltinFunc send Function BuiltinFunc setsockopt Function BuiltinFunc shutdown Function BuiltinFunc socket Function BuiltinFunc socketpair Function BuiltinFunc msgctl Function BuiltinFunc msgget Function BuiltinFunc 
msgrcv Function BuiltinFunc msgsnd Function BuiltinFunc semctl Function BuiltinFunc semget Function BuiltinFunc semop Function BuiltinFunc shmctl Function BuiltinFunc shmget Function BuiltinFunc shmread Function BuiltinFunc shmwrite Function BuiltinFunc endgrent Function BuiltinFunc endhostent Function BuiltinFunc endnetent Function BuiltinFunc endpwent Function BuiltinFunc getgrent Function BuiltinFunc getgrgid Function BuiltinFunc getgrnam Function BuiltinFunc getlogin Function BuiltinFunc getpwent Function BuiltinFunc getpwnam Function BuiltinFunc getpwuid Function BuiltinFunc setgrent Function BuiltinFunc setpwent Function BuiltinFunc endprotoent Function BuiltinFunc endservent Function BuiltinFunc gethostbyaddr Function BuiltinFunc gethostbyname Function BuiltinFunc gethostent Function BuiltinFunc getnetbyaddr Function BuiltinFunc getnetbyname Function BuiltinFunc getnetent Function BuiltinFunc getprotobyname Function BuiltinFunc getprotobynumber Function BuiltinFunc getprotoent Function BuiltinFunc getservbyname Function BuiltinFunc getservbyport Function BuiltinFunc getservent Function BuiltinFunc sethostent Function BuiltinFunc setnetent Function BuiltinFunc setprotoent Function BuiltinFunc setservent Function BuiltinFunc gmtime Function BuiltinFunc localtime Function BuiltinFunc time Function BuiltinFunc ref Function BuiltinFunc bless Function BuiltinFunc defined Function BuiltinFunc abs Function BuiltinFunc atan2 Function BuiltinFunc cos Function BuiltinFunc exp Function BuiltinFunc hex Function BuiltinFunc int Function BuiltinFunc log Function BuiltinFunc oct Function BuiltinFunc rand Function BuiltinFunc sin Function BuiltinFunc sqrt Function BuiltinFunc srand Decl RequireDecl require Import Import import SpecificKeyword SpecificKeyword __PACKAGE__ SpecificKeyword SpecificKeyword __FILE__ SpecificKeyword SpecificKeyword __LINE__ SpecificKeyword SpecificKeyword __SUB__ DataWord DataWord __DATA__ DataWord DataWord __END__ ModWord ModWord BEGIN ModWord ModWord CHECK ModWord ModWord INIT ModWord ModWord END ModWord ModWord UNITCHECK AUTOLOAD AUTOLOAD AUTOLOAD CORE CORE CORE DESTROY DESTROY DESTROY Handle STDIN STDIN Handle STDOUT STDOUT Handle STDERR STDERR Control Redo redo Control Next next Control Last last Control Goto goto Control Continue continue Do Do do Control Break break Handle Handle -b Handle Handle -c Handle Handle -d Handle Handle -e Handle Handle -f Handle Handle -g Handle Handle -k Handle Handle -l Handle Handle -o Handle Handle -p Handle Handle -r Handle Handle -s Handle Handle -t Handle Handle -u Handle Handle -w Handle Handle -x Handle Handle -z Handle Handle -A Handle Handle -B Handle Handle -C Handle Handle -M Handle Handle -O Handle Handle -R Handle Handle -S Handle Handle -T Handle Handle -W Handle Handle -X Decl LocalDecl local Decl OurDecl our Decl StateDecl state Decl UseDecl use Module UsedName Module RequiredName Stmt IfStmt if Stmt ElseStmt else Stmt ElsifStmt elsif Stmt UnlessStmt unless Stmt UntilStmt until Stmt WhenStmt when Stmt GivenStmt given DefaultStmt DefaultStmt default Comma Comma , Colon Colon : StmtEnd SemiColon ; Symbol LeftParenthesis ( Symbol RightParenthesis ) Symbol LeftBrace { Symbol RightBrace } Symbol LeftBracket [ Symbol RightBracket ] Modifier ArrayDereference @{ Modifier HashDereference %{ Modifier ScalarDereference ${ Modifier CodeDereference &{ Modifier ShortScalarDereference Modifier ShortArrayDereference @$ Modifier ShortHashDereference %$ Modifier ShortCodeDereference &$ Modifier ArraySizeDereference $#{ Term Key Term 
BareWord Operator Arrow => Operator Pointer -> Operator NamespaceResolver :: Namespace Namespace Package Package package Class Class Decl CallDecl & SingleTerm CodeRef \\& Stmt WhileStmt while Stmt ForStmt for Stmt ForeachStmt foreach Annotation Annotation #@ Term ArgumentArray @_ Term SpecificValue $_ Term SpecificValue $0 Term SpecificValue $1 Term SpecificValue $2 Term SpecificValue $3 Term SpecificValue $4 Term SpecificValue $5 Term SpecificValue $6 Term SpecificValue $7 Term SpecificValue $8 Term SpecificValue $9 Term SpecificValue $& Term SpecificValue $` Term SpecificValue $' Term SpecificValue $+ Term SpecificValue $. Term SpecificValue $/ Term SpecificValue $| Term SpecificValue $* Term SpecificValue $, Term SpecificValue $\\ Term SpecificValue $\" Term SpecificValue $% Term SpecificValue $= Term SpecificValue $- Term SpecificValue $~ Term SpecificValue $^ Term SpecificValue $: Term SpecificValue $? Term SpecificValue $! Term SpecificValue $@ Term SpecificValue $$ Term SpecificValue $< Term SpecificValue $> Term SpecificValue $( Term SpecificValue $) Term SpecificValue $[ Term SpecificValue $] Term SpecificValue $; Term SpecificValue $^A Term SpecificValue $^D Term SpecificValue $^E Term SpecificValue $^F Term SpecificValue $^G Term SpecificValue $^H Term SpecificValue $^I Term SpecificValue $^L Term SpecificValue $^M Term SpecificValue $^O Term SpecificValue $^P Term SpecificValue $^R Term SpecificValue $^T Term SpecificValue $^W Term SpecificValue $^X Term ConstValue Term ProgramArgument @ARGV Term LibraryDirectories @INC Term Environment %ENV Term Include %INC Term Signal %SIG RegOpt RegOpt RegPrefix RegQuote q RegPrefix RegDoubleQuote qq RegPrefix RegList qw RegPrefix RegExec qx RegPrefix RegDecl qr RegPrefix RegMatch m Term RegDelim Term HandleDelim Term RegMiddleDelim RegReplacePrefix RegAllReplace tr RegReplacePrefix RegAllReplace y RegReplacePrefix RegReplace s Term RegReplaceFrom Term RegReplaceTo Decl FieldDecl Ref TypeRef Ref LabelRef Decl LocalVarDecl Decl GlobalVarDecl Decl MultiLocalVarDecl Decl MultiGlobalVarDecl Term Prototype Term Var Term CodeVar Term ArrayVar Term HashVar Term Int Term Double Term String Term RawString Term ExecString Term VersionString Term HereDocumentTag Term HereDocumentRawTag Term HereDocumentExecTag Term HereDocumentBareTag Term RawHereDocument Term HereDocument Term HereDocumentEnd Decl FormatDecl Term Format Term FormatEnd Term Object Term RegExp Term Array Term Hash Operator Operator Term LocalVar Term LocalArrayVar Term LocalHashVar Term GlobalVar Term GlobalArrayVar Term GlobalHashVar Ref ArrayRef Ref HashRef Get ArrayAt Get HashAt Set ArraySet Set HashSet Decl Function Function Call Term Argument Term List Term Default undef Verbose Pod Verbose Comment Verbose WhiteSpace Symbol PostDeref Symbol PostDerefStar Symbol PostDerefArraySliceOpenBracket Symbol PostDerefArraySliceCloseBracket Symbol PostDerefHashSliceOpenBrace Symbol PostDerefHashSliceCloseBrace Symbol PostDerefCodeOpenParen Symbol PostDerefCodeCloseParen Undefined Undefined reserved_keywords.gperf100644000765000024 11150213603257356 21354 0ustar00goccystaff000000000000Compiler-Lexer-0.23/gen%{ #include typedef struct _ReservedKeyword { const char *name; TokenInfo info; } ReservedKeyword; %} ReservedKeyword; %% "return", {Enum::Token::Type::Return, Enum::Token::Kind::Return, "Return", "return"} "+", {Enum::Token::Type::Add, Enum::Token::Kind::Operator, "Add", "+"} "-", {Enum::Token::Type::Sub, Enum::Token::Kind::Operator, "Sub", "-"} "/", {Enum::Token::Type::Div, 
Enum::Token::Kind::Operator, "Div", "/"} "%", {Enum::Token::Type::Mod, Enum::Token::Kind::Operator, "Mod", "%"} "?", {Enum::Token::Type::ThreeTermOperator, Enum::Token::Kind::Operator, "ThreeTermOperator", "?"} ">", {Enum::Token::Type::Greater, Enum::Token::Kind::Operator, "Greater", ">"} "<", {Enum::Token::Type::Less, Enum::Token::Kind::Operator, "Less", "<"} ".", {Enum::Token::Type::StringAdd, Enum::Token::Kind::Operator, "StringAdd", "."} "\\", {Enum::Token::Type::Ref, Enum::Token::Kind::Operator, "Ref", "\\"} "~", {Enum::Token::Type::BitNot, Enum::Token::Kind::Operator, "BitNot", "~"} "|", {Enum::Token::Type::BitOr, Enum::Token::Kind::Operator, "BitOr", "|"} "or", {Enum::Token::Type::AlphabetOr, Enum::Token::Kind::Operator, "AlphabetOr", "or"} "and", {Enum::Token::Type::AlphabetAnd, Enum::Token::Kind::Operator, "AlphabetAnd", "and"} "^", {Enum::Token::Type::BitXOr, Enum::Token::Kind::Operator, "BitXOr", "^"} "xor", {Enum::Token::Type::AlphabetXOr, Enum::Token::Kind::Operator, "AlphabetXOr", "xor"} "x", {Enum::Token::Type::StringMul, Enum::Token::Kind::Operator, "StringMul", "x"} "+=", {Enum::Token::Type::AddEqual, Enum::Token::Kind::Assign, "AddEqual", "+="} "-=", {Enum::Token::Type::SubEqual, Enum::Token::Kind::Assign, "SubEqual", "-="} "*=", {Enum::Token::Type::MulEqual, Enum::Token::Kind::Assign, "MulEqual", "*="} "/=", {Enum::Token::Type::DivEqual, Enum::Token::Kind::Assign, "DivEqual", "/="} "%=", {Enum::Token::Type::ModEqual, Enum::Token::Kind::Assign, "ModEqual", "%="} ".=", {Enum::Token::Type::StringAddEqual, Enum::Token::Kind::Assign, "StringAddEqual", ".="} "<<=", {Enum::Token::Type::LeftShiftEqual, Enum::Token::Kind::Assign, "LeftShiftEqual", "<<="} ">>=", {Enum::Token::Type::RightShiftEqual, Enum::Token::Kind::Assign, "RightShiftEqual", ">>="} "x=", {Enum::Token::Type::StringMulEqual, Enum::Token::Kind::Assign, "StringMulEqual", "x="} ">=", {Enum::Token::Type::GreaterEqual, Enum::Token::Kind::Operator, "GreaterEqual", ">="} "<=", {Enum::Token::Type::LessEqual, Enum::Token::Kind::Operator, "LessEqual", "<="} "==", {Enum::Token::Type::EqualEqual, Enum::Token::Kind::Operator, "EqualEqual", "=="} "<>", {Enum::Token::Type::Diamond, Enum::Token::Kind::Operator, "Diamond", "<>"} "<=>", {Enum::Token::Type::Compare, Enum::Token::Kind::Operator, "Compare", "<=>"} "~~", {Enum::Token::Type::PolymorphicCompare, Enum::Token::Kind::Operator, "PolymorphicCompare", "~~"} "=~", {Enum::Token::Type::RegOK, Enum::Token::Kind::Operator, "RegOK", "=~"} "!~", {Enum::Token::Type::RegNot, Enum::Token::Kind::Operator, "RegNot", "!~"} "!=", {Enum::Token::Type::NotEqual, Enum::Token::Kind::Operator, "NotEqual", "!="} "lt", {Enum::Token::Type::StringLess, Enum::Token::Kind::Operator, "StringLess", "lt"} "le", {Enum::Token::Type::StringLessEqual, Enum::Token::Kind::Operator, "StringLessEqual", "le"} "gt", {Enum::Token::Type::StringGreater, Enum::Token::Kind::Operator, "StringGreater", "gt"} "ge", {Enum::Token::Type::StringGreaterEqual, Enum::Token::Kind::Operator, "StringGreaterEqual", "ge"} "eq", {Enum::Token::Type::StringEqual, Enum::Token::Kind::Operator, "StringEqual", "eq"} "ne", {Enum::Token::Type::StringNotEqual, Enum::Token::Kind::Operator, "StringNotEqual", "ne"} "cmp", {Enum::Token::Type::StringCompare, Enum::Token::Kind::Operator, "StringCompare", "cmp"} "++", {Enum::Token::Type::Inc, Enum::Token::Kind::Operator, "Inc", "++"} "--", {Enum::Token::Type::Dec, Enum::Token::Kind::Operator, "Dec", "--"} "**", {Enum::Token::Type::Exp, Enum::Token::Kind::Operator, "Exp", "**"} "**=", 
{Enum::Token::Type::PowerEqual, Enum::Token::Kind::Assign, "PowerEqual", "**="} "//=", {Enum::Token::Type::DefaultEqual, Enum::Token::Kind::Assign, "DefaultEqual", "//="} "<<", {Enum::Token::Type::LeftShift, Enum::Token::Kind::Operator, "LeftShift", "<<"} ">>", {Enum::Token::Type::RightShift, Enum::Token::Kind::Operator, "RightShift", ">>"} "&&", {Enum::Token::Type::And, Enum::Token::Kind::Operator, "And", "&&"} "||", {Enum::Token::Type::Or, Enum::Token::Kind::Operator, "Or", "||"} "&=", {Enum::Token::Type::AndBitEqual, Enum::Token::Kind::Assign, "AndBitEqual", "&="} "|=", {Enum::Token::Type::OrBitEqual, Enum::Token::Kind::Assign, "OrBitEqual", "|="} "^=", {Enum::Token::Type::NotBitEqual, Enum::Token::Kind::Assign, "NotBitEqual", "^="} "||=", {Enum::Token::Type::OrEqual, Enum::Token::Kind::Assign, "OrEqual", "||="} "&&=", {Enum::Token::Type::AndEqual, Enum::Token::Kind::Assign, "AndEqual", "&&="} "..", {Enum::Token::Type::Slice, Enum::Token::Kind::Operator, "Slice", ".."} "//", {Enum::Token::Type::DefaultOperator, Enum::Token::Kind::Operator, "DefaultOperator", "//"} "...", {Enum::Token::Type::ToDo, Enum::Token::Kind::Operator, "ToDo", "..."} "my", {Enum::Token::Type::VarDecl, Enum::Token::Kind::Decl, "VarDecl", "my"} "sub", {Enum::Token::Type::FunctionDecl, Enum::Token::Kind::Decl, "FunctionDecl", "sub"} "=", {Enum::Token::Type::Assign, Enum::Token::Kind::Assign, "Assign", "="} "$#", {Enum::Token::Type::ArraySize, Enum::Token::Kind::SingleTerm, "ArraySize", "$#"} "!", {Enum::Token::Type::Not, Enum::Token::Kind::SingleTerm, "Not", "!"} "not", {Enum::Token::Type::AlphabetNot, Enum::Token::Kind::SingleTerm, "AlphabetNot", "not"} "chomp", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chomp"} "chop", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chop"} "chr", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chr"} "crypt", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "crypt"} "index", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "index"} "lc", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "lc"} "lcfirst", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "lcfirst"} "length", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "length"} "ord", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "ord"} "pack", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "pack"} "unpack", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "unpack"} "sort", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sort"} "reverse", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "reverse"} "rindex", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rindex"} "sprintf", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sprintf"} "substr", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "substr"} "uc", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "uc"} "ucfirst", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "ucfirst"} "pos", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "pos"} "quotemeta", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", 
"quotemeta"} "split", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "split"} "study", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "study"} "pop", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "pop"} "push", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "push"} "splice", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "splice"} "shift", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shift"} "unshift", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "unshift"} "grep", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "grep"} "join", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "join"} "map", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "map"} "delete", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "delete"} "each", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "each"} "exists", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "exists"} "keys", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "keys"} "values", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "values"} "binmode", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "binmode"} "close", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "close"} "closedir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "closedir"} "dbmclose", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "dbmclose"} "dbmopen", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "dbmopen"} "die", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "die"} "eof", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "eof"} "fileno", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "fileno"} "flock", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "flock"} "format", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "format"} "getc", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getc"} "print", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "print"} "say", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "say"} "printf", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "printf"} "read", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "read"} "readdir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "readdir"} "rewinddir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rewinddir"} "seek", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "seek"} "seekdir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "seekdir"} "select", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "select"} "syscall", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "syscall"} "sysread", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", 
"sysread"} "sysseek", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sysseek"} "syswrite", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "syswrite"} "tell", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "tell"} "telldir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "telldir"} "truncate", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "truncate"} "warn", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "warn"} "write", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "write"} "vec", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "vec"} "chdir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chdir"} "chmod", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chmod"} "chown", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chown"} "chroot", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chroot"} "fcntl", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "fcntl"} "glob", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "glob"} "ioctl", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "ioctl"} "link", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "link"} "lstat", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "lstat"} "mkdir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "mkdir"} "open", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "open"} "opendir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "opendir"} "readlink", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "readlink"} "rename", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rename"} "rmdir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rmdir"} "stat", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "stat"} "symlink", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "symlink"} "umask", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "umask"} "unlink", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "unlink"} "utime", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "utime"} "caller", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "caller"} "dump", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "dump"} "eval", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "eval"} "exit", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "exit"} "wantarray", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "wantarray"} "formline", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "formline"} "reset", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "reset"} "scalar", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "scalar"} "alarm", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", 
"alarm"} "exec", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "exec"} "fork", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "fork"} "getpgrp", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpgrp"} "getppid", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getppid"} "getpriority", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpriority"} "kill", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "kill"} "pipe", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "pipe"} "setpgrp", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setpgrp"} "setpriority", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setpriority"} "sleep", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sleep"} "system", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "system"} "times", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "times"} "wait", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "wait"} "waitpid", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "waitpid"} "no", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "no"} "tie", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "tie"} "tied", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "tied"} "untie", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "untie"} "accept", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "accept"} "bind", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "bind"} "connect", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "connect"} "getpeername", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpeername"} "getsockname", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getsockname"} "getsockopt", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getsockopt"} "listen", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "listen"} "recv", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "recv"} "send", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "send"} "setsockopt", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setsockopt"} "shutdown", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shutdown"} "socket", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "socket"} "socketpair", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "socketpair"} "msgctl", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "msgctl"} "msgget", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "msgget"} "msgrcv", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "msgrcv"} "msgsnd", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "msgsnd"} "semctl", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "semctl"} "semget", 
{Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "semget"} "semop", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "semop"} "shmctl", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shmctl"} "shmget", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shmget"} "shmread", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shmread"} "shmwrite", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shmwrite"} "endgrent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endgrent"} "endhostent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endhostent"} "endnetent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endnetent"} "endpwent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endpwent"} "getgrent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getgrent"} "getgrgid", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getgrgid"} "getgrnam", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getgrnam"} "getlogin", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getlogin"} "getpwent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpwent"} "getpwnam", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpwnam"} "getpwuid", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpwuid"} "setgrent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setgrent"} "setpwent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setpwent"} "endprotoent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endprotoent"} "endservent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endservent"} "gethostbyaddr", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "gethostbyaddr"} "gethostbyname", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "gethostbyname"} "gethostent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "gethostent"} "getnetbyaddr", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getnetbyaddr"} "getnetbyname", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getnetbyname"} "getnetent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getnetent"} "getprotobyname", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getprotobyname"} "getprotobynumber", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getprotobynumber"} "getprotoent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getprotoent"} "getservbyname", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getservbyname"} "getservbyport", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getservbyport"} "getservent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getservent"} "sethostent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sethostent"} "setnetent", 
{Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setnetent"} "setprotoent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setprotoent"} "setservent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setservent"} "gmtime", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "gmtime"} "localtime", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "localtime"} "time", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "time"} "ref", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "ref"} "bless", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "bless"} "defined", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "defined"} "abs", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "abs"} "atan2", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "atan2"} "cos", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "cos"} "exp", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "exp"} "hex", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "hex"} "int", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "int"} "log", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "log"} "oct", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "oct"} "rand", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rand"} "sin", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sin"} "sqrt", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sqrt"} "srand", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "srand"} "require", {Enum::Token::Type::RequireDecl, Enum::Token::Kind::Decl, "RequireDecl", "require"} "import", {Enum::Token::Type::Import, Enum::Token::Kind::Import, "Import", "import"} "__PACKAGE__", {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__PACKAGE__"} "__FILE__", {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__FILE__"} "__LINE__", {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__LINE__"} "__SUB__", {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__SUB__"} "__DATA__", {Enum::Token::Type::DataWord, Enum::Token::Kind::DataWord, "DataWord", "__DATA__"} "__END__", {Enum::Token::Type::DataWord, Enum::Token::Kind::DataWord, "DataWord", "__END__"} "BEGIN", {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "BEGIN"} "CHECK", {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "CHECK"} "INIT", {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "INIT"} "END", {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "END"} "UNITCHECK", {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "UNITCHECK"} "AUTOLOAD", {Enum::Token::Type::AUTOLOAD, Enum::Token::Kind::AUTOLOAD, "AUTOLOAD", "AUTOLOAD"} "CORE", {Enum::Token::Type::CORE, Enum::Token::Kind::CORE, "CORE", "CORE"} "DESTROY", {Enum::Token::Type::DESTROY, Enum::Token::Kind::DESTROY, "DESTROY", "DESTROY"} "STDIN", {Enum::Token::Type::STDIN, 
Enum::Token::Kind::Handle, "STDIN", "STDIN"} "STDOUT", {Enum::Token::Type::STDOUT, Enum::Token::Kind::Handle, "STDOUT", "STDOUT"} "STDERR", {Enum::Token::Type::STDERR, Enum::Token::Kind::Handle, "STDERR", "STDERR"} "redo", {Enum::Token::Type::Redo, Enum::Token::Kind::Control, "Redo", "redo"} "next", {Enum::Token::Type::Next, Enum::Token::Kind::Control, "Next", "next"} "last", {Enum::Token::Type::Last, Enum::Token::Kind::Control, "Last", "last"} "goto", {Enum::Token::Type::Goto, Enum::Token::Kind::Control, "Goto", "goto"} "continue", {Enum::Token::Type::Continue, Enum::Token::Kind::Control, "Continue", "continue"} "do", {Enum::Token::Type::Do, Enum::Token::Kind::Do, "Do", "do"} "break", {Enum::Token::Type::Break, Enum::Token::Kind::Control, "Break", "break"} "-b", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-b"} "-c", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-c"} "-d", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-d"} "-e", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-e"} "-f", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-f"} "-g", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-g"} "-k", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-k"} "-l", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-l"} "-o", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-o"} "-p", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-p"} "-r", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-r"} "-s", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-s"} "-t", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-t"} "-u", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-u"} "-w", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-w"} "-x", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-x"} "-z", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-z"} "-A", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-A"} "-B", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-B"} "-C", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-C"} "-M", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-M"} "-O", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-O"} "-R", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-R"} "-S", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-S"} "-T", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-T"} "-W", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-W"} "-X", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-X"} "local", {Enum::Token::Type::LocalDecl, Enum::Token::Kind::Decl, "LocalDecl", "local"} "our", {Enum::Token::Type::OurDecl, Enum::Token::Kind::Decl, "OurDecl", "our"} "state", {Enum::Token::Type::StateDecl, Enum::Token::Kind::Decl, "StateDecl", "state"} "use", {Enum::Token::Type::UseDecl, Enum::Token::Kind::Decl, "UseDecl", "use"} "if", {Enum::Token::Type::IfStmt, Enum::Token::Kind::Stmt, "IfStmt", "if"} "else", {Enum::Token::Type::ElseStmt, Enum::Token::Kind::Stmt, "ElseStmt", "else"} "elsif", {Enum::Token::Type::ElsifStmt, Enum::Token::Kind::Stmt, "ElsifStmt", "elsif"} "unless", {Enum::Token::Type::UnlessStmt, Enum::Token::Kind::Stmt, "UnlessStmt", "unless"} "until", {Enum::Token::Type::UntilStmt, 
Enum::Token::Kind::Stmt, "UntilStmt", "until"} "when", {Enum::Token::Type::WhenStmt, Enum::Token::Kind::Stmt, "WhenStmt", "when"} "given", {Enum::Token::Type::GivenStmt, Enum::Token::Kind::Stmt, "GivenStmt", "given"} "default", {Enum::Token::Type::DefaultStmt, Enum::Token::Kind::DefaultStmt, "DefaultStmt", "default"} ",", {Enum::Token::Type::Comma, Enum::Token::Kind::Comma, "Comma", ","} ":", {Enum::Token::Type::Colon, Enum::Token::Kind::Colon, "Colon", ":"} ";", {Enum::Token::Type::SemiColon, Enum::Token::Kind::StmtEnd, "SemiColon", ";"} "(", {Enum::Token::Type::LeftParenthesis, Enum::Token::Kind::Symbol, "LeftParenthesis", "("} ")", {Enum::Token::Type::RightParenthesis, Enum::Token::Kind::Symbol, "RightParenthesis", ")"} "{", {Enum::Token::Type::LeftBrace, Enum::Token::Kind::Symbol, "LeftBrace", "{"} "}", {Enum::Token::Type::RightBrace, Enum::Token::Kind::Symbol, "RightBrace", "}"} "[", {Enum::Token::Type::LeftBracket, Enum::Token::Kind::Symbol, "LeftBracket", "["} "]", {Enum::Token::Type::RightBracket, Enum::Token::Kind::Symbol, "RightBracket", "]"} "@{", {Enum::Token::Type::ArrayDereference, Enum::Token::Kind::Modifier, "ArrayDereference", "@{"} "%{", {Enum::Token::Type::HashDereference, Enum::Token::Kind::Modifier, "HashDereference", "%{"} "${", {Enum::Token::Type::ScalarDereference, Enum::Token::Kind::Modifier, "ScalarDereference", "${"} "&{", {Enum::Token::Type::CodeDereference, Enum::Token::Kind::Modifier, "CodeDereference", "&{"} "@$", {Enum::Token::Type::ShortArrayDereference, Enum::Token::Kind::Modifier, "ShortArrayDereference", "@$"} "%$", {Enum::Token::Type::ShortHashDereference, Enum::Token::Kind::Modifier, "ShortHashDereference", "%$"} "&$", {Enum::Token::Type::ShortCodeDereference, Enum::Token::Kind::Modifier, "ShortCodeDereference", "&$"} "$#{", {Enum::Token::Type::ArraySizeDereference, Enum::Token::Kind::Modifier, "ArraySizeDereference", "$#{"} "=>", {Enum::Token::Type::Arrow, Enum::Token::Kind::Operator, "Arrow", "=>"} "->", {Enum::Token::Type::Pointer, Enum::Token::Kind::Operator, "Pointer", "->"} "::", {Enum::Token::Type::NamespaceResolver, Enum::Token::Kind::Operator, "NamespaceResolver", "::"} "package", {Enum::Token::Type::Package, Enum::Token::Kind::Package, "Package", "package"} "\\&", {Enum::Token::Type::CodeRef, Enum::Token::Kind::SingleTerm, "CodeRef", "\\&"} "while", {Enum::Token::Type::WhileStmt, Enum::Token::Kind::Stmt, "WhileStmt", "while"} "for", {Enum::Token::Type::ForStmt, Enum::Token::Kind::Stmt, "ForStmt", "for"} "foreach", {Enum::Token::Type::ForeachStmt, Enum::Token::Kind::Stmt, "ForeachStmt", "foreach"} "#@", {Enum::Token::Type::Annotation, Enum::Token::Kind::Annotation, "Annotation", "#@"} "@_", {Enum::Token::Type::ArgumentArray, Enum::Token::Kind::Term, "ArgumentArray", "@_"} "$_", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$_"} "$0", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$0"} "$1", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$1"} "$2", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$2"} "$3", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$3"} "$4", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$4"} "$5", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$5"} "$6", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$6"} "$7", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, 
"SpecificValue", "$7"} "$8", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$8"} "$9", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$9"} "$&", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$&"} "$`", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$`"} "$'", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$'"} "$+", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$+"} "$.", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$."} "$/", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$/"} "$|", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$|"} "$*", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$*"} "$,", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$,"} "$\\", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$\\"} "$\"", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$\""} "$%", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$%"} "$=", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$="} "$-", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$-"} "$~", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$~"} "$^", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^"} "$:", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$:"} "$?", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$?"} "$!", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$!"} "$@", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$@"} "$$", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$$"} "$<", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$<"} "$>", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$>"} "$(", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$("} "$)", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$)"} "$[", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$["} "$]", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$]"} "$;", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$;"} "$^A", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^A"} "$^D", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^D"} "$^E", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^E"} "$^F", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^F"} "$^G", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^G"} "$^H", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^H"} "$^I", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^I"} "$^L", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^L"} "$^M", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^M"} "$^O", 
{Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^O"} "$^P", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^P"} "$^R", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^R"} "$^T", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^T"} "$^W", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^W"} "$^X", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^X"} "@ARGV", {Enum::Token::Type::ProgramArgument, Enum::Token::Kind::Term, "ProgramArgument", "@ARGV"} "@INC", {Enum::Token::Type::LibraryDirectories, Enum::Token::Kind::Term, "LibraryDirectories", "@INC"} "%ENV", {Enum::Token::Type::Environment, Enum::Token::Kind::Term, "Environment", "%ENV"} "%INC", {Enum::Token::Type::Include, Enum::Token::Kind::Term, "Include", "%INC"} "%SIG", {Enum::Token::Type::Signal, Enum::Token::Kind::Term, "Signal", "%SIG"} "q", {Enum::Token::Type::RegQuote, Enum::Token::Kind::RegPrefix, "RegQuote", "q"} "qq", {Enum::Token::Type::RegDoubleQuote, Enum::Token::Kind::RegPrefix, "RegDoubleQuote", "qq"} "qw", {Enum::Token::Type::RegList, Enum::Token::Kind::RegPrefix, "RegList", "qw"} "qx", {Enum::Token::Type::RegExec, Enum::Token::Kind::RegPrefix, "RegExec", "qx"} "qr", {Enum::Token::Type::RegDecl, Enum::Token::Kind::RegPrefix, "RegDecl", "qr"} "m", {Enum::Token::Type::RegMatch, Enum::Token::Kind::RegPrefix, "RegMatch", "m"} "tr", {Enum::Token::Type::RegAllReplace, Enum::Token::Kind::RegReplacePrefix, "RegAllReplace", "tr"} "y", {Enum::Token::Type::RegAllReplace, Enum::Token::Kind::RegReplacePrefix, "RegAllReplace", "y"} "s", {Enum::Token::Type::RegReplace, Enum::Token::Kind::RegReplacePrefix, "RegReplace", "s"} "undef", {Enum::Token::Type::Default, Enum::Token::Kind::Term, "Default", "undef"} %% test_generator.pl100644000765000024 737413603257356 20116 0ustar00goccystaff000000000000Compiler-Lexer-0.23/genuse strict; use warnings; use Compiler::Lexer; use File::Basename qw/basename dirname/; use YAML::XS qw/LoadFile/; use File::Find qw//; use Data::Dumper; use Cwd qw/getcwd/; use constant CURRENT_DIR => getcwd; use constant YAML_PATH => CURRENT_DIR . '/gen/gen_constants.yaml'; use constant PERL_DIR => '/path/to/perl-5.24.1'; my $template = template(); sub slurp { my ($filename) = @_; open(my $fh, '<', $filename); my $script = do { local $/; <$fh> }; close $fh; return $script; } sub template { return do { local $/; <DATA> }; } sub get_constants_map { return LoadFile YAML_PATH; } sub generate { my $filename = shift; my $script = slurp($filename); my $lexer = Compiler::Lexer->new(''); my ($tokens, $stmts, $modules); eval { print "$filename: \n"; $tokens = $lexer->tokenize($script); $stmts = $lexer->get_groups_by_syntax_level($tokens, Compiler::Lexer::SyntaxType::T_Stmt); $modules = Compiler::Lexer->new($filename)->get_used_modules($script); }; if ($@) { warn "[ERROR] $filename [$@]\n"; } my $dirname = dirname $filename; my $basename = basename $filename; $dirname =~ s|(.*)/||; mkdir CURRENT_DIR . "/t/perl/$dirname" unless -d CURRENT_DIR . "/t/perl/$dirname"; open my $fh, '>', CURRENT_DIR .
"/t/perl/$dirname/$basename" or die $!; my $constans_map = get_constants_map; my $type = $constans_map->{token_type}; my $kind = $constans_map->{token_kind}; my $stype = $constans_map->{syntax_type}; foreach my $token (@$tokens) { foreach my $key (keys %$type) { if ($token->type eq $type->{$key}) { $token->type("Compiler::Lexer::TokenType::$key"); } } foreach my $key (keys %$kind) { if ($token->kind eq $kind->{$key}) { $token->kind("Compiler::Lexer::Kind::$key"); } } foreach my $key (keys %$stype) { if ($token->stype eq $stype->{$key}) { $token->stype("Compiler::Lexer::SyntaxType::$key"); } } } my $tmp1 = Dumper $tokens; $tmp1 =~ s/'type' => '(.*)'/'type' => $1/g; $tmp1 =~ s/'kind' => '(.*)'/'kind' => $1/g; $tmp1 =~ s/'stype' => '(.*)'/'stype' => $1/g; my $tmp2 = (ref $stmts eq 'ARRAY') ? Dumper $stmts : ''; my $tmp3 = Dumper $modules; my @filtered = map { $_ =~ s/\$VAR1 = //; $_ =~ s/;$//; $_; } ($tmp1, $tmp2, $tmp3); print $fh sprintf($template, $script, @filtered); print "generated ", CURRENT_DIR . "/t/perl/$dirname/$basename\n"; close $fh; } if (@ARGV) { my $file_or_dirname = $ARGV[0]; if ($file_or_dirname =~ /\.[a-zA-Z]+$/) { generate($file_or_dirname); } else { File::Find::find(sub { return unless $_ =~ /\.t$/; generate("$File::Find::dir/$_"); }, $file_or_dirname); } } else { File::Find::find(sub { return unless $_ =~ /\.t$/; generate("$File::Find::dir/$_"); }, PERL_DIR . '/t'); } __DATA__ use strict; use warnings; use Test::More; BEGIN { use_ok('Compiler::Lexer'); } my $script =<<'__SCRIPT__'; %s __SCRIPT__ subtest 'tokenize' => sub { my $tokens = Compiler::Lexer->new('')->tokenize($script); is_deeply($tokens, %s, 'Compiler::Lexer::tokenize'); }; subtest 'get_groups_by_syntax_level' => sub { my $lexer = Compiler::Lexer->new(''); my $tokens = $lexer->tokenize($script); my $stmts = $lexer->get_groups_by_syntax_level($tokens, Compiler::Lexer::SyntaxType::T_Stmt); is_deeply($stmts, %s, 'Compiler::Lexer::get_groups_by_syntax_level'); }; subtest 'get_used_modules' => sub { my $modules = Compiler::Lexer->new('')->get_used_modules($script); is_deeply($modules, %s, 'Compiler::Lexer::get_used_modules'); }; done_testing; triple_charactor_operator.gperf100644000765000024 42113603257356 22763 0ustar00goccystaff000000000000Compiler-Lexer-0.23/gen%{ typedef struct _KeywordTable { const char *name; int value; } KeywordTable; %} KeywordTable; %% <=>, 1 **=, 1 //=, 1 ||=, 1 &&=, 1 <<=, 1 >>=, 1 ..., 1 $#{, 1 $^A, 1 $^D, 1 $^E, 1 $^F, 1 $^G, 1 $^H, 1 $^I, 1 $^L, 1 $^M, 1 $^O, 1 $^P, 1 $^R, 1 $^T, 1 $^W, 1 $^X, 1 %% common.hpp100644000765000024 226113603257356 17375 0ustar00goccystaff000000000000Compiler-Lexer-0.23/include#include #include #include #include #include #include #include #include #include #include #include #include #include #define EOL '\0' #define MAX_TOKEN_SIZE 4096 #define cstr(s) s.c_str() #ifdef DEBUG_MODE #define DBG_P(fmt, ...) {\ fprintf(stderr, fmt, ## __VA_ARGS__); \ } #define DBG_PL(fmt, ...) {\ fprintf(stderr, fmt, ## __VA_ARGS__); \ fprintf(stderr, "\n"); \ } #else #define DBG_P(fmt, ...) {} #define DBG_PL(fmt, ...) 
{} #endif #define DECL(T, S) {T, #T, S} #define PTR_SIZE sizeof(void*) class TokenInfo; class Token; class Tokens; class Module; class Annotator; class AnnotateMethods; class AnnotateMethodIterator; typedef std::vector Modules; typedef std::map StringMap; typedef std::vector::iterator TokenPos; extern void *safe_malloc(size_t size); extern void safe_free(void *ptr, size_t size); #include #include typedef std::map TypeMap; typedef std::map TypeDataMap; typedef std::queue StringsQueue;gen_token.hpp100644000765000024 631313603257356 20060 0ustar00goccystaff000000000000Compiler-Lexer-0.23/includenamespace Enum { namespace Token { namespace Type { typedef enum { Return, Add, Sub, Mul, Div, Mod, ThreeTermOperator, Greater, Less, StringAdd, Ref, Glob, BitNot, BitOr, AlphabetOr, BitAnd, AlphabetAnd, BitXOr, AlphabetXOr, StringMul, AddEqual, SubEqual, MulEqual, DivEqual, ModEqual, StringAddEqual, LeftShiftEqual, RightShiftEqual, StringMulEqual, GreaterEqual, LessEqual, EqualEqual, Diamond, Compare, PolymorphicCompare, RegOK, RegNot, NotEqual, StringLess, StringLessEqual, StringGreater, StringGreaterEqual, StringEqual, StringNotEqual, StringCompare, Inc, Dec, Exp, PowerEqual, DefaultEqual, LeftShift, RightShift, And, Or, AndBitEqual, OrBitEqual, NotBitEqual, OrEqual, AndEqual, Slice, DefaultOperator, ToDo, VarDecl, FunctionDecl, Method, Assign, ArraySize, Is, Not, AlphabetNot, BuiltinFunc, RequireDecl, Import, SpecificKeyword, DataWord, ModWord, AUTOLOAD, CORE, DESTROY, STDIN, STDOUT, STDERR, Redo, Next, Last, Goto, Continue, Do, Break, Handle, LocalDecl, OurDecl, StateDecl, UseDecl, UsedName, RequiredName, IfStmt, ElseStmt, ElsifStmt, UnlessStmt, UntilStmt, WhenStmt, GivenStmt, DefaultStmt, Comma, Colon, SemiColon, LeftParenthesis, RightParenthesis, LeftBrace, RightBrace, LeftBracket, RightBracket, ArrayDereference, HashDereference, ScalarDereference, CodeDereference, ShortScalarDereference, ShortArrayDereference, ShortHashDereference, ShortCodeDereference, ArraySizeDereference, Key, BareWord, Arrow, Pointer, NamespaceResolver, Namespace, Package, Class, CallDecl, CodeRef, WhileStmt, ForStmt, ForeachStmt, Annotation, ArgumentArray, SpecificValue, ConstValue, ProgramArgument, LibraryDirectories, Environment, Include, Signal, RegOpt, RegQuote, RegDoubleQuote, RegList, RegExec, RegDecl, RegMatch, RegDelim, HandleDelim, RegMiddleDelim, RegAllReplace, RegReplace, RegReplaceFrom, RegReplaceTo, FieldDecl, TypeRef, LabelRef, LocalVarDecl, GlobalVarDecl, MultiLocalVarDecl, MultiGlobalVarDecl, Prototype, Var, CodeVar, ArrayVar, HashVar, Int, Double, String, RawString, ExecString, VersionString, HereDocumentTag, HereDocumentRawTag, HereDocumentExecTag, HereDocumentBareTag, RawHereDocument, HereDocument, HereDocumentEnd, FormatDecl, Format, FormatEnd, Object, RegExp, Array, Hash, Operator, LocalVar, LocalArrayVar, LocalHashVar, GlobalVar, GlobalArrayVar, GlobalHashVar, ArrayRef, HashRef, ArrayAt, HashAt, ArraySet, HashSet, Function, Call, Argument, List, Default, Pod, Comment, WhiteSpace, Undefined, PostDeref, PostDerefStar, PostDerefArraySliceOpenBracket, PostDerefArraySliceCloseBracket, PostDerefHashSliceOpenBrace, PostDerefHashSliceCloseBrace, PostDerefCodeOpenParen, PostDerefCodeCloseParen } Type; } namespace Kind { typedef enum { Return, Operator, Assign, Decl, Function, SingleTerm, Import, SpecificKeyword, DataWord, ModWord, AUTOLOAD, CORE, DESTROY, Handle, Control, Do, Module, Stmt, DefaultStmt, Comma, Colon, StmtEnd, Symbol, Modifier, Term, Namespace, Package, Class, Annotation, RegOpt, RegPrefix, 
RegReplacePrefix, Ref, Get, Set, Verbose, Undefined } Kind; } } } keyword.hpp100644000765000024 200113603257356 17561 0ustar00goccystaff000000000000Compiler-Lexer-0.23/include#define TRIPLE_OPERATOR_TOTAL_KEYWORDS 24 #define TRIPLE_OPERATOR_MIN_WORD_LENGTH 3 #define TRIPLE_OPERATOR_MAX_WORD_LENGTH 3 #define TRIPLE_OPERATOR_MIN_HASH_VALUE 3 #define TRIPLE_OPERATOR_MAX_HASH_VALUE 50 #define DOUBLE_OPERATOR_TOTAL_KEYWORDS 79 #define DOUBLE_OPERATOR_MIN_WORD_LENGTH 2 #define DOUBLE_OPERATOR_MAX_WORD_LENGTH 2 #define DOUBLE_OPERATOR_MIN_HASH_VALUE 2 #define DOUBLE_OPERATOR_MAX_HASH_VALUE 200 class TripleCharactorOperatorMap { private: static inline unsigned int hash(const char *str); public: static const char *in_word_set(const char *str); }; class DoubleCharactorOperatorMap { private: static inline unsigned int hash(const char *str); public: static const char *in_word_set(const char *str); }; typedef struct _ReservedKeyword { const char *name; TokenInfo info; } ReservedKeyword; class ReservedKeywordMap { private: static inline unsigned int hash (const char *str, unsigned int len); public: static ReservedKeyword *in_word_set (const char *str, unsigned int len); }; lexer.hpp100644000765000024 2240413603257356 17245 0ustar00goccystaff000000000000Compiler-Lexer-0.23/include#include #include typedef Token TokenPool; class TokenManager { public: Tokens *tokens; size_t max_token_size; size_t idx; TypeMap type_to_info_map; TypeDataMap data_to_info_map; TypeMap::iterator type_to_info_map_end; TypeDataMap::iterator data_to_info_map_end; ReservedKeywordMap keyword_map; TokenInfo undefined_info; Token *head; TokenPool *pool; bool verbose; TokenManager(void); TokenManager(size_t script_size, bool verbose); inline Token *new_Token(char *data, FileInfo finfo) { Token *ret = pool++; ret->stype = Enum::Parser::Syntax::Value; ret->type = Enum::Token::Type::Undefined; ret->finfo = finfo; ret->info = undefined_info; ret->_data = data; ret->token_num = 0; ret->total_token_num = 0; ret->deparsed_data = ""; return ret; } Token *at(size_t i); size_t size(void); void dump(void); Token *getTokenByBase(Token *base, int offset); Token *getTokenByIdx(size_t idx); Token *beforePreviousToken(void); Token *beforePreviousToken(Token *tk); Token *previousToken(void); Token *previousToken(Token *tk); Token *currentToken(void); Token *nextToken(void); Token *nextToken(Token *tk); Token *beforeLastToken(void); Token *lastToken(void); void remove(size_t idx); inline TokenInfo getTokenInfo(Enum::Token::Type::Type type) { return type_to_info[type]; } inline TokenInfo getTokenInfo(const char *data) { ReservedKeyword *ret = keyword_map.in_word_set(data, strlen(data)); if (ret) return ret->info; return undefined_info; } inline void add(Token *tk) { if (tk) tokens->add(tk); } bool end(void); Token *next(void); Token *back(void); }; class ScriptManager { public: char *_script; char *raw_script; size_t script_size; size_t idx; ScriptManager(char *script); bool compare(int start, int end, std::string target); inline char getCharByOffset(int offset) { size_t current_idx = this->idx; int wanted_idx = current_idx + offset; return (0 <= wanted_idx && (size_t)wanted_idx < script_size) ? raw_script[wanted_idx] : EOL; } inline char beforePreviousChar(void) { size_t current_idx = this->idx; int wanted_idx = current_idx - 2; return (0 <= wanted_idx) ? raw_script[wanted_idx] : EOL; } inline char previousChar(void) { size_t current_idx = this->idx; int wanted_idx = current_idx - 1; return (0 <= wanted_idx) ? 
raw_script[wanted_idx] : EOL; } inline char currentChar(void) { return idx < script_size ? raw_script[idx] : EOL; } inline char nextChar(void) { size_t current_idx = this->idx; int wanted_idx = current_idx + 1; return ((size_t)wanted_idx < script_size) ? raw_script[wanted_idx] : EOL; } inline char afterNextChar(void) { size_t current_idx = this->idx; int wanted_idx = current_idx + 2; return ((size_t)wanted_idx < script_size) ? raw_script[wanted_idx] : EOL; } inline char next(void) { return raw_script[++idx]; } inline char back(void) { return raw_script[--idx]; } inline bool end(void) { return idx >= script_size; } inline char forward(size_t progress) { this->idx += progress; return raw_script[idx]; } }; class LexContext { public: ScriptManager *smgr; TokenManager *tmgr; FileInfo finfo; int progress; char *buffer_head; char *token_buffer; size_t buffer_idx; size_t script_size; TokenPos itr; Enum::Token::Type::Type prev_type; LexContext(const char *filename, char *script, bool verbose); LexContext(Tokens *tokens); inline char *buffer(void) { return token_buffer; } inline void clearBuffer(void) { token_buffer += buffer_idx; token_buffer[0] = EOL; buffer_idx = 0; token_buffer++; token_buffer[0] = EOL; } inline void writeBuffer(char ch) { token_buffer[buffer_idx++] = ch; token_buffer[buffer_idx] = EOL; } inline void writeBuffer(const char *str) { for (size_t i = 0; str[i] != EOL; i++) { token_buffer[buffer_idx++] = str[i]; } token_buffer[buffer_idx] = EOL; } inline bool existsBuffer(void) { return token_buffer[0] != EOL; } Token *tk(void); Token *nextToken(void); void next(void); bool end(void); }; class Module { public: const char *name; const char *args; Module(const char *name, const char *args); }; class Scanner { public: bool isStringStarted; bool isRegexStarted; bool isPrototypeStarted; bool isFormatStarted; Token *formatDeclaredToken; bool commentFlag; bool skipFlag; char start_string_ch; char regex_delim; char regex_middle_delim; int brace_count_inner_regex; int bracket_count_inner_regex; int cury_brace_count_inner_regex; Token *here_document_tag_tk; StringsQueue here_document_tags; StringMap regex_prefix_map; StringMap regex_replace_map; StringMap enable_regex_argument_func_map; StringMap dereference_prefix_map; DoubleCharactorOperatorMap double_operator_map; TripleCharactorOperatorMap triple_operator_map; StringMap operator_map; bool verbose; Scanner(void); bool isRegexStartDelim(LexContext *ctx, const StringMap &list); bool isRegexEndDelim(LexContext *ctx); bool isRegexDelim(LexContext *ctx, Token *prev_token, char symbol); bool isHereDocument(LexContext *ctx, Token *prev_token); bool isPostDeref(LexContext *ctx); bool isFormat(LexContext *ctx, Token *tk); bool isVersionString(LexContext *ctx); bool isRegex(LexContext *ctx); bool isSkip(LexContext *ctx); bool isPrototype(LexContext *ctx); bool isRegexOptionPrevToken(LexContext *ctx); bool isRegexOption(const char *opt); char getRegexDelim(LexContext *ctx); Token *scanQuote(LexContext *ctx, char quote); Token *scanRegQuote(LexContext *ctx, char delim); Token *scanNewLineKeyword(LexContext *ctx); Token *scanTabKeyword(LexContext *ctx); Token *scanPrevSymbol(LexContext *ctx, char symbol); Token *scanCurSymbol(LexContext *ctx, char symbol); Token *scanDoubleCharacterOperator(LexContext *ctx, char symbol, char next_ch); Token *scanTripleCharacterOperator(LexContext *ctx, char symbol, char next_ch, char after_next_ch); Token *scanPostDeref(LexContext *ctx); Token *scanSymbol(LexContext *ctx); Token *scanWordDelimiter(LexContext *ctx); 
Token *scanReference(LexContext *ctx); Token *scanSingleLineComment(LexContext *ctx); Token *scanLineDelimiter(LexContext *ctx); Token *scanNumber(LexContext *ctx); Token *scanVersionString(LexContext *ctx); Token *scanWhiteSpace(LexContext *ctx); bool scanNegativeNumber(LexContext *ctx, char num); inline bool hereDocumentFlag(void) { return here_document_tags.size() > 0; } }; class Lexer { public: TokenPos head; size_t start_pos; size_t pos; FileInfo finfo; const char *filename; bool verbose; LexContext *ctx; Lexer(const char *filename, bool verbose); ~Lexer(void); Tokens *tokenize(char *script); void clearContext(void); void grouping(Tokens *tokens); void prepare(Tokens *tokens); Token *parseSyntax(Token *start_token, Tokens *tokens); void parseSpecificStmt(Token *root); void setIndent(Token *tk, int indent); void setBlockIDWithBreadthFirst(Token *tk, size_t base_id); void setBlockIDWithDepthFirst(Token *tk, size_t *block_id); void dump(Tokens *tokens); void dumpSyntax(Token *tk, int indent); Tokens *getTokensBySyntaxLevel(Token *root, Enum::Parser::Syntax::Type type); Modules *getUsedModules(Token *root); private: void annotateTokens(LexContext *ctx, Tokens *tokens); bool isExpr(Token *tk, Token *prev_tk, Enum::Token::Type::Type type, Enum::Token::Kind::Kind kind); void insertStmt(Token *tk, int idx, size_t grouping_num); void insertParenthesis(Tokens *tokens); }; class Annotator { public: StringMap vardecl_map; StringMap funcdecl_map; StringMap pkgdecl_map; Annotator(void); void annotate(LexContext *ctx, Token *tk); private: bool isRegexOption(const char *opt); void annotateRegOpt(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateNamespace(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateMethod(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateKey(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateShortScalarDereference(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateCallDecl(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateHandleDelimiter(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateReservedKeyword(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateGlobOrMul(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateNamelessFunction(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateLocalVariable(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateVariable(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateGlobalVariable(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateFunction(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateCall(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateClass(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateModuleName(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); void annotateBareWord(LexContext *ctx, const std::string &data, Token *tk, TokenInfo *info); }; #define isSKIP() commentFlag token.hpp100644000765000024 215213603257356 17224 0ustar00goccystaff000000000000Compiler-Lexer-0.23/includenamespace Enum { namespace Parser { namespace Syntax { typedef enum { Value, Term, Expr, Stmt, BlockStmt } Type; } 
} } class FileInfo { public: size_t start_line_num; size_t end_line_num; size_t indent; size_t block_id; const char *filename; }; class TokenInfo { public: Enum::Token::Type::Type type; Enum::Token::Kind::Kind kind; const char *name; const char *data; bool has_warnings; }; class Token { public: Enum::Parser::Syntax::Type stype; Enum::Token::Type::Type type; TokenInfo info; FileInfo finfo; Token **tks; const char *_data; size_t token_num; size_t total_token_num; const char *deparsed_data; bool isDeparsed; bool isDeleted; Token(std::string data_, FileInfo finfo); Token(Tokens *tokens); const char *deparse(void); }; class Tokens : public std::vector { public: Tokens(void) {} inline void add(Token *token) { if (token) push_back(token); } inline void remove(size_t) { //erase(idx); } inline Token *lastToken(void) { return (size() > 0) ? back() : NULL; } }; extern TokenInfo decl_tokens[]; extern TokenInfo type_to_info[]; Lexer.pm100644000765000024 712113603257356 17706 0ustar00goccystaff000000000000Compiler-Lexer-0.23/lib/Compilerpackage Compiler::Lexer; use strict; use warnings; use 5.008_001; use File::Find; use Compiler::Lexer::Token; use Compiler::Lexer::Constants; require Exporter; our @ISA = qw(Exporter); our %EXPORT_TAGS = ( 'all' => [ qw() ] ); our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } ); our @EXPORT = qw(); our $VERSION = '0.23'; require XSLoader; XSLoader::load(__PACKAGE__, $VERSION); my $inc; sub new { my ($class, $args) = @_; my $options = +{}; if (ref $args eq 'HASH') { $options = $args; } elsif (ref $args eq 'SCALAR') { $options->{filename} = $args; } $options->{filename} ||= '-'; $options->{verbose} ||= 0; return $class->_new($options); } sub set_library_path { my ($self, $_inc) = @_; $inc = $_inc; } sub load_module { my ($self, $name) = @_; $name =~ s|::|/|g; my @include_path = ($inc) ? @$inc : @INC; my $module_path = ''; foreach my $path (@include_path) { next unless -e $path; find(sub { return if ($module_path); my $absolute_path = $File::Find::name; if ($absolute_path =~ "$name.pm") { $module_path = $absolute_path; } }, $path); last if ($module_path); } return undef unless $module_path; open my $fh, '<', $module_path; return do { local $/; <$fh> }; } sub recursive_tokenize { my ($self, $script) = @_; my %results; $self->__recursive_tokenize(\%results, $script); $results{main} = $self->tokenize($script); return \%results; } sub __recursive_tokenize { my ($self, $results, $script) = @_; my $modules = $self->get_used_modules($script); foreach my $module (@$modules) { my $name = $module->{name}; next if (defined $results->{$name}); $results->{$name} ||= []; my $code = $self->load_module($name); next unless ($code); $results->{$name} = $self->tokenize($code); $self->__recursive_tokenize($results, $code); } } 1; __END__ =encoding utf-8 =head1 NAME Compiler::Lexer - Lexical Analyzer for Perl5 =head1 SYNOPSIS use Compiler::Lexer; use Data::Dumper; my $filename = $ARGV[0]; open my $fh, '<', $filename or die "Cannot open $filename: $!"; my $script = do { local $/; <$fh> }; my $lexer = Compiler::Lexer->new($filename); my $tokens = $lexer->tokenize($script); print Dumper $tokens; my $modules = $lexer->get_used_modules($script); print Dumper $modules; =head1 METHODS =over 4 =item my $lexer = Compiler::Lexer->new($options); create new instance. You can create object from $options in hash reference. 
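For example (a minimal sketch; the filename below is only an illustration):

    use Compiler::Lexer;

    # options are passed as a hash reference;
    # 'verbose' keeps Pod, Comment and WhiteSpace tokens
    my $lexer  = Compiler::Lexer->new({ filename => 'foo.pl', verbose => 1 });
    my $tokens = $lexer->tokenize('use strict; my $x = 1;');
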
B =over 4 =item filename =item verbose : includes token of Pod, Comment and WhiteSpace =back =item $lexer->tokenize($script); get token objects includes parameter of 'name' or 'type' or 'line' and so on. This method requires perl source code in string. =item $lexer->set_library_path(['path1', 'path2' ...]) set libraries path for reading recursively. Default paths are @INC. =item $lexer->recursive_tokenize($script) get hash reference like { 'module_nameA' => [], 'module_nameB' => [] ... }. This method requires per source code in string. =item $lexer->get_used_modules($script); get names of used module. This method requires perl source code in string. =back =head1 AUTHOR Masaaki Goshima (goccy) Egoccy(at)cpan.orgE =head1 CONTRIBUTORS tokuhirom: Tokuhiro Matsuno =head1 LICENSE AND COPYRIGHT Copyright (c) 2013, Masaaki Goshima (goccy). All rights reserved. This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut Constants.pm100644000765000024 1432713603257356 21710 0ustar00goccystaff000000000000Compiler-Lexer-0.23/lib/Compiler/Lexeruse strict; use warnings; package Compiler::Lexer::TokenType; use constant { T_Return => 0, T_Add => 1, T_Sub => 2, T_Mul => 3, T_Div => 4, T_Mod => 5, T_ThreeTermOperator => 6, T_Greater => 7, T_Less => 8, T_StringAdd => 9, T_Ref => 10, T_Glob => 11, T_BitNot => 12, T_BitOr => 13, T_AlphabetOr => 14, T_BitAnd => 15, T_AlphabetAnd => 16, T_BitXOr => 17, T_AlphabetXOr => 18, T_StringMul => 19, T_AddEqual => 20, T_SubEqual => 21, T_MulEqual => 22, T_DivEqual => 23, T_ModEqual => 24, T_StringAddEqual => 25, T_LeftShiftEqual => 26, T_RightShiftEqual => 27, T_StringMulEqual => 28, T_GreaterEqual => 29, T_LessEqual => 30, T_EqualEqual => 31, T_Diamond => 32, T_Compare => 33, T_PolymorphicCompare => 34, T_RegOK => 35, T_RegNot => 36, T_NotEqual => 37, T_StringLess => 38, T_StringLessEqual => 39, T_StringGreater => 40, T_StringGreaterEqual => 41, T_StringEqual => 42, T_StringNotEqual => 43, T_StringCompare => 44, T_Inc => 45, T_Dec => 46, T_Exp => 47, T_PowerEqual => 48, T_DefaultEqual => 49, T_LeftShift => 50, T_RightShift => 51, T_And => 52, T_Or => 53, T_AndBitEqual => 54, T_OrBitEqual => 55, T_NotBitEqual => 56, T_OrEqual => 57, T_AndEqual => 58, T_Slice => 59, T_DefaultOperator => 60, T_ToDo => 61, T_VarDecl => 62, T_FunctionDecl => 63, T_Method => 64, T_Assign => 65, T_ArraySize => 66, T_Is => 67, T_Not => 68, T_AlphabetNot => 69, T_BuiltinFunc => 70, T_RequireDecl => 71, T_Import => 72, T_SpecificKeyword => 73, T_DataWord => 74, T_ModWord => 75, T_AUTOLOAD => 76, T_CORE => 77, T_DESTROY => 78, T_STDIN => 79, T_STDOUT => 80, T_STDERR => 81, T_Redo => 82, T_Next => 83, T_Last => 84, T_Goto => 85, T_Continue => 86, T_Do => 87, T_Break => 88, T_Handle => 89, T_LocalDecl => 90, T_OurDecl => 91, T_StateDecl => 92, T_UseDecl => 93, T_UsedName => 94, T_RequiredName => 95, T_IfStmt => 96, T_ElseStmt => 97, T_ElsifStmt => 98, T_UnlessStmt => 99, T_UntilStmt => 100, T_WhenStmt => 101, T_GivenStmt => 102, T_DefaultStmt => 103, T_Comma => 104, T_Colon => 105, T_SemiColon => 106, T_LeftParenthesis => 107, T_RightParenthesis => 108, T_LeftBrace => 109, T_RightBrace => 110, T_LeftBracket => 111, T_RightBracket => 112, T_ArrayDereference => 113, T_HashDereference => 114, T_ScalarDereference => 115, T_CodeDereference => 116, T_ShortScalarDereference => 117, T_ShortArrayDereference => 118, T_ShortHashDereference => 119, T_ShortCodeDereference => 120, T_ArraySizeDereference => 121, T_Key => 122, T_BareWord => 123, T_Arrow => 124, 
T_Pointer => 125, T_NamespaceResolver => 126, T_Namespace => 127, T_Package => 128, T_Class => 129, T_CallDecl => 130, T_CodeRef => 131, T_WhileStmt => 132, T_ForStmt => 133, T_ForeachStmt => 134, T_Annotation => 135, T_ArgumentArray => 136, T_SpecificValue => 137, T_ConstValue => 138, T_ProgramArgument => 139, T_LibraryDirectories => 140, T_Environment => 141, T_Include => 142, T_Signal => 143, T_RegOpt => 144, T_RegQuote => 145, T_RegDoubleQuote => 146, T_RegList => 147, T_RegExec => 148, T_RegDecl => 149, T_RegMatch => 150, T_RegDelim => 151, T_HandleDelim => 152, T_RegMiddleDelim => 153, T_RegAllReplace => 154, T_RegReplace => 155, T_RegReplaceFrom => 156, T_RegReplaceTo => 157, T_FieldDecl => 158, T_TypeRef => 159, T_LabelRef => 160, T_LocalVarDecl => 161, T_GlobalVarDecl => 162, T_MultiLocalVarDecl => 163, T_MultiGlobalVarDecl => 164, T_Prototype => 165, T_Var => 166, T_CodeVar => 167, T_ArrayVar => 168, T_HashVar => 169, T_Int => 170, T_Double => 171, T_String => 172, T_RawString => 173, T_ExecString => 174, T_VersionString => 175, T_HereDocumentTag => 176, T_HereDocumentRawTag => 177, T_HereDocumentExecTag => 178, T_HereDocumentBareTag => 179, T_RawHereDocument => 180, T_HereDocument => 181, T_HereDocumentEnd => 182, T_FormatDecl => 183, T_Format => 184, T_FormatEnd => 185, T_Object => 186, T_RegExp => 187, T_Array => 188, T_Hash => 189, T_Operator => 190, T_LocalVar => 191, T_LocalArrayVar => 192, T_LocalHashVar => 193, T_GlobalVar => 194, T_GlobalArrayVar => 195, T_GlobalHashVar => 196, T_ArrayRef => 197, T_HashRef => 198, T_ArrayAt => 199, T_HashAt => 200, T_ArraySet => 201, T_HashSet => 202, T_Function => 203, T_Call => 204, T_Argument => 205, T_List => 206, T_Default => 207, T_Pod => 208, T_Comment => 209, T_WhiteSpace => 210, T_Undefined => 211, T_PostDeref => 212, T_PostDerefStar => 213, T_PostDerefArraySliceOpenBracket => 214, T_PostDerefArraySliceCloseBracket => 215, T_PostDerefHashSliceOpenBrace => 216, T_PostDerefHashSliceCloseBrace => 217, T_PostDerefCodeOpenParen => 218, T_PostDerefCodeCloseParen => 219 }; package Compiler::Lexer::SyntaxType; use constant { T_Value => 0, T_Term => 1, T_Expr => 2, T_Stmt => 3, T_BlockStmt => 4 }; package Compiler::Lexer::Kind; use constant { T_Return => 0, T_Operator => 1, T_Assign => 2, T_Decl => 3, T_Function => 4, T_SingleTerm => 5, T_Import => 6, T_SpecificKeyword => 7, T_DataWord => 8, T_ModWord => 9, T_AUTOLOAD => 10, T_CORE => 11, T_DESTROY => 12, T_Handle => 13, T_Control => 14, T_Do => 15, T_Module => 16, T_Stmt => 17, T_DefaultStmt => 18, T_Comma => 19, T_Colon => 20, T_StmtEnd => 21, T_Symbol => 22, T_Modifier => 23, T_Term => 24, T_Namespace => 25, T_Package => 26, T_Class => 27, T_Annotation => 28, T_RegOpt => 29, T_RegPrefix => 30, T_RegReplacePrefix => 31, T_Ref => 32, T_Get => 33, T_Set => 34, T_Verbose => 35, T_Undefined => 36 }; 1; Token.pm100644000765000024 254113603257356 20767 0ustar00goccystaff000000000000Compiler-Lexer-0.23/lib/Compiler/Lexerpackage Compiler::Lexer::Token; use strict; use warnings; my $FIELDS = [qw/ stype type kind line name data has_warnings /]; { no strict 'refs'; foreach my $field (@$FIELDS) { *{__PACKAGE__ . '::' . $field} = sub { my ($self, $value) = @_; return $self->{$field} unless defined $value; $self->{$field} = $value; }; } } 1; __END__ =encoding utf-8 =for stopwords stype =head1 NAME Compiler::Lexer::Token =head1 SYNOPSIS Compiler::Lexer::Token includes the following members. 
=over =item stype constant of Compiler::Lexer::SyntaxType =item type constant of Compiler::Lexer::TokenType =item kind constant of Compiler::Lexer::Kind =item name name of Compiler::Lexer::TokenType =item data raw data =item has_warnings flag of whether unknown keyword or not =back =head1 METHODS support simple get/set accessors like Class::Accessor::Fast example: my $type = $token->type; # get accessor $token->type(Compiler::Lexer::TokenType::T_RegExp); # set accessor =head1 AUTHOR Masaaki Goshima (goccy) Egoccy(at)cpan.orgE =head1 LICENSE AND COPYRIGHT Copyright (c) 2013, Masaaki Goshima (goccy). All rights reserved. This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut minil.toml100644000765000024 40213603257356 15731 0ustar00goccystaff000000000000Compiler-Lexer-0.23badges = ["travis", "coveralls"] authority = "cpan:GOCCY" [build] build_class = "builder::MyBuilder" [no_index] directory = ['t', 'examples', 'builder', 'experiments'] [FileGatherer] exclude_match = ['^t/perl/.*', '^example/bigdata.pl', '^experiments/' ] Compiler-Lexer.xs100644000765000024 1332713603257356 17770 0ustar00goccystaff000000000000Compiler-Lexer-0.23/src#include #ifdef __cplusplus extern "C" { #endif #define PERL_NO_GET_CONTEXT #include "EXTERN.h" #include "perl.h" #include "XSUB.h" #include "ppport.h" #undef dNOOP // Hack to work around "error: declaration of 'Perl___notused' has a different language linkage" error message on clang. #define dNOOP #define new_Array() (AV*)sv_2mortal((SV*)newAV()) #define new_Hash() (HV*)sv_2mortal((SV*)newHV()) #define new_String(s, len) sv_2mortal(newSVpv(s, len)) #define new_Int(u) sv_2mortal(newSVuv(u)) #define new_Ref(sv) sv_2mortal(newRV_inc((SV*)sv)) #define set(e) SvREFCNT_inc(e) #define get_value(hash, key) *hv_fetchs(hash, key, strlen(key)) #ifdef __cplusplus }; #endif typedef Lexer * Compiler_Lexer; MODULE = Compiler::Lexer PACKAGE = Compiler::Lexer PROTOTYPES: DISABLE Compiler_Lexer _new(classname, _options) char *classname HV *_options CODE: { const char *filename = SvPVX(get_value(_options, "filename")); bool verbose = SvIVX(get_value(_options, "verbose")); Lexer *lexer = new Lexer(filename, verbose); RETVAL = lexer; } OUTPUT: RETVAL void DESTROY(self) Compiler_Lexer self CODE: { delete self; } AV * tokenize(self, script) Compiler_Lexer self const char *script CODE: { Tokens *tokens = self->tokenize((char *)script); AV* ret = new_Array(); size_t size = tokens->size(); for (size_t i = 0; i < size; i++) { Token *token = tokens->at(i); HV *hash = (HV*)new_Hash(); (void)hv_stores(hash, "stype", set(new_Int(token->stype))); (void)hv_stores(hash, "type", set(new_Int(token->info.type))); (void)hv_stores(hash, "kind", set(new_Int(token->info.kind))); (void)hv_stores(hash, "line", set(new_Int(token->finfo.start_line_num))); (void)hv_stores(hash, "has_warnings", set(new_Int(token->info.has_warnings))); (void)hv_stores(hash, "name", set(new_String(token->info.name, strlen(token->info.name)))); (void)hv_stores(hash, "data", set(new_String(token->_data, strlen(token->_data)))); HV *stash = (HV *)gv_stashpv("Compiler::Lexer::Token", sizeof("Compiler::Lexer::Token")); av_push(ret, set(sv_bless(new_Ref(hash), stash))); } self->clearContext(); RETVAL = ret; } OUTPUT: RETVAL AV * get_groups_by_syntax_level(self, tokens_, syntax_level) Compiler_Lexer self AV *tokens_ int syntax_level CODE: { int tokens_size = av_len(tokens_); if (tokens_size < 0) { RETVAL = NULL; return; } Tokens tks; for (int i = 0; i <= tokens_size; i++) { SV 
*token_ = (SV *)*av_fetch(tokens_, i, FALSE); if (sv_isa(token_, "Compiler::Lexer::Token")) { token_ = SvRV(token_); } HV *token = (HV *)token_; const char *name = SvPVX(get_value(token, "name")); const char *data = SvPVX(get_value(token, "data")); int line = SvIVX(get_value(token, "line")); int has_warnings = SvIVX(get_value(token, "has_warnings")); Enum::Token::Type::Type type = (Enum::Token::Type::Type)SvIVX(get_value(token, "type")); Enum::Token::Kind::Kind kind = (Enum::Token::Kind::Kind)SvIVX(get_value(token, "kind")); FileInfo finfo; finfo.start_line_num = line; finfo.end_line_num = line; finfo.filename = self->finfo.filename; TokenInfo info; info.type = type; info.kind = kind; info.name = name; info.data = data; info.has_warnings = has_warnings; Token *tk = new Token(std::string(data), finfo); tk->info = info; tk->type = type; tk->_data = data; tks.push_back(tk); } self->grouping(&tks); self->prepare(&tks); //self->dump(&tks); Token *root = self->parseSyntax(NULL, &tks); //self->dumpSyntax(root, 0); self->parseSpecificStmt(root); //self->dumpSyntax(root, 0); self->setIndent(root, 0); size_t block_id = 0; self->setBlockIDWithDepthFirst(root, &block_id); Tokens *stmts = self->getTokensBySyntaxLevel(root, (Enum::Parser::Syntax::Type)syntax_level); AV* ret = new_Array(); for (size_t i = 0; i < stmts->size(); i++) { Token *stmt = stmts->at(i); const char *src = stmt->deparse(); size_t len = strlen(src); HV *hash = (HV*)new_Hash(); (void)hv_stores(hash, "src", set(new_String(src, len))); (void)hv_stores(hash, "token_num", set(new_Int(stmt->total_token_num))); (void)hv_stores(hash, "indent", set(new_Int(stmt->finfo.indent))); (void)hv_stores(hash, "block_id", set(new_Int(stmt->finfo.block_id))); (void)hv_stores(hash, "start_line", set(new_Int(stmt->finfo.start_line_num))); (void)hv_stores(hash, "end_line", set(new_Int(stmt->finfo.end_line_num))); (void)hv_stores(hash, "has_warnings", set(new_Int(stmt->info.has_warnings))); av_push(ret, set(new_Ref(hash))); } RETVAL = ret; } OUTPUT: RETVAL AV * get_used_modules(self, script) Compiler_Lexer self const char *script CODE: { Tokens *tokens = self->tokenize((char *)script); self->grouping(tokens); self->prepare(tokens); Token *root = self->parseSyntax(NULL, tokens); Modules *modules = self->getUsedModules(root); AV* ret = new_Array(); for (size_t i = 0; i < modules->size(); i++) { Module *module = modules->at(i); const char *module_name = module->name; const char *module_args = module->args; size_t module_name_len = strlen(module_name); size_t module_args_len = (module_args) ? 
strlen(module_args) : 0; HV *hash = (HV*)new_Hash(); (void)hv_stores(hash, "name", set(new_String(module_name, module_name_len))); (void)hv_stores(hash, "args", set(new_String(module_args, module_args_len))); av_push(ret, set(new_Ref(hash))); } self->clearContext(); RETVAL = ret; } OUTPUT: RETVAL SV * deparse(filename, script) const char *filename const char *script CODE: { Lexer lexer(filename, false); Tokens *tokens = lexer.tokenize((char *)script); lexer.grouping(tokens); lexer.prepare(tokens); Token *root = lexer.parseSyntax(NULL, tokens); const char *src = root->deparse(); size_t len = strlen(src) + 1; size_t token_size = tokens->size(); RETVAL = newSVpv(src, len); } OUTPUT: RETVAL Compiler_annotator.cpp100644000765000024 2505413603257356 24061 0ustar00goccystaff000000000000Compiler-Lexer-0.23/src/compiler/lexer#include namespace TokenType = Enum::Token::Type; namespace TokenKind = Enum::Token::Kind; using namespace TokenType; using namespace std; Annotator::Annotator(void) { } #define ANNOTATE(method, data, info) do { \ method(ctx, data, tk, &info); \ if (info.type != Undefined) { \ tk->info = info; \ ctx->prev_type = info.type; \ return; \ } \ } while (0) void Annotator::annotate(LexContext *ctx, Token *tk) { // Ignore WhiteSpace tokens to annotate if (tk->info.type == WhiteSpace) { return; } if (tk->info.type != Undefined) { ctx->prev_type = tk->info.type; return; } TokenInfo info; info.type = Undefined; string data = string(tk->_data); ANNOTATE(annotateRegOpt, data, info); ANNOTATE(annotateNamespace, data, info); ANNOTATE(annotateMethod, data, info); ANNOTATE(annotateKey, data, info); ANNOTATE(annotateShortScalarDereference, data, info); ANNOTATE(annotateCallDecl, data, info); ANNOTATE(annotateHandleDelimiter, data, info); ANNOTATE(annotateReservedKeyword, data, info); ANNOTATE(annotateGlobOrMul, data, info); ANNOTATE(annotateNamelessFunction, data, info); ANNOTATE(annotateLocalVariable, data, info); ANNOTATE(annotateVariable, data, info); ANNOTATE(annotateGlobalVariable, data, info); ANNOTATE(annotateFunction, data, info); ANNOTATE(annotateCall, data, info); ANNOTATE(annotateClass, data, info); ANNOTATE(annotateModuleName, data, info); ANNOTATE(annotateBareWord, data, info); } bool Annotator::isRegexOption(const char *opt) { size_t len = strlen(opt); for (size_t i = 0; i < len; i++) { char ch = opt[i]; switch (ch) { case 'a': case 'c': case 'd': case 'e': case 'g': case 'i': case 'm': case 'l': case 'o': case 'p': case 'r': case 's': case 'u': case 'x': break; default: return false; break; } } return true; } void Annotator::annotateRegOpt(LexContext *ctx, const string &data, Token *tk, TokenInfo *info) { if (ctx->prev_type == RegDelim && isalpha(tk->_data[0]) && data != "or" && isRegexOption(data.c_str())) { *info = ctx->tmgr->getTokenInfo(RegOpt); } } void Annotator::annotateNamespace(LexContext *ctx, const string &data, Token *tk, TokenInfo *info) { Token *next_tk = ctx->tmgr->nextToken(tk); if (next_tk && next_tk->_data[0] == ':' && next_tk->_data[1] == ':' && next_tk->info.type != String && next_tk->info.type != RawString) { char data_front = tk->_data[0]; if (data_front == '$' || data_front == '@' || data_front == '%') { annotateLocalVariable(ctx, data, tk, info); if (info->type != Undefined) return; annotateVariable(ctx, data, tk, info); if (info->type != Undefined) return; annotateGlobalVariable(ctx, data, tk, info); if (info->type != Undefined) return; } else if (data_front > 0 && !isalnum(data_front) && data_front != '_') { return; } *info = 
ctx->tmgr->getTokenInfo(Namespace); } else if (ctx->prev_type == NamespaceResolver) { TokenInfo tk_info = ctx->tmgr->getTokenInfo(tk->_data); if (tk_info.kind == TokenKind::Symbol) return; *info = ctx->tmgr->getTokenInfo(Namespace); } } void Annotator::annotateMethod(LexContext *ctx, const string &, Token *tk, TokenInfo *info) { if (ctx->prev_type == Pointer && (isalpha(tk->_data[0]) || tk->_data[0] == '_')) { *info = ctx->tmgr->getTokenInfo(Method); } } void Annotator::annotateKey(LexContext *ctx, const string &, Token *tk, TokenInfo *info) { Token *prev_before_tk = ctx->tmgr->beforePreviousToken(tk); TokenType::Type prev_before_type = (prev_before_tk) ? prev_before_tk->info.type : Undefined; Token *next_tk = ctx->tmgr->nextToken(tk); if (prev_before_type != Function && ctx->prev_type == LeftBrace && next_tk && (isalpha(tk->_data[0]) || tk->_data[0] == '_') && next_tk->_data[0] == '}') { *info = ctx->tmgr->getTokenInfo(Key); } else if (next_tk && (isalpha(tk->_data[0]) || tk->_data[0] == '_') && (next_tk->_data[0] == '=' && next_tk->_data[1] == '>')) { *info = ctx->tmgr->getTokenInfo(Key); } else if (ctx->prev_type == ArraySize && (isalpha(tk->_data[0]) || tk->_data[0] == '_')) { *info = ctx->tmgr->getTokenInfo(Key); } } void Annotator::annotateShortScalarDereference(LexContext *ctx, const string &, Token *tk, TokenInfo *info) { Token *next_tk = ctx->tmgr->nextToken(tk); if (next_tk && (tk->_data[0] == '$' && tk->_data[1] == '$') && (isalpha(next_tk->_data[0]) || next_tk->_data[0] == '_')) { *info = ctx->tmgr->getTokenInfo(ShortScalarDereference); } } void Annotator::annotateCallDecl(LexContext *ctx, const string &, Token *tk, TokenInfo *info) { Token *prev_tk = ctx->tmgr->previousToken(tk); if (prev_tk && prev_tk->info.type == TokenType::Ref && tk->_data[0] == '&') { *info = ctx->tmgr->getTokenInfo(CallDecl); } else if (tk->_data[0] == '&') { *info = ctx->tmgr->getTokenInfo(BitAnd); } } void Annotator::annotateHandleDelimiter(LexContext *ctx, const string &, Token *tk, TokenInfo *info) { if (tk->_data[0] != '<') return; Token *prev_tk = ctx->tmgr->previousToken(tk); TokenKind::Kind prev_kind = (prev_tk) ? prev_tk->info.kind : TokenKind::Undefined; TokenType::Type prev_type = (prev_tk) ? prev_tk->info.type : TokenType::Undefined; if (prev_type == SemiColon || prev_type == LeftParenthesis || prev_type == Comma || prev_kind == TokenKind::Assign || (prev_type != Inc && prev_type != Dec && prev_kind == TokenKind::Operator) || prev_kind == TokenKind::Decl) { *info = ctx->tmgr->getTokenInfo(HandleDelim); Token *handle_end_delimiter = ctx->tmgr->getTokenByBase(tk, 2); if (handle_end_delimiter && handle_end_delimiter->_data[0] == '>') { handle_end_delimiter->info = ctx->tmgr->getTokenInfo(HandleDelim); } } } void Annotator::annotateReservedKeyword(LexContext *ctx, const string &, Token *tk, TokenInfo *info) { TokenInfo reserved_info = ctx->tmgr->getTokenInfo(tk->_data); TokenManager *tmgr = ctx->tmgr; Token *prev_tk = tmgr->previousToken(tk); if (reserved_info.type == IfStmt && prev_tk && prev_tk->info.type == UseDecl) { // For `if` statement which is used at `use` declaration. // It should be treated as a `UsedName` instead of `IfStmt`. // e.g. 
// use if $] < 5.009_005, 'MRO::Compat'; *info = tmgr->getTokenInfo(UsedName); return; } if (reserved_info.type != TokenType::Undefined && ctx->prev_type != FunctionDecl) { switch (ctx->prev_type) { /* ${m} or @{m} or %{m} or &{m} or $#{m} */ case ArrayDereference: case HashDereference: case ScalarDereference: case CodeDereference: case ArraySizeDereference: *info = tmgr->getTokenInfo(Key); break; case HandleDelim: { /* or */ Token *next_tk = ctx->tmgr->nextToken(tk); if (next_tk && next_tk->info.type == HandleDelim && (reserved_info.type == RegMatch || reserved_info.type == RegAllReplace)) { *info = tmgr->getTokenInfo(Key); break; } /* fallthrough */ } default: *info = reserved_info; break; } } } void Annotator::annotateGlobOrMul(LexContext *ctx, const string &, Token *tk, TokenInfo *info) { if (tk->_data[0] != '*') return; Token *prev_tk = ctx->tmgr->previousToken(tk); TokenType::Type prev_type = (prev_tk) ? prev_tk->info.type : TokenType::Undefined; TokenKind::Kind prev_kind = (prev_tk) ? prev_tk->info.kind : TokenKind::Undefined; Token *next_tk = ctx->tmgr->nextToken(tk); if ((next_tk && next_tk->_data[0] == '=') || prev_type == SemiColon || prev_type == LeftParenthesis || prev_type == LeftBrace || prev_type == Comma || prev_type == ScalarDereference || prev_kind == TokenKind::Assign || (prev_type != Inc && prev_type != Dec && prev_kind == TokenKind::Operator) || prev_kind == TokenKind::Decl) { *info = ctx->tmgr->getTokenInfo(Glob); } else { *info = ctx->tmgr->getTokenInfo(Mul); } } void Annotator::annotateNamelessFunction(LexContext *ctx, const string &, Token *tk, TokenInfo *info) { if (ctx->prev_type == FunctionDecl && tk->_data[0] == '{') { *info = ctx->tmgr->getTokenInfo(tk->_data); } } void Annotator::annotateLocalVariable(LexContext *ctx, const string &data, Token *, TokenInfo *info) { if (ctx->prev_type == VarDecl && data.find("$") != string::npos) { *info = ctx->tmgr->getTokenInfo(LocalVar); vardecl_map.insert(StringMap::value_type(data, "")); } else if (ctx->prev_type == VarDecl && data.find("@") != string::npos) { *info = ctx->tmgr->getTokenInfo(LocalArrayVar); vardecl_map.insert(StringMap::value_type(data, "")); } else if (ctx->prev_type == VarDecl && data.find("%") != string::npos) { *info = ctx->tmgr->getTokenInfo(LocalHashVar); vardecl_map.insert(StringMap::value_type(data, "")); } } void Annotator::annotateVariable(LexContext *ctx, const string &data, Token *, TokenInfo *info) { if (vardecl_map.find(data) == vardecl_map.end()) return; if (data.find("@") != string::npos) { *info = ctx->tmgr->getTokenInfo(ArrayVar); } else if (data.find("%") != string::npos) { *info = ctx->tmgr->getTokenInfo(HashVar); } else { *info = ctx->tmgr->getTokenInfo(Var); } } void Annotator::annotateGlobalVariable(LexContext *ctx, const string &data, Token *, TokenInfo *info) { if (data.find("$") != string::npos) { *info = ctx->tmgr->getTokenInfo(GlobalVar); vardecl_map.insert(StringMap::value_type(data, "")); } else if (data.find("@") != string::npos) { *info = ctx->tmgr->getTokenInfo(GlobalArrayVar); vardecl_map.insert(StringMap::value_type(data, "")); } else if (data.find("%") != string::npos) { *info = ctx->tmgr->getTokenInfo(GlobalHashVar); vardecl_map.insert(StringMap::value_type(data, "")); } } void Annotator::annotateFunction(LexContext *ctx, const string &data, Token *, TokenInfo *info) { if (ctx->prev_type == FunctionDecl) { *info = ctx->tmgr->getTokenInfo(Function); funcdecl_map.insert(StringMap::value_type(data, "")); } } void Annotator::annotateCall(LexContext *ctx, const string 
&data, Token *, TokenInfo *info) { if (funcdecl_map.find(data) != funcdecl_map.end()) { *info = ctx->tmgr->getTokenInfo(Call); } } void Annotator::annotateClass(LexContext *ctx, const string &data, Token *, TokenInfo *info) { if (ctx->prev_type == Package) { *info = ctx->tmgr->getTokenInfo(Class); pkgdecl_map.insert(StringMap::value_type(data, "")); } else if (pkgdecl_map.find(data) != pkgdecl_map.end()) { *info = ctx->tmgr->getTokenInfo(Class); } } void Annotator::annotateModuleName(LexContext *ctx, const string &, Token *, TokenInfo *info) { if (ctx->prev_type == UseDecl) { *info = ctx->tmgr->getTokenInfo(UsedName); } else if (ctx->prev_type == RequireDecl) { *info = ctx->tmgr->getTokenInfo(RequiredName); } } void Annotator::annotateBareWord(LexContext *ctx, const string &, Token *, TokenInfo *info) { *info = ctx->tmgr->getTokenInfo(Key);//BareWord); info->has_warnings = true; } Compiler_lexer.cpp100644000765000024 5017713603257356 23177 0ustar00goccystaff000000000000Compiler-Lexer-0.23/src/compiler/lexer#include /* Declare Namespace */ using namespace std; namespace TokenType = Enum::Token::Type; namespace SyntaxType = Enum::Parser::Syntax; namespace TokenKind = Enum::Token::Kind; #define ITER_CAST(T, it) (T)*(it) #define EXTEND_BUFFER_SIZE (16) //This parameter is needed to correspond #53 Module::Module(const char *name_, const char *args_) : name(name_), args(args_) {} LexContext::LexContext(const char *filename, char *script, bool verbose) : progress(0), buffer_idx(0) { script_size = strlen(script) + 1; token_buffer = (char *)malloc((script_size + EXTEND_BUFFER_SIZE) * 2); buffer_head = token_buffer; token_buffer[0] = EOL; prev_type = TokenType::Undefined; smgr = new ScriptManager(script); tmgr = new TokenManager(script_size + EXTEND_BUFFER_SIZE, verbose); finfo.start_line_num = 1; finfo.filename = filename; } Lexer::Lexer(const char *filename, bool verbose) { this->filename = filename; this->verbose = verbose; } Lexer::~Lexer(void) { //free((void *)this->filename); } Tokens *Lexer::tokenize(char *script) { Scanner scanner; scanner.verbose = verbose; ctx = new LexContext(filename, script, verbose); Token *tk = NULL; TokenManager *tmgr = ctx->tmgr; ScriptManager *smgr = ctx->smgr; for (char ch; (ch = smgr->currentChar()) != EOL; smgr->idx++) { if (smgr->end()) break; if (ch == '\n') ctx->finfo.start_line_num++; if (scanner.isSkip(ctx)) { continue; } else { if (ctx->progress > 0) { smgr->idx += ctx->progress - 1; ctx->progress = 0; if (smgr->end()) break; // We should refetch after refresh the index. 
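/* [Editor's note, not part of the original source] ctx->progress records how many extra characters a scanner routine has already consumed past the current index. The `smgr->idx += ctx->progress - 1` above advances the cursor by that amount minus one because the for loop's own `smgr->idx++` runs again when this `continue` is reached; the loop then re-reads currentChar() from the adjusted position. */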
continue; } } switch (ch) { case '"': case '\'': case '`': tmgr->add(scanner.scanQuote(ctx, ch)); break; case ' ': case '\t': tmgr->add(scanner.scanWordDelimiter(ctx)); tmgr->add(scanner.scanWhiteSpace(ctx)); // For newline character break; case '#': tmgr->add(scanner.scanSingleLineComment(ctx)); break; case '-': if (scanner.scanNegativeNumber(ctx, smgr->nextChar())) { break; } else if (isalpha(smgr->nextChar())) { ctx->writeBuffer(smgr->currentChar()); break; } //fall through case '.': if (!ctx->existsBuffer() && '0' <= smgr->nextChar() && smgr->nextChar() <= '9') { // .01234 tmgr->add(scanner.scanNumber(ctx)); ctx->clearBuffer(); continue; } else if (scanner.isVersionString(ctx)) { tmgr->add(scanner.scanVersionString(ctx)); ctx->clearBuffer(); continue; } //fall through case '$': case '@': case '%': case '&': case '*': // all of the sigils if (scanner.isPostDeref(ctx)) { tk = scanner.scanPostDeref(ctx); tmgr->add(tk); } else { tmgr->add(scanner.scanSymbol(ctx)); } smgr->idx += ctx->progress; ctx->progress = 0; break; case '=': case '^': case '~': case ',': case ':': case ';': case '+': case '<': case '>': case '|': case '!': case '/': case '(': case ')': case '{': case '}': case '[': case ']': case '?': case '\\': tmgr->add(scanner.scanSymbol(ctx)); smgr->idx += ctx->progress; ctx->progress = 0; break; case 'x': { char next_ch = smgr->nextChar(); char after_next_ch = smgr->afterNextChar(); if (next_ch != '=' || ctx->existsBuffer() || after_next_ch == '=' || after_next_ch == '>' || after_next_ch == '~') { ctx->writeBuffer(ch); } else { tmgr->add(scanner.scanSymbol(ctx)); smgr->idx += ctx->progress; ctx->progress = 0; } break; } case '\n': tmgr->add(scanner.scanLineDelimiter(ctx)); tmgr->add(scanner.scanWhiteSpace(ctx)); // For newline character break; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': if (!ctx->existsBuffer() || (ctx->buffer_idx == 1 && ctx->buffer()[0] == '-')) { tmgr->add(scanner.scanNumber(ctx)); ctx->clearBuffer(); continue; } default: { char ch = smgr->currentChar(); if (ch != '\n') ctx->writeBuffer(ch); break; } } } if (ctx->existsBuffer()) { tmgr->add(tmgr->new_Token(ctx->buffer(), ctx->finfo)); ctx->clearBuffer(); } annotateTokens(ctx, tmgr->tokens); return tmgr->tokens; } void Lexer::clearContext(void) { free(ctx->tmgr->head); free(ctx->buffer_head); delete ctx->tmgr->tokens; delete ctx->tmgr; delete ctx->smgr; delete ctx; ctx = NULL; } void Lexer::dump(Tokens *tokens) { TokenPos it = tokens->begin(); while (it != tokens->end()) { Token *t = ITER_CAST(Token *, it); fprintf(stdout, "[%-12s] : %12s \n", t->_data, t->info.name); it++; } } void Lexer::annotateTokens(LexContext *ctx, Tokens *tokens) { Annotator annotator; size_t size = tokens->size(); for (size_t i = 0; i < size; i++) { Token *tk = tokens->at(i); annotator.annotate(ctx, tk); } } void Lexer::grouping(Tokens *tokens) { using namespace TokenType; TokenPos pos = tokens->begin(); string ns = ""; Token *next_tk = NULL; while (pos != tokens->end()) { Token *tk = ITER_CAST(Token *, pos); if (!tk) break; switch (tk->info.type) { case Var: case GlobalVar: case GlobalHashVar: case Namespace: case Class: case CORE: { Token *ns_token = tk; TokenPos start_pos = pos+1; size_t move_count = 0; do { tk = ITER_CAST(Token *, pos); if (tk) ns += string(tk->_data); else break; pos++; move_count++; if (pos == tokens->end()) break; next_tk = ITER_CAST(Token *, pos); } while ((tk->info.type == NamespaceResolver && (next_tk && next_tk->info.kind != TokenKind::Symbol && 
next_tk->info.kind != TokenKind::StmtEnd)) || (next_tk && next_tk->info.type == NamespaceResolver)); TokenPos end_pos = pos; pos -= move_count; //if (ns_token->info.type == Namespace) asm("int3"); ns_token->_data = (new string(ns))->c_str(); ns_token->info.has_warnings = true; ns = ""; tokens->erase(start_pos, end_pos); break; } case ArraySize: { Token *as_token = tk; Token *next_tk = ITER_CAST(Token *, pos+1); TokenType::Type type = next_tk->info.type; if (type == Key || type == Var || type == GlobalVar) { string new_str = string(as_token->_data) + string(next_tk->_data); as_token->_data = (new string(new_str))->c_str(); tokens->erase(pos+1); } break; } case ShortScalarDereference: case ShortArrayDereference: case ShortHashDereference: case ShortCodeDereference: { Token *next_tk = ITER_CAST(Token *, pos+1); if (!next_tk) break; Token *sp_token = tk; string new_str = string(sp_token->_data) + string(next_tk->_data); sp_token->_data = (new string(new_str))->c_str(); tokens->erase(pos+1); break; } default: break; } pos++; } } void Lexer::prepare(Tokens *tokens) { head = tokens->begin(); pos = 0; start_pos = pos; TokenPos start_tk_pos = tokens->begin(); TokenPos it = tokens->begin(); TokenPos tag_pos = head + start_pos; while (it != tokens->end()) { Token *t = ITER_CAST(Token *, it); switch (t->info.type) { case TokenType::HereDocumentTag: case TokenType::HereDocumentRawTag: case TokenType::HereDocumentExecTag: case TokenType::HereDocumentBareTag: tag_pos = it; break; case TokenType::HereDocument: { assert(tag_pos != start_tk_pos && "ERROR!: nothing use HereDocumentTag"); Token *tag = ITER_CAST(Token *, tag_pos); switch (tag->info.type) { case TokenType::HereDocumentTag: case TokenType::HereDocumentBareTag: tag->info.type = Enum::Token::Type::RegDoubleQuote; tag->info.kind = Enum::Token::Kind::RegPrefix; tag->info.name = "RegDoubleQuote"; tag->info.data = "qq"; //tag->data = "qq{" + string(t->_data) + "}"; tag->_data = (new string("qq{" + string(t->_data) + "}"))->c_str(); break; case TokenType::HereDocumentRawTag: tag->info.type = Enum::Token::Type::RegQuote; tag->info.kind = Enum::Token::Kind::RegPrefix; tag->info.name = "RegQuote"; tag->info.data = "q"; //tag->data = "q{" + string(t->_data) + "}"; tag->_data = (new string("q{" + string(t->_data) + "}"))->c_str(); break; case TokenType::HereDocumentExecTag: tag->info.type = Enum::Token::Type::RegExec; tag->info.kind = Enum::Token::Kind::RegPrefix; tag->info.name = "RegExec"; tag->info.data = "qx"; //tag->data = "qx{" + string(t->_data) + "}"; tag->_data = (new string("qx{" + string(t->_data) + "}"))->c_str(); break; default: break; } tokens->erase(tag_pos-1); tokens->erase(it-1); it--; continue; break; } case TokenType::HereDocumentEnd: tokens->erase(it); continue; break; default: break; } it++; } } bool Lexer::isExpr(Token *tk, Token *prev_tk, TokenType::Type type, TokenKind::Kind kind) { using namespace TokenType; assert(tk->tks[0]->info.type == LeftBrace); if (tk->token_num > 1 && tk->tks[1]->info.type == RightBrace) { return true; } else if (tk->token_num > 3 && ( tk->tks[1]->info.type == Key || tk->tks[1]->info.type == String || tk->tks[1]->info.type == Int || tk->tks[1]->info.type == Double) && (tk->tks[2]->info.type == Arrow || tk->tks[2]->info.type == Comma)) { /* { [key|"key"|int|double] [,|=>] value ... 
*/ return true; } else if (type == Pointer || (type == Mul || type == Glob) || kind == TokenKind::Term || kind == TokenKind::Function ||/* type == FunctionDecl ||*/ ((prev_tk && prev_tk->stype == SyntaxType::Expr) && (type == RightBrace || type == RightBracket))) { /* ->{ or $hash{ or map { or {key}{ or [idx]{ */ return true; } return false; } Token *Lexer::parseSyntax(Token *start_token, Tokens *tokens) { using namespace TokenType; Type prev_type = Undefined; TokenKind::Kind prev_kind = TokenKind::Undefined; size_t end_pos = tokens->size(); Tokens *new_tokens = new Tokens(); size_t intermediate_pos = pos; Token *prev_syntax = NULL; if (start_token) { new_tokens->push_back(start_token); intermediate_pos--; } start_pos = pos; for (; pos < end_pos; pos++) { Token *t = ITER_CAST(Token *, head + pos); Type type = t->info.type; TokenKind::Kind kind = t->info.kind; switch (type) { case LeftBracket: case LeftParenthesis: case ArrayDereference: case HashDereference: case ScalarDereference: case ArraySizeDereference: { // Syntax error, It didn't close the brackets. if (pos + 1 >= end_pos) { /* Maybe we should use croak? */ fprintf(stderr, "ERROR!!: It didn't close the brackets. near %s:%lu\n", t->finfo.filename, t->finfo.start_line_num ); exit(EXIT_FAILURE); } pos++; Token *syntax = parseSyntax(t, tokens); syntax->stype = SyntaxType::Expr; new_tokens->push_back(syntax); prev_syntax = syntax; break; } case LeftBrace: { // Syntax error, It didn't close the brackets. if (pos + 1 >= end_pos) { /* Maybe we should use croak? */ fprintf(stderr, "ERROR!!: It didn't close the brace. near %s:%lu\n", t->finfo.filename, t->finfo.start_line_num ); exit(EXIT_FAILURE); } Token *prev = pos > 0 ? ITER_CAST(Token *, head + (pos - 1)) : NULL; prev_type = (prev) ? prev->info.type : Undefined; pos++; Token *syntax = parseSyntax(t, tokens); if (isExpr(syntax, prev_syntax, prev_type, prev_kind)) { syntax->stype = SyntaxType::Expr; } else if (prev_type == FunctionDecl) { /* LeftBrace is Expr but assign stype of BlockStmt */ syntax->stype = SyntaxType::BlockStmt; } else if (prev_kind == TokenKind::Do) { syntax->stype = SyntaxType::BlockStmt; } else { syntax->stype = SyntaxType::BlockStmt; if (pos + 1 < end_pos) { Token *next_tk = ITER_CAST(Token *, head + (pos + 1)); if (next_tk && next_tk->info.type != SemiColon) { intermediate_pos = pos; } } } new_tokens->push_back(syntax); prev_syntax = syntax; break; } case RightBrace: case RightBracket: case RightParenthesis: new_tokens->push_back(t); return new Token(new_tokens); break; /* not reached this stmt */ case SemiColon: { size_t k = pos - intermediate_pos; Token *intermediate_tk = ITER_CAST(Token *, head + intermediate_pos); if (start_pos == intermediate_pos && intermediate_tk->info.type != LeftBrace) { k++; } Tokens *stmt = new Tokens(); for (size_t j = 0; j < k - 1; j++) { Token *tk = new_tokens->back(); j += (tk->total_token_num > 0) ? 
tk->total_token_num - 1 : 0; stmt->insert(stmt->begin(), tk); new_tokens->pop_back(); } stmt->push_back(t); Token *stmt_ = new Token(stmt); stmt_->stype = SyntaxType::Stmt; new_tokens->push_back(stmt_); intermediate_pos = pos; prev_syntax = stmt_; break; } default: new_tokens->push_back(t); prev_syntax = NULL; break; } prev_kind = kind; prev_type = type; // We should prevent to increment pos over the end_pos } return new Token(new_tokens); } void Lexer::insertStmt(Token *syntax, int idx, size_t grouping_num) { size_t tk_n = syntax->token_num; Token **tks = syntax->tks; Token *tk = tks[idx]; Tokens *stmt = new Tokens(); stmt->push_back(tk); for (size_t i = 1; i < grouping_num; i++) { stmt->push_back(tks[idx+i]); } Token *stmt_ = new Token(stmt); stmt_->stype = SyntaxType::Stmt; tks[idx] = stmt_; if (tk_n == idx+grouping_num) { for (size_t i = 1; i < grouping_num; i++) { syntax->tks[idx+i] = NULL; } } else { memmove(syntax->tks+(idx+1), syntax->tks+(idx+grouping_num), sizeof(Token *) * (tk_n - (idx+grouping_num))); for (size_t i = 1; i < grouping_num; i++) { syntax->tks[tk_n-i] = NULL; } } syntax->token_num -= (grouping_num - 1); } void Lexer::parseSpecificStmt(Token *syntax) { using namespace TokenType; size_t tk_n = syntax->token_num; for (size_t i = 0; i < tk_n; i++) { Token **tks = syntax->tks; Token *tk = tks[i]; switch (tk->info.type) { case IfStmt: case ElsifStmt: case ForeachStmt: case ForStmt: case WhileStmt: case UnlessStmt: case GivenStmt: case UntilStmt: case WhenStmt: { if (tk_n > i+2 && tks[i+1]->stype == SyntaxType::Expr && tks[i+2]->stype == SyntaxType::BlockStmt) { /* if Expr BlockStmt */ Token *expr = tks[i+1]; if (expr->token_num > 3 && tk->info.type == ForStmt && expr->tks[1]->stype == SyntaxType::Stmt && expr->tks[2]->stype == SyntaxType::Stmt && expr->tks[3]->stype != SyntaxType::Stmt && expr->tks[3]->info.type != RightParenthesis) { insertStmt(expr, 3, expr->token_num - 4); } insertStmt(syntax, i, 3); tk_n -= 2; parseSpecificStmt(tks[i]->tks[2]); //i += 2; } else if ((tk->info.type == ForStmt || tk->info.type == ForeachStmt) && tk_n > i+3 && tks[i+1]->stype != SyntaxType::Expr) { /* for(each) [decl] Term Expr BlockStmt */ if (tk_n > i+3 && tks[i+1]->info.kind == TokenKind::Term && tks[i+2]->stype == SyntaxType::Expr && tks[i+3]->stype == SyntaxType::BlockStmt) { insertStmt(syntax, i, 4); tk_n -= 3; parseSpecificStmt(tks[i]->tks[3]); //i += 3; } else if (tk_n > i+4 && tks[i+1]->info.kind == TokenKind::Decl && tks[i+2]->info.kind == TokenKind::Term && tks[i+3]->stype == SyntaxType::Expr && tks[i+4]->stype == SyntaxType::BlockStmt) { insertStmt(syntax, i, 5); tk_n -= 4; parseSpecificStmt(tks[i]->tks[4]); //i += 4; } else { //fprintf(stderr, "Syntax Error!: near by line[%lu]\n", tk->finfo.start_line_num); //exit(EXIT_FAILURE); } } break; } case ElseStmt: case Do: case Continue: case DefaultStmt: if (tk_n > i+1 && tks[i+1]->stype == SyntaxType::BlockStmt) { /* else BlockStmt */ insertStmt(syntax, i, 2); tk_n -= 1; parseSpecificStmt(tks[i]->tks[1]); //i += 1; } break; case FunctionDecl: if (tk_n > i+1 && tks[i+1]->stype == SyntaxType::BlockStmt) { /* sub BlockStmt */ insertStmt(syntax, i, 2); tk_n -= 1; parseSpecificStmt(tks[i]->tks[1]); } else if (tk_n > i+2 && tks[i+1]->info.type == Function && tks[i+2]->stype == SyntaxType::BlockStmt) { /* sub func BlockStmt */ insertStmt(syntax, i, 3); tk_n -= 2; parseSpecificStmt(tks[i]->tks[2]); } else if (tk_n > i+3 && tks[i+1]->info.type == Function && tks[i+2]->stype == SyntaxType::Expr && tks[i+3]->stype == SyntaxType::BlockStmt) { 
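/* [Editor's note, not part of the original source] This branch appears to cover a named sub followed by a parenthesized prototype or signature, e.g. `sub add ($$) { ... }`: the FunctionDecl, the Function name, the Expr and the body BlockStmt are folded into a single Stmt node by insertStmt(). */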
/* sub func Expr BlockStmt */ insertStmt(syntax, i, 4); tk_n -= 3; parseSpecificStmt(tks[i]->tks[3]); } break; default: if (tk->stype == SyntaxType::BlockStmt) { if (i > 0 && (tks[i-1]->stype == SyntaxType::Stmt || tks[i-1]->stype == SyntaxType::BlockStmt)) { /* nameless block */ insertStmt(syntax, i, 1); } parseSpecificStmt(tk); } else if (tk->stype == SyntaxType::Stmt || tk->stype == SyntaxType::Expr) { parseSpecificStmt(tk); } break; } } } void Lexer::setIndent(Token *syntax, int indent) { using namespace SyntaxType; size_t tk_n = syntax->token_num; for (size_t i = 0; i < tk_n; i++) { Token *tk = syntax->tks[i]; switch (tk->stype) { case BlockStmt: tk->finfo.indent = ++indent; setIndent(tk, indent); if (indent == 0) { fprintf(stderr, "ERROR!!: syntax error near %s:%lu\n", tk->finfo.filename, tk->finfo.start_line_num); exit(EXIT_FAILURE); } indent--; break; case Expr: case Stmt: tk->finfo.indent = indent; setIndent(tk, indent); break; default: syntax->tks[i]->finfo.indent = indent; break; } } } void Lexer::setBlockIDWithBreadthFirst(Token *syntax, size_t base_id) { using namespace SyntaxType; size_t tk_n = syntax->token_num; size_t block_num = 0; for (size_t i = 0; i < tk_n; i++) { Token *tk = syntax->tks[i]; if (tk->stype == BlockStmt) block_num++; } size_t total_block_num = block_num; block_num = 0; for (size_t i = 0; i < tk_n; i++) { Token *tk = syntax->tks[i]; switch (tk->stype) { case BlockStmt: setBlockIDWithBreadthFirst(tk, base_id + total_block_num + 1); block_num++; break; case Expr: case Stmt: setBlockIDWithBreadthFirst(tk, base_id + block_num); break; default: syntax->tks[i]->finfo.block_id = base_id + block_num; break; } } } void Lexer::setBlockIDWithDepthFirst(Token *syntax, size_t *block_id) { using namespace SyntaxType; size_t tk_n = syntax->token_num; size_t base_id = *block_id; for (size_t i = 0; i < tk_n; i++) { Token *tk = syntax->tks[i]; switch (tk->stype) { case BlockStmt: *block_id += 1; syntax->tks[i]->finfo.block_id = *block_id; setBlockIDWithDepthFirst(tk, block_id); break; case Expr: case Stmt: syntax->tks[i]->finfo.block_id = base_id; setBlockIDWithDepthFirst(tk, block_id); break; default: syntax->tks[i]->finfo.block_id = base_id; break; } } } void Lexer::dumpSyntax(Token *syntax, int indent) { using namespace SyntaxType; size_t tk_n = syntax->token_num; for (size_t i = 0; i < tk_n; i++) { Token *tk = syntax->tks[i]; for (int j = 0; j < indent; j++) { fprintf(stdout, "----------------"); } switch (tk->stype) { case Term: fprintf(stdout, "Term |\n"); dumpSyntax(tk, ++indent); indent--; break; case Expr: fprintf(stdout, "Expr |\n"); dumpSyntax(tk, ++indent); indent--; break; case Stmt: fprintf(stdout, "Stmt |\n"); dumpSyntax(tk, ++indent); indent--; break; case BlockStmt: fprintf(stdout, "BlockStmt |\n"); dumpSyntax(tk, ++indent); indent--; break; default: fprintf(stdout, "%-12s\n", syntax->tks[i]->info.name); break; } } } Tokens *Lexer::getTokensBySyntaxLevel(Token *root, SyntaxType::Type type) { Tokens *ret = new Tokens(); for (size_t i = 0; i < root->token_num; i++) { Token **tks = root->tks; if (tks[i]->stype == type) { ret->push_back(tks[i]); } if (tks[i]->token_num > 0) { Tokens *new_tks = getTokensBySyntaxLevel(tks[i], type); ret->insert(ret->end(), new_tks->begin(), new_tks->end()); } } return ret; } Modules *Lexer::getUsedModules(Token *root) { using namespace TokenType; Modules *ret = new Modules(); for (size_t i = 0; i < root->token_num; i++) { Token **tks = root->tks; if (tks[i]->info.type == UseDecl && i + 1 < root->token_num) { const char 
*module_name = tks[i+1]->_data; string args; for (i += 2; i < root->token_num && tks[i]->info.type != SemiColon; i++) { args += " " + string(tks[i]->deparse()); } ret->push_back(new Module(module_name, (new string(args))->c_str())); } if (i < root->token_num && tks[i]->token_num > 0) { Modules *new_mds = getUsedModules(tks[i]); ret->insert(ret->end(), new_mds->begin(), new_mds->end()); } } return ret; } Compiler_manager.cpp100644000765000024 1035513603257356 23464 0ustar00goccystaff000000000000Compiler-Lexer-0.23/src/compiler/lexer#include namespace TokenType = Enum::Token::Type; TokenManager::TokenManager(size_t script_size, bool verbose) : max_token_size(0), idx(0) { size_t token_size = sizeof(Token); tokens = new Tokens(); pool = (TokenPool *)calloc(script_size, token_size); head = pool; undefined_info = getTokenInfo(TokenType::Undefined); this->verbose = verbose; } Token *TokenManager::at(size_t i) { return head + i; } Token *TokenManager::nextToken(Token *tk) { if (!verbose) { return (tk + 1 < pool) ? tk + 1 : NULL; } Token *next_tk = (tk + 1 < pool) ? tk + 1 : NULL; /* refetch is necessary when verbose mode */ while (next_tk != NULL && next_tk->info.type == TokenType::WhiteSpace) { next_tk = (next_tk + 1 < pool) ? next_tk + 1 : NULL; } return next_tk; } Token *TokenManager::previousToken(Token *tk) { if (!verbose) { return (tk != head) ? tk - 1 : NULL; } Token *prev_tk = (tk != head) ? tk - 1 : NULL; /* refetch is necessary when verbose mode */ while (prev_tk != NULL && prev_tk->info.type == TokenType::WhiteSpace) { prev_tk = (prev_tk != head) ? prev_tk - 1 : NULL; } return prev_tk; } Token *TokenManager::beforePreviousToken(Token *tk) { if (!verbose) { return (tk != head && (tk-1) != head) ? tk - 2 : NULL; } Token *prev_tk = (tk != head) ? tk - 1 : NULL; while (prev_tk != NULL && prev_tk->info.type == TokenType::WhiteSpace) { prev_tk = (prev_tk != head) ? prev_tk - 1 : NULL; } Token *before_prev_tk = (prev_tk != head) ? prev_tk - 1 : NULL; while (before_prev_tk != NULL && before_prev_tk->info.type == TokenType::WhiteSpace) { before_prev_tk = (before_prev_tk != head) ? before_prev_tk - 1 : NULL; } return before_prev_tk; } Token *TokenManager::lastToken(void) { return (head != pool) ? pool-1 : NULL; } Token *TokenManager::beforeLastToken(void) { return (head + 2 <= pool) ? pool-2 : NULL; } size_t TokenManager::size(void) { return (pool - head); } void TokenManager::dump(void) { size_t size = pool - head; for (size_t i = 0; i < size; i++) { Token *tk = (head + i); fprintf(stdout, "[%-12s] : %12s \n", tk->_data, tk->info.name); } } Token *TokenManager::getTokenByBase(Token *base, int offset) { Tokens *tks = this->tokens; size_t size = tks->size(); int wanted_idx = -1; for (size_t i = 0; i < size; i++) { if (tks->at(i) == base) { wanted_idx = i + offset; } } return (0 <= wanted_idx && (size_t)wanted_idx < size) ? tks->at(wanted_idx) : NULL; } Token *TokenManager::getTokenByIdx(size_t idx) { size_t size = tokens->size(); return (idx < size) ? tokens->at(idx) : NULL; } Token *TokenManager::beforePreviousToken(void) { size_t current_idx = this->idx; size_t size = tokens->size(); int wanted_idx = current_idx - 2; return (0 <= wanted_idx && (size_t)wanted_idx < size) ? this->beforePreviousToken(tokens->at(current_idx)) : NULL; } Token *TokenManager::previousToken(void) { size_t current_idx = this->idx; size_t size = tokens->size(); int wanted_idx = current_idx - 1; return (0 <= wanted_idx && (size_t)wanted_idx < size) ? 
this->previousToken(tokens->at(current_idx)) : NULL; } Token *TokenManager::currentToken(void) { size_t current_idx = this->idx; size_t size = tokens->size(); return (current_idx < size) ? tokens->at(current_idx) : NULL; } Token *TokenManager::nextToken(void) { size_t current_idx = this->idx; size_t size = tokens->size(); int wanted_idx = current_idx + 1; return (0 <= wanted_idx && (size_t)wanted_idx < size) ? this->nextToken(tokens->at(current_idx)) : NULL; } Token *TokenManager::next(void) { this->idx++; return currentToken(); } bool TokenManager::end(void) { return (idx >= tokens->size()) ? true : false; } void TokenManager::remove(size_t idx) { this->tokens->erase(this->tokens->begin() + idx); } Token *TokenManager::back(void) { this->idx--; return currentToken(); } ScriptManager::ScriptManager(char *script) : _script(script), raw_script(script), idx(0) { script_size = strlen(script) + 1; } bool ScriptManager::compare(int start, int len, std::string target) { size_t current_idx = this->idx; int s = current_idx + start; int e = s + len; if (0 <= s && (size_t)e < script_size) { char buffer[len + 1]; memset(buffer, 0, len + 1); memcpy(buffer, raw_script + s, len); return std::string(buffer) == target; } return false; } Compiler_scanner.cpp100644000765000024 10755713603257356 23536 0ustar00goccystaff000000000000Compiler-Lexer-0.23/src/compiler/lexer#include using namespace std; namespace TokenType = Enum::Token::Type; namespace SyntaxType = Enum::Parser::Syntax; namespace TokenKind = Enum::Token::Kind; Scanner::Scanner() : isStringStarted(false), isRegexStarted(false), isPrototypeStarted(false), isFormatStarted(false), formatDeclaredToken(NULL), commentFlag(false), skipFlag(false), regex_delim(0), regex_middle_delim(0), brace_count_inner_regex(0), bracket_count_inner_regex(0), cury_brace_count_inner_regex(0) { const char *regex_prefixes[] = { "q", "qq", "qw", "qx", "qr", "m", NULL }; const char *regex_replaces[] = { "s", "y", "tr", NULL }; const char *enable_regex_argument_funcs[] = { "map", "grep", "split", NULL }; const char *operators[] = { "<=>", "**=", "//=", "||=", "&&=", "...", "$#{", "$^A", "$^D", "$^E", "$^F", "$^G", "$^H", "$^I", "$^L", "$^M", "$^O", "$^P", "$^R", "$^T", "$^W", "$^X", "<=", ">=", ".=", "!=", "==", "+=", "-=", "*=", "%=", "|=", "&=", "^=", "<<", ">>", "++", "--", "**", "//", "&&", "||", "::", "..", "=>", "->", "@{", "%{", "${", "@$", "%$", "%-", "%+", "@-", "@+", "&$", "$#", "<>", "!~", "~~", "=~", "$0", "$1", "$2", "$3", "$4", "$5", "$6", "$7", "$8", "$9", "$&", "$`", "$'", "$+", "$.", "$/", "$|", "$,", "$\\", "$\"", "$%", "$=", "$-", "$~", "$^", "$*", "$:", "$;", "$?", "$!", "$@", /*"$$",*/ "$<", "$>", "$(", "$)", "$[", "$]", NULL }; const char *dereference_prefixes[] = { "@{", "%{", "${", "&{", "$#{", NULL }; for (size_t i = 0; regex_prefixes[i] != NULL; i++) { regex_prefix_map.insert(StringMap::value_type(regex_prefixes[i], "")); } for (size_t i = 0; regex_replaces[i] != NULL; i++) { enable_regex_argument_func_map.insert(StringMap::value_type(enable_regex_argument_funcs[i], "")); regex_replace_map.insert(StringMap::value_type(regex_replaces[i], "")); } for (size_t i = 0; operators[i] != NULL; i++) { operator_map.insert(StringMap::value_type(operators[i], "")); } for (size_t i = 0; dereference_prefixes[i] != NULL; i++) { dereference_prefix_map.insert(StringMap::value_type(dereference_prefixes[i], "")); } } Token *Scanner::scanQuote(LexContext *ctx, char quote) { TokenManager *tmgr = ctx->tmgr; ScriptManager *smgr = ctx->smgr; char prev_ch = 
smgr->previousChar(); Token *prev_token = tmgr->lastToken(); if (prev_token && prev_token->info.type == TokenType::RegExp) { return scanSymbol(ctx); } if (isalnum(prev_ch) || prev_ch == '_') { char *token = ctx->buffer(); TokenInfo info = tmgr->getTokenInfo(token); char cur_ch = smgr->currentChar(); if (cur_ch == '\'' && info.type == TokenType::Undefined) { Token *namespace_tk = tmgr->new_Token(token, ctx->finfo); namespace_tk->info = tmgr->getTokenInfo(TokenType::Namespace); tmgr->add(namespace_tk); ctx->clearBuffer(); ctx->writeBuffer(cur_ch); Token *namespace_resolver = tmgr->new_Token(ctx->buffer(), ctx->finfo); namespace_resolver->info = tmgr->getTokenInfo(TokenType::NamespaceResolver); ctx->clearBuffer(); return namespace_resolver; } else if (info.kind == TokenKind::RegPrefix || info.kind == TokenKind::RegReplacePrefix) { Token *tk = tmgr->new_Token(token, ctx->finfo); tk->info = info; tmgr->add(tk); ctx->clearBuffer(); return scanSymbol(ctx); } else { Token *tk = tmgr->new_Token(token, ctx->finfo); tk->info = info; tmgr->add(tk); ctx->clearBuffer(); } } for (smgr->next(); !smgr->end(); smgr->next()) { char ch = smgr->currentChar(); if (ch == '\n') { ctx->writeBuffer(ch); ctx->finfo.start_line_num++; continue; } else if (ch == quote) { char prev_ch = smgr->previousChar(); char before_prev_ch = smgr->beforePreviousChar(); if ((prev_ch == '\\' && before_prev_ch == '\\') || prev_ch != '\\') break; ctx->writeBuffer(ch); } else { ctx->writeBuffer(ch); } } if (smgr->end()) smgr->back(); Token *prev_tk = ctx->tmgr->lastToken(); int idx = ctx->tmgr->size() - 2; string prev_data = (prev_tk) ? string(prev_tk->_data) : ""; string before_prev_data = (idx >= 0) ? string(ctx->tmgr->beforeLastToken()->_data) : ""; char *token = ctx->buffer(); Token *ret = ctx->tmgr->new_Token(token, ctx->finfo); switch (quote) { case '\'': ret->info = tmgr->getTokenInfo(TokenType::RawString); break; case '"': ret->info = tmgr->getTokenInfo(TokenType::String); break; case '`': ret->info = tmgr->getTokenInfo(TokenType::ExecString); break; default: break; } ctx->clearBuffer(); if (prev_data == "<<" || (before_prev_data == "<<" && prev_data == "\\")) { /* String is HereDocument */ std::string here_document_tag = string(ret->_data); here_document_tag_tk = ret; if (here_document_tag == "") { here_document_tag = "\n"; here_document_tag_tk->_data = "\n"; } here_document_tags.push(here_document_tag); switch (quote) { case '\'': ret->info = tmgr->getTokenInfo(TokenType::HereDocumentRawTag); break; case '"': ret->info = tmgr->getTokenInfo(TokenType::HereDocumentTag); break; case '`': ret->info = tmgr->getTokenInfo(TokenType::HereDocumentExecTag); break; default: break; } } return ret; } Token *Scanner::scanRegQuote(LexContext *ctx, char delim) { TokenManager *tmgr = ctx->tmgr; ScriptManager *smgr = ctx->smgr; bool will_expand = delim == '}'; int brace_count_inner_quote = 0; for (; !smgr->end(); smgr->next()) { char ch = smgr->currentChar(); if (ch == '\n') { ctx->writeBuffer(ch); ctx->finfo.start_line_num++; } else if (brace_count_inner_quote == 0 && ch == delim) { break; } else { if (will_expand) { if (ch == '{') brace_count_inner_quote++; else if (ch == '}') brace_count_inner_quote--; } ctx->writeBuffer(ch); } } if (smgr->end()) smgr->back(); char *token = ctx->buffer(); Token *ret = tmgr->new_Token(token, ctx->finfo); ret->info = tmgr->getTokenInfo(TokenType::RegExp); ctx->clearBuffer(); return ret; } bool Scanner::scanNegativeNumber(LexContext *ctx, char number) { char num_buffer[2] = {0}; if (number != EOL) { 
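/* [Editor's note, not part of the original source] `number` is the character that follows a '-' in the input. When it is a digit, the code below treats the '-' either as a binary subtraction operator (the token buffer already holds a left operand, e.g. `$x-1`) or as the start of a negative numeric literal (the buffer is empty, e.g. `-1`). */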
num_buffer[0] = number; if (atoi(num_buffer) > 0 || number == '0') { if (ctx->existsBuffer()) { ctx->tmgr->add(ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo)); ctx->clearBuffer(); //sub operator ctx->writeBuffer('-'); Token *sub_operator = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); sub_operator->info = ctx->tmgr->getTokenInfo(TokenType::Sub); ctx->clearBuffer(); ctx->tmgr->add(sub_operator); } else { //negative number ctx->writeBuffer('-'); } return true; } } return false; } bool Scanner::isRegexStartDelim(LexContext *ctx, const StringMap &map) { /* exclude { m } or { m => ... } or { m, ... } or *m or //m */ string prev_data = string(ctx->buffer()); //... [more_before_prev_token] [before_prev_token] [prev_token] [symbol] ... if (map.find(prev_data) == map.end()) return false; Token *before_prev_token = ctx->tmgr->lastToken(); string before_prev_data = (before_prev_token) ? string(before_prev_token->_data) : ""; TokenType::Type before_prev_type = (before_prev_token) ? before_prev_token->info.type : TokenType::Undefined; TokenKind::Kind before_prev_kind = (before_prev_token) ? before_prev_token->info.kind : TokenKind::Undefined; char symbol = ctx->smgr->currentChar(); if (before_prev_type == TokenType::RegDelim) return false; /* regex option */ if (before_prev_data == "*") return false; /* glob */ if (before_prev_data == "&") return false; /* function call */ if (before_prev_data == "::") return false; /* method call */ /* ${m} or @{m} or %{m} or &{m} or $#{m} or $Var{m} */ if (symbol == '}') { Token *more_before_prev_token = ctx->tmgr->beforeLastToken(); if (more_before_prev_token && more_before_prev_token->_data[0] == '$') { return false; } /* it will return true if before_prev_data is not dereference */ return dereference_prefix_map.find(before_prev_data) == dereference_prefix_map.end(); } if (symbol == '=' || symbol == ')' || symbol == '>') return false; if (before_prev_kind == TokenKind::Modifier) return false; /* dereference */ return true; } bool Scanner::isRegexEndDelim(LexContext *ctx) { Token *token = ctx->tmgr->lastToken(); TokenType::Type type = (token) ? token->info.type : TokenType::Undefined; if (isRegexStarted) return true; if (type == TokenType::RegExp) return true; if (type == TokenType::RegReplaceTo) return true; return false; } char Scanner::getRegexDelim(LexContext *ctx) { char ret = EOL; char symbol = ctx->smgr->currentChar(); switch (symbol) { case '{': ret = '}'; brace_count_inner_regex++; break; case '(': ret = ')'; cury_brace_count_inner_regex++; break; case '[': ret = ']'; bracket_count_inner_regex++; break; case '<': ret = '>'; break; default: ret = symbol; break; } return ret; } bool Scanner::isPrototype(LexContext *ctx) { Token *prev_token = ctx->tmgr->lastToken(); string prev_data = (prev_token) ? string(prev_token->_data) : ""; int idx = ctx->tmgr->size() - 2; string before_prev_data = (idx >= 0) ? string(ctx->tmgr->beforeLastToken()->_data) : ""; char symbol = ctx->smgr->currentChar(); if (symbol != '(') return false; if (prev_data == "sub") return true; if (prev_data != "{" && before_prev_data == "sub") return true; return false; } bool Scanner::isHereDocument(LexContext *ctx, Token *tk) { int idx = ctx->tmgr->size() - 2; string prev_tk_data = (idx >= 0) ? string(ctx->tmgr->beforeLastToken()->_data) : ""; string tk_data = (tk) ? 
string(tk->_data) : ""; char *token = ctx->buffer(); if ((tk_data == "<<" || (prev_tk_data == "<<" && tk_data == "\\")) && strtod(token, NULL) == 0 && string(token) != "0" && (isupper(token[0]) || islower(token[0]) || token[0] == '_')) { return true; } return false; } bool Scanner::isFormat(LexContext *, Token *tk) { return (string(tk->_data) == "format") ? true : false; } bool Scanner::isRegexDelim(LexContext *ctx, Token *prev_token, char symbol) { const char *prev_data = (prev_token) ? prev_token->_data : ""; /* [^0-9] && !"0" && !CONST && !{hash} && ![array] && !func() && !$var */ string prev_tk = string(prev_data); if (regex_delim == 0 && prev_token && prev_token->info.type == TokenType::Undefined && (symbol != '-' && symbol != '=' && symbol != ',' && symbol != ')') && regex_prefix_map.find(prev_tk) != regex_prefix_map.end()) { /* ${m} or @{m} or %{m} or &{m} or $#{m} or $Var{m} */ if (symbol == '}') { /* more back */ prev_token = ctx->tmgr->previousToken(prev_token); prev_tk = string((prev_token) ? prev_token->_data : ""); Token *more_prev_tk = ctx->tmgr->previousToken(prev_token); if (more_prev_tk && more_prev_tk->_data[0] == '$') { return false; } /* it will return true if before_prev_data is not dereference */ return dereference_prefix_map.find(prev_tk) == dereference_prefix_map.end(); } return true; } else if (regex_delim == 0 && prev_token && (prev_token->info.kind == TokenKind::RegPrefix || prev_token->info.kind == TokenKind::RegReplacePrefix)) { return true; } TokenType::Type prev_type = (prev_token) ? prev_token->info.type : TokenType::Undefined; if (prev_type == TokenType::RawString || prev_type == TokenType::String || prev_type == TokenType::ExecString) return false; if (symbol != '/') return false; if (!prev_token) return true; if (symbol == '/' && (prev_tk == "xor" || prev_tk == "and" || prev_tk == "not" || prev_tk == "or")) return true; if (strtod(prev_data, NULL)) return false; if (prev_tk == "0") return false; if (enable_regex_argument_func_map.find(prev_tk) != enable_regex_argument_func_map.end()) return true; if (!isupper(prev_data[0]) && prev_data[0] != '_' && prev_data[0] != '}' && prev_data[0] != ']' && prev_data[0] != ')' && prev_data[0] != '$' && prev_data[0] != '@' && prev_data[0] != '%') { if (isalpha(prev_data[0]) && prev_tk != "if" && prev_tk != "unless" && prev_tk != "ok") return false; return true; } return false; } Token *Scanner::scanPrevSymbol(LexContext *ctx, char ) { char *token = ctx->buffer(); TokenManager *tmgr = ctx->tmgr; Token *ret = NULL; Token *prev_tk = ctx->tmgr->lastToken(); bool isPointer = (prev_tk && prev_tk->info.type == TokenType::Pointer) ? 
true : false; if (!isPointer && isRegexStartDelim(ctx, regex_prefix_map)) { //RegexPrefix ret = ctx->tmgr->new_Token(token, ctx->finfo); ret->info = tmgr->getTokenInfo(token); regex_delim = getRegexDelim(ctx); isRegexStarted = true; skipFlag = true; } else if (!isPointer && isRegexStartDelim(ctx, regex_replace_map)) { //ReplaceRegexPrefix ret = ctx->tmgr->new_Token(token, ctx->finfo); ret->info = tmgr->getTokenInfo(token); char delim = getRegexDelim(ctx); regex_delim = delim; regex_middle_delim = delim; isRegexStarted = true; skipFlag = true; } else if (isPrototype(ctx)) { ret = ctx->tmgr->new_Token(token, ctx->finfo); isPrototypeStarted = true; skipFlag = true; } else { Token *prev_before_tk = ctx->tmgr->lastToken(); if (isHereDocument(ctx, prev_before_tk)) { /* Key is HereDocument */ ret = ctx->tmgr->new_Token(token, ctx->finfo); here_document_tags.push(string(token)); here_document_tag_tk = ret; ret->info = tmgr->getTokenInfo(TokenType::HereDocumentBareTag); } else { ret = ctx->tmgr->new_Token(token, ctx->finfo); } } ctx->clearBuffer(); return ret; } bool Scanner::isRegexOption(const char *opt) { size_t len = strlen(opt); for (size_t i = 0; i < len; i++) { char ch = opt[i]; switch (ch) { case 'a': case 'c': case 'd': case 'e': case 'g': case 'i': case 'm': case 'l': case 'o': case 'p': case 'r': case 's': case 'u': case 'x': break; default: return false; break; } } return true; } bool Scanner::isRegexOptionPrevToken(LexContext *ctx) { if (ctx->tmgr->size() < 2) return false; Token *before_prev_token = ctx->tmgr->beforeLastToken(); Token *prev_token = ctx->tmgr->lastToken(); const char *data = prev_token->_data; if (before_prev_token->info.type == TokenType::RegDelim && isalpha(data[0]) && string(data) != "or" && isRegexOption(data)) { return true; } return false; } Token *Scanner::scanCurSymbol(LexContext *ctx, char symbol) { Token *ret = NULL; TokenManager *tmgr = ctx->tmgr; Token *prev_tk = ctx->tmgr->lastToken(); string prev_data = (prev_tk) ? prev_tk->_data : ""; int idx = ctx->tmgr->size() - 2; string prev_before = (idx >= 0) ? 
string(ctx->tmgr->beforeLastToken()->_data) : ""; if ((prev_before != "sub" && !isRegexOptionPrevToken(ctx) && isRegexDelim(ctx, prev_tk, symbol)) || (prev_data == "{" && symbol == '/')) { if (!isRegexEndDelim(ctx)) { regex_delim = getRegexDelim(ctx); isRegexStarted = true; skipFlag = true; } else { regex_delim = 0; } ctx->writeBuffer(symbol); ret = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); ret->info = tmgr->getTokenInfo(TokenType::RegDelim); ctx->clearBuffer(); } else if (isRegexEndDelim(ctx)) { ctx->writeBuffer(symbol); ret = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); ret->info = tmgr->getTokenInfo(TokenType::RegDelim); ctx->clearBuffer(); } else if (symbol == '*') { char ch = symbol; size_t progressing = 0; ScriptManager *smgr = ctx->smgr; ctx->writeBuffer(ch); /* skip whitespaces */ do { smgr->idx++; progressing++; if (smgr->end()) break; ch = smgr->currentChar(); } while (ch == ' ' || ch == '\n'); /* rollback */ smgr->idx -= progressing; /* if syntax is like *[a-zA-Z_] */ if (isalpha(ch) || ch == '_') return ret; ret = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); ctx->clearBuffer(); } else if (symbol == '@' || symbol == '$' || symbol == '%') { //|| symbol == '&') ctx->writeBuffer(symbol); } else if (symbol == ';') { ctx->writeBuffer(symbol); ret = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); ctx->clearBuffer(); } else if (isPrototype(ctx)) { ctx->writeBuffer(symbol); ret = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); ctx->clearBuffer(); isPrototypeStarted = true; skipFlag = true; } else if (symbol != '\n') { if (prev_tk && symbol == '^') { ScriptManager *smgr = ctx->smgr; switch (prev_tk->info.type) { /* ${m} or @{m} or %{m} or &{m} or $#{m} */ case TokenType::ArrayDereference: case TokenType::HashDereference: case TokenType::ScalarDereference: case TokenType::CodeDereference: case TokenType::ArraySizeDereference: for (; !smgr->end(); smgr->next()) { char ch = smgr->currentChar(); if (ch == '}') { break; } ctx->writeBuffer(ch); } ret = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); ret->info = ctx->tmgr->getTokenInfo(TokenType::Key); ctx->clearBuffer(); smgr->back(); // } return ret; default: break; } } ctx->writeBuffer(symbol); ret = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); ctx->clearBuffer(); } return ret; } Token *Scanner::scanTripleCharacterOperator(LexContext *ctx, char symbol, char next_ch, char after_next_ch) { Token *ret = NULL; char op[4] = { symbol, next_ch, after_next_ch, EOL }; if (triple_operator_map.in_word_set(op)) {// != operator_map.end()) { ctx->writeBuffer(symbol); ctx->writeBuffer(next_ch); ctx->writeBuffer(after_next_ch); ret = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); ret->info = ctx->tmgr->getTokenInfo(op); ctx->clearBuffer(); ctx->progress = 2; } else if (symbol == '$' && next_ch == '$') { ret = ctx->tmgr->new_Token((char *)"$$", ctx->finfo); TokenManager *tmgr = ctx->tmgr; ret->info = (isalpha(after_next_ch) || after_next_ch == '_') ? 
tmgr->getTokenInfo(TokenType::ShortScalarDereference) : tmgr->getTokenInfo("$$"); ctx->progress = 1; } return ret; } Token *Scanner::scanDoubleCharacterOperator(LexContext *ctx, char symbol, char next_ch) { Token *ret = NULL; char op[3] = { symbol, next_ch, EOL }; if (double_operator_map.in_word_set(op)) { ctx->writeBuffer(symbol); ctx->writeBuffer(next_ch); ret = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); ret->info = ctx->tmgr->getTokenInfo(op); ctx->clearBuffer(); ctx->progress = 1; } else if (symbol == '/' && next_ch == '=') { Token *prev_tk = ctx->tmgr->lastToken(); const char *prev_data = prev_tk->_data; /* '/=' is RegDelim + RegExp or DivEqual */ if (strtod(prev_data, NULL) != 0 || string(prev_data) == "0" || isupper(prev_data[0]) || prev_data[0] == '}' || prev_data[0] == ']' || prev_data[0] == ')' || prev_data[0] == '$') { ctx->writeBuffer(symbol); ctx->writeBuffer(next_ch); ret = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); ctx->clearBuffer(); ctx->progress = 1; } } return ret; } /* Scanner::scanPostDeref The postfix dereference is a bit odd because we have to treat a sigil a bit special. Scalars are simple: $scalar->$* Arrays have a special case with the last index, and support single element access and slices: $array->@* $array->$#* $array->@[0] $array->@[0,1] Hashes support single element access and slices: $hash->%* $array->%{key} $array->%{key,key2} Code supports argument lists: $code->&* $code->&( arg, arg2 ) Typeglobs have "keys" into the symbol table $gref->** $gref->*{SCALAR} */ Token *Scanner::scanPostDeref(LexContext *ctx) { Token *ret = NULL; Token *sigil_tk = NULL; if (!isPostDeref(ctx)) return ret; char symbol = ctx->smgr->currentChar(); ctx->writeBuffer(symbol); if (symbol == '$') { char next_ch = ctx->smgr->nextChar(); if (next_ch=='#') { // we have the last array index symbol = ctx->smgr->forward(1); ctx->writeBuffer(next_ch); } } sigil_tk = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); sigil_tk->info = ctx->tmgr->getTokenInfo(TokenType::PostDeref); ctx->clearBuffer(); // This is a bit odd because we add a Token directly instead of // returning it and letting the rest of the system figure it out ctx->tmgr->add(sigil_tk); // We only care if it's a *. We'll let the rest of the tokenizer // handle the slices, which would have [, {, ( char next_ch = ctx->smgr->nextChar(); if (next_ch != '*') return ret; symbol = ctx->smgr->forward(1); ctx->writeBuffer(symbol); ret = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); ctx->clearBuffer(); ret->info = ctx->tmgr->getTokenInfo(TokenType::PostDerefStar); return ret; } /* Scanner::isPostDeref See Scanner::scanPostDeref for the rules */ bool Scanner::isPostDeref(LexContext *ctx) { Token *prev_token = ctx->tmgr->lastToken(); string prev_data = (prev_token) ? string(prev_token->_data) : ""; char symbol = ctx->smgr->currentChar(); // Should I check that the previous Token was Pointer // instead of looking at the data if (prev_data != "->") return false; // do we need an isSigil method? if (symbol != '$' && symbol != '@' && symbol != '%' && symbol != '&' && symbol != '*') return false; char next_ch = ctx->smgr->nextChar(); // scalar and array index case if (symbol == '$' && ! ( next_ch == '*' || next_ch == '#' )) return false; // array case if (symbol == '@' && ! ( next_ch == '*' || next_ch == '[' )) return false; // hash case if (symbol == '%' && ! ( next_ch == '*' || next_ch == '{' )) return false; // code case if (symbol == '&' && ! 
( next_ch == '*' || next_ch == '(' )) return false; // typeglob case if (symbol == '*' && ! ( next_ch == '*' || next_ch == '{' )) return false; return true; } Token *Scanner::scanSymbol(LexContext *ctx) { Token *ret = NULL; ScriptManager *smgr = ctx->smgr; char symbol = smgr->currentChar(); char next_ch = smgr->nextChar(); char after_next_ch = smgr->afterNextChar(); if (ctx->existsBuffer()) ctx->tmgr->add(scanPrevSymbol(ctx, symbol)); if (!isRegexStarted) { ret = scanPostDeref(ctx); if (!ret) ret = scanTripleCharacterOperator(ctx, symbol, next_ch, after_next_ch); if (!ret && !isRegex(ctx)) ret = scanDoubleCharacterOperator(ctx, symbol, next_ch); } if (!ret) ret = scanCurSymbol(ctx, symbol); return ret; } Token *Scanner::scanWordDelimiter(LexContext *ctx) { TokenManager *tmgr = ctx->tmgr; Token *ret = NULL; if (ctx->existsBuffer()) { char *token = ctx->buffer(); if (isHereDocument(ctx, ctx->tmgr->lastToken())) { ret = ctx->tmgr->new_Token(token, ctx->finfo); /* Key is HereDocument */ here_document_tags.push(string(token)); here_document_tag_tk = ret; ret->info = tmgr->getTokenInfo(TokenType::HereDocumentBareTag); } else if (string(token) == "format") { ret = ctx->tmgr->new_Token(token, ctx->finfo); // if it has been declared `format` (means it has been in format context), // this token should not be FormatDecl. Check here. if (formatDeclaredToken == NULL) { // when it has not been in format context ret->info = tmgr->getTokenInfo(TokenType::FormatDecl); formatDeclaredToken = ret; } } else if (token[0] != '\n' || token[1] != EOL) { ret = ctx->tmgr->new_Token(token, ctx->finfo); } ctx->clearBuffer(); } return ret; } Token *Scanner::scanReference(LexContext *ctx) { Token *ret = NULL; char next_ch = ctx->smgr->nextChar(); if (next_ch == '$' || next_ch == '@' || next_ch == '%' || next_ch == '&') { ret = ctx->tmgr->new_Token((char *)"\\", ctx->finfo); } return ret; } Token *Scanner::scanSingleLineComment(LexContext *ctx) { Token *ret = NULL; ScriptManager *smgr = ctx->smgr; TokenManager *tmgr = ctx->tmgr; if (ctx->existsBuffer()) tmgr->add(scanPrevSymbol(ctx, '#')); Token *prev_tk = ctx->tmgr->lastToken(); TokenType::Type prev_type = (prev_tk) ? prev_tk->info.type : TokenType::Undefined; if (isRegexStarted || prev_type == TokenType::RegExp || prev_type == TokenType::RegReplaceTo) { ctx->writeBuffer('#'); ret = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); ret->info = tmgr->getTokenInfo(TokenType::RegDelim); ctx->clearBuffer(); } else { if (verbose) { for (; smgr->currentChar() != '\n' && !smgr->end(); smgr->next()) { ctx->writeBuffer(smgr->currentChar()); } Token *tk = tmgr->new_Token(ctx->buffer(), ctx->finfo); tk->info = tmgr->getTokenInfo(TokenType::Comment); ctx->clearBuffer(); tmgr->add(tk); } else { for (; smgr->currentChar() != '\n' && !smgr->end(); smgr->next()) {} } tmgr->add(scanWhiteSpace(ctx)); ctx->finfo.start_line_num++; } return ret; } Token *Scanner::scanLineDelimiter(LexContext *ctx) { Token *ret = scanWordDelimiter(ctx); Token *last_tk = ctx->tmgr->lastToken(); string data = (ret) ? string(ret->_data) : (last_tk) ? 
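/* End-of-line resolution of a pending `format` declaration: when the line ends at "=",
 * the body that follows is treated as a format block only if the FormatDecl token is
 * still one of the two tokens immediately before it (as in `format NAME =` or `format =`);
 * otherwise the earlier `format` was an ordinary word and is downgraded to Undefined.
 * Independently, a here-document tag opened on this line switches on skip mode so that
 * isSkip() collects the raw document body. */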
string(last_tk->_data) : ""; if (formatDeclaredToken != NULL && data == "=") { TokenManager *tmgr = ctx->tmgr; Token *currentToken = tmgr->lastToken(); Token *prev_token = tmgr->previousToken(currentToken); Token *before_prev_token = tmgr->beforePreviousToken(currentToken); if ( (prev_token != NULL && prev_token->info.type != Enum::Token::Type::FormatDecl) && (before_prev_token != NULL && before_prev_token->info.type != Enum::Token::Type::FormatDecl) ) { // When reach here, maybe `FormatDecl` which was declared previous is invalid. // So downgrade a doubtful token to `Undefined` and don't deal as format context. formatDeclaredToken->info.type = Enum::Token::Type::Undefined; } else { // format context. isFormatStarted = true; skipFlag = true; } formatDeclaredToken = NULL; } else if (hereDocumentFlag()) { skipFlag = true; } ctx->clearBuffer(); return ret; } static inline char next(LexContext *ctx, char *src, size_t &i) { ctx->writeBuffer((src+i)[0]); return *(src + i++); } #define PREDICT() (*(src + i)) #define is_number(ch) ('0' <= ch && ch <= '9') #define is_number_literal(ch) ((is_number(ch) || ch == '_') && ch != EOL) #define is_hexchar(ch) (('a' <= ch && ch <= 'f') || ('A' <= ch && ch <= 'F')) bool Scanner::isVersionString(LexContext *ctx) { if (!ctx->existsBuffer()) return false; char *token = ctx->buffer(); if (token[0] != 'v') return false; for (int i = 1; token[i] != EOL; i++) { if (!is_number(token[i])) return false; } return true; } Token *Scanner::scanVersionString(LexContext *ctx) { TokenManager *tmgr = ctx->tmgr; char *src = ctx->smgr->raw_script; size_t i = ctx->smgr->idx; // char *begin = src + i; char c = next(ctx, src, i);//NEXT(); Token *token = NULL; for (;(is_number(c) || c == '.' || c == '_') && c != EOL; c = next(ctx, src, i)) {} i -= 1; char *buf = ctx->buffer(); buf[ctx->buffer_idx-1] = EOL; token = ctx->tmgr->new_Token(buf, ctx->finfo); token->info = tmgr->getTokenInfo(TokenType::VersionString); ctx->smgr->idx = --i; return token; } Token *Scanner::scanNumber(LexContext *ctx) { TokenManager *tmgr = ctx->tmgr; char *src = ctx->smgr->raw_script; size_t i = ctx->smgr->idx; // char *begin = src + i; int c = next(ctx, src, i); Token *token = NULL; assert((c == '.' || is_number(c)) && "It do not seem as Number"); bool isFloat = false; if (is_number(c)) { /* first char */ if (is_number_literal(c)) c = next(ctx, src, i); /* second char is includes 'b' or 'x' */ if ((is_number(c) || c == 'b' || c == 'x' || c == '_') && c != EOL) c = next(ctx, src, i); for (;(is_number(c) || is_hexchar(c) || c == '_') && c != EOL; c = next(ctx, src, i)) {} } if (c != '.' && c != 'e' && c != 'E') goto L_emit; if (c == '.') { c = PREDICT(); if (c == '.') { goto L_emit; /* Number .. */ } isFloat = true; for (; is_number_literal(c); c = next(ctx, src, i)) {} } if (c == 'e' || c == 'E') { isFloat = true; c = next(ctx, src, i); if (c == '+' || c == '-') c = next(ctx, src, i); for (; is_number_literal(c); c = next(ctx, src, i)) {} } L_emit:; i -= 1; char *buf = ctx->buffer(); buf[ctx->buffer_idx-1] = EOL; token = ctx->tmgr->new_Token(buf, ctx->finfo); token->info = isFloat ? tmgr->getTokenInfo(TokenType::Double) : tmgr->getTokenInfo(TokenType::Int); ctx->smgr->idx = --i; return token; } Token *Scanner::scanWhiteSpace(LexContext *ctx) { TokenManager *tmgr = ctx->tmgr; Token *prev_tk = tmgr->lastToken(); TokenType::Type prev_type = (prev_tk) ? 
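/* Whitespace is materialised as tokens only in verbose mode: a run of spaces/tabs
 * collapses into a single WhiteSpace token, and a lone newline directly after a token
 * is reported on that token's line. In non-verbose mode the buffer is discarded and
 * NULL is returned. */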
prev_tk->info.type : TokenType::Undefined; bool does_ws_continue = false; ScriptManager *smgr = ctx->smgr; for (; !smgr->end(); smgr->next()) { char ch = smgr->currentChar(); if (ch == ' ' || ch == '\t') { // For normal whitespace. // It collects into one token when a whitespace continues. ctx->writeBuffer(ch); does_ws_continue = true; continue; } else if (!does_ws_continue && ch == '\n') { // For newline character. // It should be on the same line to before token. ctx->writeBuffer(ch); if (verbose) { ctx->finfo.start_line_num = (prev_tk != NULL) ? prev_tk->finfo.start_line_num : 1; } break; } smgr->back(); break; } if (!verbose) { ctx->clearBuffer(); return NULL; } if (ctx->existsBuffer()) { Token *token = tmgr->new_Token(ctx->buffer(), ctx->finfo); token->info = tmgr->getTokenInfo(TokenType::WhiteSpace); ctx->clearBuffer(); return token; } return NULL; } #undef NEXT #undef PREDICT bool Scanner::isRegex(LexContext *ctx) { Token *prev_tk = ctx->tmgr->lastToken(); string prev_data = string(prev_tk ? prev_tk->_data : ""); TokenType::Type prev_type = prev_tk ? prev_tk->info.type : TokenType::Undefined; bool isRegexArg = enable_regex_argument_func_map.find(prev_data) != enable_regex_argument_func_map.end(); return isRegexArg || prev_type == TokenType::RegOK; } bool Scanner::isSkip(LexContext *ctx) { using namespace TokenType; bool ret = commentFlag; ScriptManager *smgr = ctx->smgr; TokenManager *tmgr = ctx->tmgr; char *script = smgr->raw_script; size_t idx = smgr->idx; char prev_ch = smgr->previousChar(); char cur_ch = smgr->currentChar(); if (prev_ch == '\n' && cur_ch == '=' && isalnum(smgr->nextChar())) { if (smgr->compare(1, 3, "cut")) { DBG_PL("commentFlag => OFF"); smgr->idx += 4; commentFlag = false; ret = false; if (verbose) { ctx->finfo.start_line_num++; ctx->writeBuffer("=cut"); Token *tk = tmgr->new_Token(ctx->buffer(), ctx->finfo); tk->info = tmgr->getTokenInfo(TokenType::Pod); ctx->clearBuffer(); tmgr->add(tk); tmgr->add(scanWhiteSpace(ctx)); } ctx->finfo.start_line_num++; } else { DBG_PL("commentFlag => ON"); commentFlag = true; ret = true; } } if (commentFlag) { if (verbose) ctx->writeBuffer(cur_ch); return ret; } if (prev_ch == '\n' && cur_ch == '_' && !hereDocumentFlag() && smgr->compare(0, 7, "__END__")) { int progress_to_end = ctx->script_size - idx - 1; ctx->progress = progress_to_end; ret = false; } else if (prev_ch == '\n' && cur_ch == '_' && !hereDocumentFlag() && smgr->compare(0, 8, "__DATA__")) { int progress_to_end = ctx->script_size - idx - 1; ctx->progress = progress_to_end; ret = false; } if (!skipFlag) return ret; if (isFormatStarted) { if (prev_ch == '\n' && cur_ch == '.') { Token *tk = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); tk->info = tmgr->getTokenInfo(Format); ctx->clearBuffer(); tmgr->add(tk); tk = ctx->tmgr->new_Token((char *)".", ctx->finfo); tk->info = tmgr->getTokenInfo(TokenType::FormatEnd); tmgr->add(tk); ctx->progress = 1; isFormatStarted = false; skipFlag = false; ret = false; } else { ctx->writeBuffer(script[idx]); ret = true; } } else if (isRegexStarted) { char before_prev_ch = smgr->beforePreviousChar(); if (prev_ch != '\\' || (prev_ch == '\\' && before_prev_ch == '\\')) { Token *last_tk = tmgr->lastToken(); Token *before_last_tk = tmgr->beforeLastToken(); TokenType::Type prefixType = before_last_tk ? 
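/* Quote-like constructs (q//, qq//, qx//, qw//) are closed here: when the token before
 * the opening delimiter is RegQuote, RegDoubleQuote, RegExec or RegList, the body is
 * scanned up to the matching end delimiter ('{', '[', '(' and '<' pair with their
 * closers, any other delimiter closes itself) and the closer is emitted as RegDelim. */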
before_last_tk->info.type : TokenType::Undefined; if (last_tk && (prefixType == TokenType::RegQuote || prefixType == TokenType::RegDoubleQuote || prefixType == TokenType::RegExec || prefixType == TokenType::RegList)) { char end_delim; char last_ch = last_tk->_data[0]; switch (last_ch) { case '{': end_delim = '}'; break; case '[': end_delim = ']'; break; case '(': end_delim = ')'; break; case '<': end_delim = '>'; break; default: end_delim = last_ch; break; } tmgr->add(this->scanRegQuote(ctx, end_delim)); ctx->writeBuffer(smgr->currentChar()); Token *end_delim_tk = tmgr->new_Token(ctx->buffer(), ctx->finfo); end_delim_tk->info = tmgr->getTokenInfo(TokenType::RegDelim); tmgr->add(end_delim_tk); ctx->clearBuffer(); isRegexStarted = false; skipFlag = false; regex_delim = 0; brace_count_inner_regex = 0; cury_brace_count_inner_regex = 0; bracket_count_inner_regex = 0; return true; } switch (cur_ch) { case '{': brace_count_inner_regex++; break; case '}': if (brace_count_inner_regex > 0) brace_count_inner_regex--; break; case '[': bracket_count_inner_regex++; break; case ']': if (bracket_count_inner_regex > 0) bracket_count_inner_regex--; break; case '(': cury_brace_count_inner_regex++; break; case ')': if (cury_brace_count_inner_regex > 0) cury_brace_count_inner_regex--; break; default: break; } } if (prev_ch == '\\' && before_prev_ch != '\\') { ctx->writeBuffer(cur_ch); ret = true; } else if (cur_ch != regex_delim && cur_ch != regex_middle_delim) { ctx->writeBuffer(cur_ch); ret = true; } else if (cur_ch == regex_middle_delim) { if ((regex_middle_delim == '}' && brace_count_inner_regex != 0) || (regex_middle_delim == ')' && cury_brace_count_inner_regex != 0) || (regex_middle_delim == ']' && bracket_count_inner_regex != 0)) { ctx->writeBuffer(cur_ch); ret = true; } else { Token *tk = NULL; if (regex_middle_delim != '{' && regex_middle_delim != '(' && regex_middle_delim != '<' && regex_middle_delim != '[') { tk = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); tk->info = tmgr->getTokenInfo(RegReplaceFrom); ctx->clearBuffer(); tmgr->add(tk); } ctx->writeBuffer(regex_middle_delim); tk = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); tk->info = tmgr->getTokenInfo(RegMiddleDelim); ctx->clearBuffer(); tmgr->add(tk); switch (regex_middle_delim) { case '}': regex_middle_delim = '{'; break; case ')': regex_middle_delim = '('; break; case '>': regex_middle_delim = '<'; break; case ']': regex_middle_delim = '['; break; default: regex_middle_delim = '\0'; break; } ret = true; } } else { if ((regex_delim == '}' && brace_count_inner_regex != 0) || (regex_delim == ')' && cury_brace_count_inner_regex != 0) || (regex_delim == ']' && bracket_count_inner_regex != 0)) { ctx->writeBuffer(cur_ch); ret = true; } else { Token *prev_tk = ctx->tmgr->lastToken(); Token *tk = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); tk->info = (prev_tk->info.type == RegMiddleDelim) ? 
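/* Closing delimiter of a regex-like literal: if a middle delimiter has already been
 * emitted (as in s/.../.../), the buffered text is the replacement part (RegReplaceTo);
 * otherwise it is the pattern itself (RegExp). */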
tmgr->getTokenInfo(RegReplaceTo) : tmgr->getTokenInfo(RegExp); ctx->clearBuffer(); tmgr->add(tk); ret = false; isRegexStarted = false; skipFlag = false; regex_delim = 0; brace_count_inner_regex = 0; cury_brace_count_inner_regex = 0; bracket_count_inner_regex = 0; } } } else if (isPrototypeStarted) { if (script[idx] == ')') { Token *tk = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); tk->info = tmgr->getTokenInfo(Prototype); ctx->clearBuffer(); tmgr->add(tk); isPrototypeStarted = false; skipFlag = false; ret = false; } else { ctx->writeBuffer(script[idx]); ret = true; } } else if (hereDocumentFlag()) { std::string here_document_tag = here_document_tags.front(); size_t len = here_document_tag.size(); if (smgr->previousChar() == '\n' && idx + len < ctx->script_size) { size_t i; for (i = 0; i < len && script[idx + i] == here_document_tag.at(i); i++); char tag_after_char = script[idx + i]; if (i == len && (tag_after_char == '\n' || tag_after_char == EOL)) { ctx->progress = len; if (verbose) ctx->finfo.start_line_num++; Token *tk = ctx->tmgr->new_Token(ctx->buffer(), ctx->finfo); tk->info = tmgr->getTokenInfo(TokenType::HereDocument); ctx->clearBuffer(); tmgr->add(tk); tk = ctx->tmgr->new_Token((char *)here_document_tag_tk->_data, ctx->finfo); tk->info = tmgr->getTokenInfo(TokenType::HereDocumentEnd); tmgr->add(tk); here_document_tags.pop(); skipFlag = false; ret = false; } else { ctx->writeBuffer(script[idx]); ret = true; } } else { ctx->writeBuffer(script[idx]); ret = true; } } return ret; } Compiler_double_charactor_operator.cpp100644000765000024 650413603257356 27104 0ustar00goccystaff000000000000Compiler-Lexer-0.23/src/compiler/util/* C++ code produced by gperf version 3.0.3 */ /* Command-line: gperf -L C++ gen/double_charactor_operator.gperf */ /* Computed positions: -k'1-2' */ #include /* maximum key range = 201, duplicates = 0 */ inline unsigned int DoubleCharactorOperatorMap::hash(register const char *str) { static unsigned char asso_values[] = { 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 72, 203, 203, 5, 20, 75, 69, 125, 87, 2, 62, 64, 65, 32, 120, 59, 25, 47, 37, 54, 49, 44, 39, 34, 24, 110, 19, 50, 22, 12, 14, 10, 0, 35, 9, 4, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 4, 127, 122, 112, 97, 203, 92, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 0, 203, 203, 203, 82, 203, 60, 100, 5, 203, 90, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203 }; return asso_values[(unsigned char)str[1]+4] + asso_values[(unsigned char)str[0]]; } const char *DoubleCharactorOperatorMap::in_word_set(register const char *str) { static const char *double_charactor_operators[] = { "x=", "", "*=", "", "^=", "$=", "", "$&", "", "$@", "$|", "", ">=", "", "$?", "$<", "", "$:", "", 
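/* The empty strings are unused slots of the gperf-generated perfect-hash table; hash()
 * above maps a two-character operator to its index here. scanDoubleCharacterOperator
 * probes the table with an EOL-terminated two-character key and treats a non-NULL
 * in_word_set() result as a known operator. */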
"$;", "%=", "", "==", "", "$7", "$!", "", "$9", "", "$5", "$-", "", ".=", "", "**", "@-", "", "$*", "", "$4", "$>", "", "$/", "", "$3", "%-", "", ">>", "", "$2", "<=", "", "$.", "", "$1", "$8", "", "=>", "", "$0", "<<", "", "+=", "", "$,", "-=", "", "$'", "", "$(", "$)", "", "!=", "", "$#", "&=", "", "&&", "", "..", "$\"", "", "|=", "", "", "<>", "", "||", "", "", "--", "", "$%", "", "", "$~", "", "$`", "", "", "->", "", "$^", "", "", "${", "", "%%", "", "", "@{", "", "=~", "", "", "$6", "", "$]", "", "", "%{", "", "::", "", "", "$+", "", "$\\", "", "", "@+", "", "$[", "", "", "@$", "", "", "", "", "%+", "", "", "", "", "%$", "", "", "", "", "~~", "", "", "", "", "", "", "//", "", "", "", "", "!~", "", "", "", "", "", "", "", "", "", "", "", "", "&{", "", "", "", "", "", "", "++", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "&$" }; register int key = hash(str); if (key <= DOUBLE_OPERATOR_MAX_HASH_VALUE && key >= 0) { register const char *s = double_charactor_operators[key]; if (*str == *s && !strcmp (str + 1, s + 1)) return s; } return 0; } Compiler_gen_token_decl.cpp100644000765000024 16334113603257356 24674 0ustar00goccystaff000000000000Compiler-Lexer-0.23/src/compiler/util#include TokenInfo decl_tokens[] = { {Enum::Token::Type::Return, Enum::Token::Kind::Return, "Return", "return"}, {Enum::Token::Type::Add, Enum::Token::Kind::Operator, "Add", "+"}, {Enum::Token::Type::Sub, Enum::Token::Kind::Operator, "Sub", "-"}, {Enum::Token::Type::Mul, Enum::Token::Kind::Operator, "Mul", "*"}, {Enum::Token::Type::Div, Enum::Token::Kind::Operator, "Div", "/"}, {Enum::Token::Type::Mod, Enum::Token::Kind::Operator, "Mod", "%"}, {Enum::Token::Type::ThreeTermOperator, Enum::Token::Kind::Operator, "ThreeTermOperator", "?"}, {Enum::Token::Type::Greater, Enum::Token::Kind::Operator, "Greater", ">"}, {Enum::Token::Type::Less, Enum::Token::Kind::Operator, "Less", "<"}, {Enum::Token::Type::StringAdd, Enum::Token::Kind::Operator, "StringAdd", "."}, {Enum::Token::Type::Ref, Enum::Token::Kind::Operator, "Ref", "\\"}, {Enum::Token::Type::Glob, Enum::Token::Kind::Operator, "Glob", "*"}, {Enum::Token::Type::BitNot, Enum::Token::Kind::Operator, "BitNot", "~"}, {Enum::Token::Type::BitOr, Enum::Token::Kind::Operator, "BitOr", "|"}, {Enum::Token::Type::AlphabetOr, Enum::Token::Kind::Operator, "AlphabetOr", "or"}, {Enum::Token::Type::BitAnd, Enum::Token::Kind::Operator, "BitAnd", "&"}, {Enum::Token::Type::AlphabetAnd, Enum::Token::Kind::Operator, "AlphabetAnd", "and"}, {Enum::Token::Type::BitXOr, Enum::Token::Kind::Operator, "BitXOr", "^"}, {Enum::Token::Type::AlphabetXOr, Enum::Token::Kind::Operator, "AlphabetXOr", "xor"}, {Enum::Token::Type::StringMul, Enum::Token::Kind::Operator, "StringMul", "x"}, {Enum::Token::Type::AddEqual, Enum::Token::Kind::Assign, "AddEqual", "+="}, {Enum::Token::Type::SubEqual, Enum::Token::Kind::Assign, "SubEqual", "-="}, {Enum::Token::Type::MulEqual, Enum::Token::Kind::Assign, "MulEqual", "*="}, {Enum::Token::Type::DivEqual, Enum::Token::Kind::Assign, "DivEqual", "/="}, {Enum::Token::Type::ModEqual, Enum::Token::Kind::Assign, "ModEqual", "%="}, {Enum::Token::Type::StringAddEqual, Enum::Token::Kind::Assign, "StringAddEqual", ".="}, {Enum::Token::Type::LeftShiftEqual, Enum::Token::Kind::Assign, "LeftShiftEqual", "<<="}, {Enum::Token::Type::RightShiftEqual, Enum::Token::Kind::Assign, "RightShiftEqual", ">>="}, {Enum::Token::Type::StringMulEqual, Enum::Token::Kind::Assign, "StringMulEqual", "x="}, {Enum::Token::Type::GreaterEqual, Enum::Token::Kind::Operator, 
"GreaterEqual", ">="}, {Enum::Token::Type::LessEqual, Enum::Token::Kind::Operator, "LessEqual", "<="}, {Enum::Token::Type::EqualEqual, Enum::Token::Kind::Operator, "EqualEqual", "=="}, {Enum::Token::Type::Diamond, Enum::Token::Kind::Operator, "Diamond", "<>"}, {Enum::Token::Type::Compare, Enum::Token::Kind::Operator, "Compare", "<=>"}, {Enum::Token::Type::PolymorphicCompare, Enum::Token::Kind::Operator, "PolymorphicCompare", "~~"}, {Enum::Token::Type::RegOK, Enum::Token::Kind::Operator, "RegOK", "=~"}, {Enum::Token::Type::RegNot, Enum::Token::Kind::Operator, "RegNot", "!~"}, {Enum::Token::Type::NotEqual, Enum::Token::Kind::Operator, "NotEqual", "!="}, {Enum::Token::Type::StringLess, Enum::Token::Kind::Operator, "StringLess", "lt"}, {Enum::Token::Type::StringLessEqual, Enum::Token::Kind::Operator, "StringLessEqual", "le"}, {Enum::Token::Type::StringGreater, Enum::Token::Kind::Operator, "StringGreater", "gt"}, {Enum::Token::Type::StringGreaterEqual, Enum::Token::Kind::Operator, "StringGreaterEqual", "ge"}, {Enum::Token::Type::StringEqual, Enum::Token::Kind::Operator, "StringEqual", "eq"}, {Enum::Token::Type::StringNotEqual, Enum::Token::Kind::Operator, "StringNotEqual", "ne"}, {Enum::Token::Type::StringCompare, Enum::Token::Kind::Operator, "StringCompare", "cmp"}, {Enum::Token::Type::Inc, Enum::Token::Kind::Operator, "Inc", "++"}, {Enum::Token::Type::Dec, Enum::Token::Kind::Operator, "Dec", "--"}, {Enum::Token::Type::Exp, Enum::Token::Kind::Operator, "Exp", "**"}, {Enum::Token::Type::PowerEqual, Enum::Token::Kind::Assign, "PowerEqual", "**="}, {Enum::Token::Type::DefaultEqual, Enum::Token::Kind::Assign, "DefaultEqual", "//="}, {Enum::Token::Type::LeftShift, Enum::Token::Kind::Operator, "LeftShift", "<<"}, {Enum::Token::Type::RightShift, Enum::Token::Kind::Operator, "RightShift", ">>"}, {Enum::Token::Type::And, Enum::Token::Kind::Operator, "And", "&&"}, {Enum::Token::Type::Or, Enum::Token::Kind::Operator, "Or", "||"}, {Enum::Token::Type::AndBitEqual, Enum::Token::Kind::Assign, "AndBitEqual", "&="}, {Enum::Token::Type::OrBitEqual, Enum::Token::Kind::Assign, "OrBitEqual", "|="}, {Enum::Token::Type::NotBitEqual, Enum::Token::Kind::Assign, "NotBitEqual", "^="}, {Enum::Token::Type::OrEqual, Enum::Token::Kind::Assign, "OrEqual", "||="}, {Enum::Token::Type::AndEqual, Enum::Token::Kind::Assign, "AndEqual", "&&="}, {Enum::Token::Type::Slice, Enum::Token::Kind::Operator, "Slice", ".."}, {Enum::Token::Type::DefaultOperator, Enum::Token::Kind::Operator, "DefaultOperator", "//"}, {Enum::Token::Type::ToDo, Enum::Token::Kind::Operator, "ToDo", "..."}, {Enum::Token::Type::VarDecl, Enum::Token::Kind::Decl, "VarDecl", "my"}, {Enum::Token::Type::FunctionDecl, Enum::Token::Kind::Decl, "FunctionDecl", "sub"}, {Enum::Token::Type::Method, Enum::Token::Kind::Function, "Method", ""}, {Enum::Token::Type::Assign, Enum::Token::Kind::Assign, "Assign", "="}, {Enum::Token::Type::ArraySize, Enum::Token::Kind::SingleTerm, "ArraySize", "$#"}, {Enum::Token::Type::Is, Enum::Token::Kind::SingleTerm, "Is", ""}, {Enum::Token::Type::Not, Enum::Token::Kind::SingleTerm, "Not", "!"}, {Enum::Token::Type::AlphabetNot, Enum::Token::Kind::SingleTerm, "AlphabetNot", "not"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chomp"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chop"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chr"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "crypt"}, 
{Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "index"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "lc"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "lcfirst"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "length"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "ord"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "pack"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "unpack"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sort"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "reverse"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rindex"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sprintf"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "substr"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "uc"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "ucfirst"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "pos"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "quotemeta"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "split"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "study"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "pop"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "push"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "splice"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shift"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "unshift"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "grep"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "join"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "map"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "delete"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "each"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "exists"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "keys"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "values"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "binmode"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "close"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "closedir"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "dbmclose"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "dbmopen"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "die"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "eof"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "fileno"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "flock"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, 
"BuiltinFunc", "format"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getc"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "print"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "say"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "printf"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "read"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "readdir"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rewinddir"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "seek"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "seekdir"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "select"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "syscall"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sysread"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sysseek"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "syswrite"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "tell"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "telldir"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "truncate"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "warn"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "write"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "vec"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chdir"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chmod"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chown"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chroot"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "fcntl"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "glob"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "ioctl"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "link"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "lstat"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "mkdir"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "open"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "opendir"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "readlink"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rename"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rmdir"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "stat"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "symlink"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "umask"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "unlink"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "utime"}, 
{Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "caller"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "dump"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "eval"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "exit"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "wantarray"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "formline"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "reset"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "scalar"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "alarm"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "exec"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "fork"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpgrp"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getppid"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpriority"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "kill"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "pipe"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setpgrp"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setpriority"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sleep"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "system"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "times"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "wait"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "waitpid"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "no"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "tie"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "tied"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "untie"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "accept"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "bind"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "connect"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpeername"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getsockname"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getsockopt"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "listen"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "recv"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "send"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setsockopt"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shutdown"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "socket"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "socketpair"}, 
{Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "msgctl"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "msgget"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "msgrcv"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "msgsnd"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "semctl"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "semget"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "semop"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shmctl"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shmget"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shmread"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shmwrite"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endgrent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endhostent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endnetent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endpwent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getgrent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getgrgid"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getgrnam"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getlogin"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpwent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpwnam"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpwuid"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setgrent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setpwent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endprotoent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endservent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "gethostbyaddr"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "gethostbyname"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "gethostent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getnetbyaddr"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getnetbyname"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getnetent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getprotobyname"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getprotobynumber"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getprotoent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getservbyname"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getservbyport"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getservent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sethostent"}, 
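/* time, ref/bless/defined, and math builtins close the BuiltinFunc block; declaration
 * keywords and special block/handle names follow */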
{Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setnetent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setprotoent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setservent"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "gmtime"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "localtime"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "time"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "ref"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "bless"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "defined"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "abs"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "atan2"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "cos"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "exp"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "hex"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "int"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "log"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "oct"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rand"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sin"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sqrt"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "srand"}, {Enum::Token::Type::RequireDecl, Enum::Token::Kind::Decl, "RequireDecl", "require"}, {Enum::Token::Type::Import, Enum::Token::Kind::Import, "Import", "import"}, {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__PACKAGE__"}, {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__FILE__"}, {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__LINE__"}, {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__SUB__"}, {Enum::Token::Type::DataWord, Enum::Token::Kind::DataWord, "DataWord", "__DATA__"}, {Enum::Token::Type::DataWord, Enum::Token::Kind::DataWord, "DataWord", "__END__"}, {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "BEGIN"}, {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "CHECK"}, {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "INIT"}, {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "END"}, {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "UNITCHECK"}, {Enum::Token::Type::AUTOLOAD, Enum::Token::Kind::AUTOLOAD, "AUTOLOAD", "AUTOLOAD"}, {Enum::Token::Type::CORE, Enum::Token::Kind::CORE, "CORE", "CORE"}, {Enum::Token::Type::DESTROY, Enum::Token::Kind::DESTROY, "DESTROY", "DESTROY"}, {Enum::Token::Type::STDIN, Enum::Token::Kind::Handle, "STDIN", "STDIN"}, {Enum::Token::Type::STDOUT, Enum::Token::Kind::Handle, "STDOUT", "STDOUT"}, {Enum::Token::Type::STDERR, Enum::Token::Kind::Handle, "STDERR", "STDERR"}, {Enum::Token::Type::Redo, Enum::Token::Kind::Control, "Redo", "redo"}, {Enum::Token::Type::Next, Enum::Token::Kind::Control, 
"Next", "next"}, {Enum::Token::Type::Last, Enum::Token::Kind::Control, "Last", "last"}, {Enum::Token::Type::Goto, Enum::Token::Kind::Control, "Goto", "goto"}, {Enum::Token::Type::Continue, Enum::Token::Kind::Control, "Continue", "continue"}, {Enum::Token::Type::Do, Enum::Token::Kind::Do, "Do", "do"}, {Enum::Token::Type::Break, Enum::Token::Kind::Control, "Break", "break"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-b"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-c"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-d"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-e"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-f"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-g"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-k"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-l"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-o"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-p"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-r"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-s"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-t"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-u"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-w"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-x"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-z"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-A"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-B"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-C"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-M"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-O"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-R"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-S"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-T"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-W"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-X"}, {Enum::Token::Type::LocalDecl, Enum::Token::Kind::Decl, "LocalDecl", "local"}, {Enum::Token::Type::OurDecl, Enum::Token::Kind::Decl, "OurDecl", "our"}, {Enum::Token::Type::StateDecl, Enum::Token::Kind::Decl, "StateDecl", "state"}, {Enum::Token::Type::UseDecl, Enum::Token::Kind::Decl, "UseDecl", "use"}, {Enum::Token::Type::UsedName, Enum::Token::Kind::Module, "UsedName", ""}, {Enum::Token::Type::RequiredName, Enum::Token::Kind::Module, "RequiredName", ""}, {Enum::Token::Type::IfStmt, Enum::Token::Kind::Stmt, "IfStmt", "if"}, {Enum::Token::Type::ElseStmt, Enum::Token::Kind::Stmt, "ElseStmt", "else"}, {Enum::Token::Type::ElsifStmt, Enum::Token::Kind::Stmt, "ElsifStmt", "elsif"}, {Enum::Token::Type::UnlessStmt, Enum::Token::Kind::Stmt, "UnlessStmt", "unless"}, {Enum::Token::Type::UntilStmt, Enum::Token::Kind::Stmt, "UntilStmt", "until"}, {Enum::Token::Type::WhenStmt, Enum::Token::Kind::Stmt, "WhenStmt", "when"}, {Enum::Token::Type::GivenStmt, Enum::Token::Kind::Stmt, "GivenStmt", "given"}, {Enum::Token::Type::DefaultStmt, Enum::Token::Kind::DefaultStmt, "DefaultStmt", "default"}, {Enum::Token::Type::Comma, Enum::Token::Kind::Comma, "Comma", ","}, {Enum::Token::Type::Colon, Enum::Token::Kind::Colon, "Colon", ":"}, 
{Enum::Token::Type::SemiColon, Enum::Token::Kind::StmtEnd, "SemiColon", ";"}, {Enum::Token::Type::LeftParenthesis, Enum::Token::Kind::Symbol, "LeftParenthesis", "("}, {Enum::Token::Type::RightParenthesis, Enum::Token::Kind::Symbol, "RightParenthesis", ")"}, {Enum::Token::Type::LeftBrace, Enum::Token::Kind::Symbol, "LeftBrace", "{"}, {Enum::Token::Type::RightBrace, Enum::Token::Kind::Symbol, "RightBrace", "}"}, {Enum::Token::Type::LeftBracket, Enum::Token::Kind::Symbol, "LeftBracket", "["}, {Enum::Token::Type::RightBracket, Enum::Token::Kind::Symbol, "RightBracket", "]"}, {Enum::Token::Type::ArrayDereference, Enum::Token::Kind::Modifier, "ArrayDereference", "@{"}, {Enum::Token::Type::HashDereference, Enum::Token::Kind::Modifier, "HashDereference", "%{"}, {Enum::Token::Type::ScalarDereference, Enum::Token::Kind::Modifier, "ScalarDereference", "${"}, {Enum::Token::Type::CodeDereference, Enum::Token::Kind::Modifier, "CodeDereference", "&{"}, {Enum::Token::Type::ShortScalarDereference, Enum::Token::Kind::Modifier, "ShortScalarDereference", ""}, {Enum::Token::Type::ShortArrayDereference, Enum::Token::Kind::Modifier, "ShortArrayDereference", "@$"}, {Enum::Token::Type::ShortHashDereference, Enum::Token::Kind::Modifier, "ShortHashDereference", "%$"}, {Enum::Token::Type::ShortCodeDereference, Enum::Token::Kind::Modifier, "ShortCodeDereference", "&$"}, {Enum::Token::Type::ArraySizeDereference, Enum::Token::Kind::Modifier, "ArraySizeDereference", "$#{"}, {Enum::Token::Type::Key, Enum::Token::Kind::Term, "Key", ""}, {Enum::Token::Type::BareWord, Enum::Token::Kind::Term, "BareWord", ""}, {Enum::Token::Type::Arrow, Enum::Token::Kind::Operator, "Arrow", "=>"}, {Enum::Token::Type::Pointer, Enum::Token::Kind::Operator, "Pointer", "->"}, {Enum::Token::Type::NamespaceResolver, Enum::Token::Kind::Operator, "NamespaceResolver", "::"}, {Enum::Token::Type::Namespace, Enum::Token::Kind::Namespace, "Namespace", ""}, {Enum::Token::Type::Package, Enum::Token::Kind::Package, "Package", "package"}, {Enum::Token::Type::Class, Enum::Token::Kind::Class, "Class", ""}, {Enum::Token::Type::CallDecl, Enum::Token::Kind::Decl, "CallDecl", "&"}, {Enum::Token::Type::CodeRef, Enum::Token::Kind::SingleTerm, "CodeRef", "\\&"}, {Enum::Token::Type::WhileStmt, Enum::Token::Kind::Stmt, "WhileStmt", "while"}, {Enum::Token::Type::ForStmt, Enum::Token::Kind::Stmt, "ForStmt", "for"}, {Enum::Token::Type::ForeachStmt, Enum::Token::Kind::Stmt, "ForeachStmt", "foreach"}, {Enum::Token::Type::Annotation, Enum::Token::Kind::Annotation, "Annotation", "#@"}, {Enum::Token::Type::ArgumentArray, Enum::Token::Kind::Term, "ArgumentArray", "@_"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$_"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$0"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$1"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$2"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$3"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$4"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$5"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$6"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$7"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$8"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$9"}, 
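/* punctuation variables ($&, $!, $@, ...) and $^A-style control variables continue */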
{Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$&"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$`"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$'"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$+"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$."}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$/"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$|"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$*"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$,"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$\\"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$\""}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$%"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$="}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$-"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$~"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$:"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$?"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$!"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$@"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$$"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$<"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$>"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$("}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$)"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$["}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$]"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$;"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^A"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^D"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^E"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^F"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^G"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^H"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^I"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^L"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^M"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^O"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^P"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^R"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^T"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^W"}, 
{Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^X"}, {Enum::Token::Type::ConstValue, Enum::Token::Kind::Term, "ConstValue", ""}, {Enum::Token::Type::ProgramArgument, Enum::Token::Kind::Term, "ProgramArgument", "@ARGV"}, {Enum::Token::Type::LibraryDirectories, Enum::Token::Kind::Term, "LibraryDirectories", "@INC"}, {Enum::Token::Type::Environment, Enum::Token::Kind::Term, "Environment", "%ENV"}, {Enum::Token::Type::Include, Enum::Token::Kind::Term, "Include", "%INC"}, {Enum::Token::Type::Signal, Enum::Token::Kind::Term, "Signal", "%SIG"}, {Enum::Token::Type::RegOpt, Enum::Token::Kind::RegOpt, "RegOpt", ""}, {Enum::Token::Type::RegQuote, Enum::Token::Kind::RegPrefix, "RegQuote", "q"}, {Enum::Token::Type::RegDoubleQuote, Enum::Token::Kind::RegPrefix, "RegDoubleQuote", "qq"}, {Enum::Token::Type::RegList, Enum::Token::Kind::RegPrefix, "RegList", "qw"}, {Enum::Token::Type::RegExec, Enum::Token::Kind::RegPrefix, "RegExec", "qx"}, {Enum::Token::Type::RegDecl, Enum::Token::Kind::RegPrefix, "RegDecl", "qr"}, {Enum::Token::Type::RegMatch, Enum::Token::Kind::RegPrefix, "RegMatch", "m"}, {Enum::Token::Type::RegDelim, Enum::Token::Kind::Term, "RegDelim", ""}, {Enum::Token::Type::HandleDelim, Enum::Token::Kind::Term, "HandleDelim", ""}, {Enum::Token::Type::RegMiddleDelim, Enum::Token::Kind::Term, "RegMiddleDelim", ""}, {Enum::Token::Type::RegAllReplace, Enum::Token::Kind::RegReplacePrefix, "RegAllReplace", "tr"}, {Enum::Token::Type::RegAllReplace, Enum::Token::Kind::RegReplacePrefix, "RegAllReplace", "y"}, {Enum::Token::Type::RegReplace, Enum::Token::Kind::RegReplacePrefix, "RegReplace", "s"}, {Enum::Token::Type::RegReplaceFrom, Enum::Token::Kind::Term, "RegReplaceFrom", ""}, {Enum::Token::Type::RegReplaceTo, Enum::Token::Kind::Term, "RegReplaceTo", ""}, {Enum::Token::Type::FieldDecl, Enum::Token::Kind::Decl, "FieldDecl", ""}, {Enum::Token::Type::TypeRef, Enum::Token::Kind::Ref, "TypeRef", ""}, {Enum::Token::Type::LabelRef, Enum::Token::Kind::Ref, "LabelRef", ""}, {Enum::Token::Type::LocalVarDecl, Enum::Token::Kind::Decl, "LocalVarDecl", ""}, {Enum::Token::Type::GlobalVarDecl, Enum::Token::Kind::Decl, "GlobalVarDecl", ""}, {Enum::Token::Type::MultiLocalVarDecl, Enum::Token::Kind::Decl, "MultiLocalVarDecl", ""}, {Enum::Token::Type::MultiGlobalVarDecl, Enum::Token::Kind::Decl, "MultiGlobalVarDecl", ""}, {Enum::Token::Type::Prototype, Enum::Token::Kind::Term, "Prototype", ""}, {Enum::Token::Type::Var, Enum::Token::Kind::Term, "Var", ""}, {Enum::Token::Type::CodeVar, Enum::Token::Kind::Term, "CodeVar", ""}, {Enum::Token::Type::ArrayVar, Enum::Token::Kind::Term, "ArrayVar", ""}, {Enum::Token::Type::HashVar, Enum::Token::Kind::Term, "HashVar", ""}, {Enum::Token::Type::Int, Enum::Token::Kind::Term, "Int", ""}, {Enum::Token::Type::Double, Enum::Token::Kind::Term, "Double", ""}, {Enum::Token::Type::String, Enum::Token::Kind::Term, "String", ""}, {Enum::Token::Type::RawString, Enum::Token::Kind::Term, "RawString", ""}, {Enum::Token::Type::ExecString, Enum::Token::Kind::Term, "ExecString", ""}, {Enum::Token::Type::VersionString, Enum::Token::Kind::Term, "VersionString", ""}, {Enum::Token::Type::HereDocumentTag, Enum::Token::Kind::Term, "HereDocumentTag", ""}, {Enum::Token::Type::HereDocumentRawTag, Enum::Token::Kind::Term, "HereDocumentRawTag", ""}, {Enum::Token::Type::HereDocumentExecTag, Enum::Token::Kind::Term, "HereDocumentExecTag", ""}, {Enum::Token::Type::HereDocumentBareTag, Enum::Token::Kind::Term, "HereDocumentBareTag", ""}, {Enum::Token::Type::RawHereDocument, 
Enum::Token::Kind::Term, "RawHereDocument", ""}, {Enum::Token::Type::HereDocument, Enum::Token::Kind::Term, "HereDocument", ""}, {Enum::Token::Type::HereDocumentEnd, Enum::Token::Kind::Term, "HereDocumentEnd", ""}, {Enum::Token::Type::FormatDecl, Enum::Token::Kind::Decl, "FormatDecl", ""}, {Enum::Token::Type::Format, Enum::Token::Kind::Term, "Format", ""}, {Enum::Token::Type::FormatEnd, Enum::Token::Kind::Term, "FormatEnd", ""}, {Enum::Token::Type::Object, Enum::Token::Kind::Term, "Object", ""}, {Enum::Token::Type::RegExp, Enum::Token::Kind::Term, "RegExp", ""}, {Enum::Token::Type::Array, Enum::Token::Kind::Term, "Array", ""}, {Enum::Token::Type::Hash, Enum::Token::Kind::Term, "Hash", ""}, {Enum::Token::Type::Operator, Enum::Token::Kind::Operator, "Operator", ""}, {Enum::Token::Type::LocalVar, Enum::Token::Kind::Term, "LocalVar", ""}, {Enum::Token::Type::LocalArrayVar, Enum::Token::Kind::Term, "LocalArrayVar", ""}, {Enum::Token::Type::LocalHashVar, Enum::Token::Kind::Term, "LocalHashVar", ""}, {Enum::Token::Type::GlobalVar, Enum::Token::Kind::Term, "GlobalVar", ""}, {Enum::Token::Type::GlobalArrayVar, Enum::Token::Kind::Term, "GlobalArrayVar", ""}, {Enum::Token::Type::GlobalHashVar, Enum::Token::Kind::Term, "GlobalHashVar", ""}, {Enum::Token::Type::ArrayRef, Enum::Token::Kind::Ref, "ArrayRef", ""}, {Enum::Token::Type::HashRef, Enum::Token::Kind::Ref, "HashRef", ""}, {Enum::Token::Type::ArrayAt, Enum::Token::Kind::Get, "ArrayAt", ""}, {Enum::Token::Type::HashAt, Enum::Token::Kind::Get, "HashAt", ""}, {Enum::Token::Type::ArraySet, Enum::Token::Kind::Set, "ArraySet", ""}, {Enum::Token::Type::HashSet, Enum::Token::Kind::Set, "HashSet", ""}, {Enum::Token::Type::Function, Enum::Token::Kind::Decl, "Function", ""}, {Enum::Token::Type::Call, Enum::Token::Kind::Function, "Call", ""}, {Enum::Token::Type::Argument, Enum::Token::Kind::Term, "Argument", ""}, {Enum::Token::Type::List, Enum::Token::Kind::Term, "List", ""}, {Enum::Token::Type::Default, Enum::Token::Kind::Term, "Default", "undef"}, {Enum::Token::Type::Pod, Enum::Token::Kind::Verbose, "Pod", ""}, {Enum::Token::Type::Comment, Enum::Token::Kind::Verbose, "Comment", ""}, {Enum::Token::Type::WhiteSpace, Enum::Token::Kind::Verbose, "WhiteSpace", ""}, {Enum::Token::Type::Undefined, Enum::Token::Kind::Undefined, "Undefined", ""}, {Enum::Token::Type::PostDeref, Enum::Token::Kind::Symbol, "PostDeref", ""}, {Enum::Token::Type::PostDerefStar, Enum::Token::Kind::Symbol, "PostDerefStar", ""}, {Enum::Token::Type::PostDerefArraySliceOpenBracket, Enum::Token::Kind::Symbol, "PostDerefArraySliceOpenBracket", ""}, {Enum::Token::Type::PostDerefArraySliceCloseBracket, Enum::Token::Kind::Symbol, "PostDerefArraySliceCloseBracket", ""}, {Enum::Token::Type::PostDerefHashSliceOpenBrace, Enum::Token::Kind::Symbol, "PostDerefHashSliceOpenBrace", ""}, {Enum::Token::Type::PostDerefHashSliceCloseBrace, Enum::Token::Kind::Symbol, "PostDerefHashSliceCloseBrace", ""}, {Enum::Token::Type::PostDerefCodeOpenParen, Enum::Token::Kind::Symbol, "PostDerefCodeOpenParen", ""}, {Enum::Token::Type::PostDerefCodeCloseParen, Enum::Token::Kind::Symbol, "PostDerefCodeCloseParen", ""} }; TokenInfo type_to_info[] = { {Enum::Token::Type::Return, Enum::Token::Kind::Return, "Return", "return"}, {Enum::Token::Type::Add, Enum::Token::Kind::Operator, "Add", "+"}, {Enum::Token::Type::Sub, Enum::Token::Kind::Operator, "Sub", "-"}, {Enum::Token::Type::Mul, Enum::Token::Kind::Operator, "Mul", "*"}, {Enum::Token::Type::Div, Enum::Token::Kind::Operator, "Div", "/"}, {Enum::Token::Type::Mod, 
Enum::Token::Kind::Operator, "Mod", "%"}, {Enum::Token::Type::ThreeTermOperator, Enum::Token::Kind::Operator, "ThreeTermOperator", "?"}, {Enum::Token::Type::Greater, Enum::Token::Kind::Operator, "Greater", ">"}, {Enum::Token::Type::Less, Enum::Token::Kind::Operator, "Less", "<"}, {Enum::Token::Type::StringAdd, Enum::Token::Kind::Operator, "StringAdd", "."}, {Enum::Token::Type::Ref, Enum::Token::Kind::Operator, "Ref", "\\"}, {Enum::Token::Type::Glob, Enum::Token::Kind::Operator, "Glob", "*"}, {Enum::Token::Type::BitNot, Enum::Token::Kind::Operator, "BitNot", "~"}, {Enum::Token::Type::BitOr, Enum::Token::Kind::Operator, "BitOr", "|"}, {Enum::Token::Type::AlphabetOr, Enum::Token::Kind::Operator, "AlphabetOr", "or"}, {Enum::Token::Type::BitAnd, Enum::Token::Kind::Operator, "BitAnd", "&"}, {Enum::Token::Type::AlphabetAnd, Enum::Token::Kind::Operator, "AlphabetAnd", "and"}, {Enum::Token::Type::BitXOr, Enum::Token::Kind::Operator, "BitXOr", "^"}, {Enum::Token::Type::AlphabetXOr, Enum::Token::Kind::Operator, "AlphabetXOr", "xor"}, {Enum::Token::Type::StringMul, Enum::Token::Kind::Operator, "StringMul", "x"}, {Enum::Token::Type::AddEqual, Enum::Token::Kind::Assign, "AddEqual", "+="}, {Enum::Token::Type::SubEqual, Enum::Token::Kind::Assign, "SubEqual", "-="}, {Enum::Token::Type::MulEqual, Enum::Token::Kind::Assign, "MulEqual", "*="}, {Enum::Token::Type::DivEqual, Enum::Token::Kind::Assign, "DivEqual", "/="}, {Enum::Token::Type::ModEqual, Enum::Token::Kind::Assign, "ModEqual", "%="}, {Enum::Token::Type::StringAddEqual, Enum::Token::Kind::Assign, "StringAddEqual", ".="}, {Enum::Token::Type::LeftShiftEqual, Enum::Token::Kind::Assign, "LeftShiftEqual", "<<="}, {Enum::Token::Type::RightShiftEqual, Enum::Token::Kind::Assign, "RightShiftEqual", ">>="}, {Enum::Token::Type::StringMulEqual, Enum::Token::Kind::Assign, "StringMulEqual", "x="}, {Enum::Token::Type::GreaterEqual, Enum::Token::Kind::Operator, "GreaterEqual", ">="}, {Enum::Token::Type::LessEqual, Enum::Token::Kind::Operator, "LessEqual", "<="}, {Enum::Token::Type::EqualEqual, Enum::Token::Kind::Operator, "EqualEqual", "=="}, {Enum::Token::Type::Diamond, Enum::Token::Kind::Operator, "Diamond", "<>"}, {Enum::Token::Type::Compare, Enum::Token::Kind::Operator, "Compare", "<=>"}, {Enum::Token::Type::PolymorphicCompare, Enum::Token::Kind::Operator, "PolymorphicCompare", "~~"}, {Enum::Token::Type::RegOK, Enum::Token::Kind::Operator, "RegOK", "=~"}, {Enum::Token::Type::RegNot, Enum::Token::Kind::Operator, "RegNot", "!~"}, {Enum::Token::Type::NotEqual, Enum::Token::Kind::Operator, "NotEqual", "!="}, {Enum::Token::Type::StringLess, Enum::Token::Kind::Operator, "StringLess", "lt"}, {Enum::Token::Type::StringLessEqual, Enum::Token::Kind::Operator, "StringLessEqual", "le"}, {Enum::Token::Type::StringGreater, Enum::Token::Kind::Operator, "StringGreater", "gt"}, {Enum::Token::Type::StringGreaterEqual, Enum::Token::Kind::Operator, "StringGreaterEqual", "ge"}, {Enum::Token::Type::StringEqual, Enum::Token::Kind::Operator, "StringEqual", "eq"}, {Enum::Token::Type::StringNotEqual, Enum::Token::Kind::Operator, "StringNotEqual", "ne"}, {Enum::Token::Type::StringCompare, Enum::Token::Kind::Operator, "StringCompare", "cmp"}, {Enum::Token::Type::Inc, Enum::Token::Kind::Operator, "Inc", "++"}, {Enum::Token::Type::Dec, Enum::Token::Kind::Operator, "Dec", "--"}, {Enum::Token::Type::Exp, Enum::Token::Kind::Operator, "Exp", "**"}, {Enum::Token::Type::PowerEqual, Enum::Token::Kind::Assign, "PowerEqual", "**="}, {Enum::Token::Type::DefaultEqual, Enum::Token::Kind::Assign, 
"DefaultEqual", "//="}, {Enum::Token::Type::LeftShift, Enum::Token::Kind::Operator, "LeftShift", "<<"}, {Enum::Token::Type::RightShift, Enum::Token::Kind::Operator, "RightShift", ">>"}, {Enum::Token::Type::And, Enum::Token::Kind::Operator, "And", "&&"}, {Enum::Token::Type::Or, Enum::Token::Kind::Operator, "Or", "||"}, {Enum::Token::Type::AndBitEqual, Enum::Token::Kind::Assign, "AndBitEqual", "&="}, {Enum::Token::Type::OrBitEqual, Enum::Token::Kind::Assign, "OrBitEqual", "|="}, {Enum::Token::Type::NotBitEqual, Enum::Token::Kind::Assign, "NotBitEqual", "^="}, {Enum::Token::Type::OrEqual, Enum::Token::Kind::Assign, "OrEqual", "||="}, {Enum::Token::Type::AndEqual, Enum::Token::Kind::Assign, "AndEqual", "&&="}, {Enum::Token::Type::Slice, Enum::Token::Kind::Operator, "Slice", ".."}, {Enum::Token::Type::DefaultOperator, Enum::Token::Kind::Operator, "DefaultOperator", "//"}, {Enum::Token::Type::ToDo, Enum::Token::Kind::Operator, "ToDo", "..."}, {Enum::Token::Type::VarDecl, Enum::Token::Kind::Decl, "VarDecl", "my"}, {Enum::Token::Type::FunctionDecl, Enum::Token::Kind::Decl, "FunctionDecl", "sub"}, {Enum::Token::Type::Method, Enum::Token::Kind::Function, "Method", ""}, {Enum::Token::Type::Assign, Enum::Token::Kind::Assign, "Assign", "="}, {Enum::Token::Type::ArraySize, Enum::Token::Kind::SingleTerm, "ArraySize", "$#"}, {Enum::Token::Type::Is, Enum::Token::Kind::SingleTerm, "Is", ""}, {Enum::Token::Type::Not, Enum::Token::Kind::SingleTerm, "Not", "!"}, {Enum::Token::Type::AlphabetNot, Enum::Token::Kind::SingleTerm, "AlphabetNot", "not"}, {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chomp"}, {Enum::Token::Type::RequireDecl, Enum::Token::Kind::Decl, "RequireDecl", "require"}, {Enum::Token::Type::Import, Enum::Token::Kind::Import, "Import", "import"}, {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__PACKAGE__"}, {Enum::Token::Type::DataWord, Enum::Token::Kind::DataWord, "DataWord", "__DATA__"}, {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "BEGIN"}, {Enum::Token::Type::AUTOLOAD, Enum::Token::Kind::AUTOLOAD, "AUTOLOAD", "AUTOLOAD"}, {Enum::Token::Type::CORE, Enum::Token::Kind::CORE, "CORE", "CORE"}, {Enum::Token::Type::DESTROY, Enum::Token::Kind::DESTROY, "DESTROY", "DESTROY"}, {Enum::Token::Type::STDIN, Enum::Token::Kind::Handle, "STDIN", "STDIN"}, {Enum::Token::Type::STDOUT, Enum::Token::Kind::Handle, "STDOUT", "STDOUT"}, {Enum::Token::Type::STDERR, Enum::Token::Kind::Handle, "STDERR", "STDERR"}, {Enum::Token::Type::Redo, Enum::Token::Kind::Control, "Redo", "redo"}, {Enum::Token::Type::Next, Enum::Token::Kind::Control, "Next", "next"}, {Enum::Token::Type::Last, Enum::Token::Kind::Control, "Last", "last"}, {Enum::Token::Type::Goto, Enum::Token::Kind::Control, "Goto", "goto"}, {Enum::Token::Type::Continue, Enum::Token::Kind::Control, "Continue", "continue"}, {Enum::Token::Type::Do, Enum::Token::Kind::Do, "Do", "do"}, {Enum::Token::Type::Break, Enum::Token::Kind::Control, "Break", "break"}, {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-b"}, {Enum::Token::Type::LocalDecl, Enum::Token::Kind::Decl, "LocalDecl", "local"}, {Enum::Token::Type::OurDecl, Enum::Token::Kind::Decl, "OurDecl", "our"}, {Enum::Token::Type::StateDecl, Enum::Token::Kind::Decl, "StateDecl", "state"}, {Enum::Token::Type::UseDecl, Enum::Token::Kind::Decl, "UseDecl", "use"}, {Enum::Token::Type::UsedName, Enum::Token::Kind::Module, "UsedName", ""}, {Enum::Token::Type::RequiredName, Enum::Token::Kind::Module, 
"RequiredName", ""}, {Enum::Token::Type::IfStmt, Enum::Token::Kind::Stmt, "IfStmt", "if"}, {Enum::Token::Type::ElseStmt, Enum::Token::Kind::Stmt, "ElseStmt", "else"}, {Enum::Token::Type::ElsifStmt, Enum::Token::Kind::Stmt, "ElsifStmt", "elsif"}, {Enum::Token::Type::UnlessStmt, Enum::Token::Kind::Stmt, "UnlessStmt", "unless"}, {Enum::Token::Type::UntilStmt, Enum::Token::Kind::Stmt, "UntilStmt", "until"}, {Enum::Token::Type::WhenStmt, Enum::Token::Kind::Stmt, "WhenStmt", "when"}, {Enum::Token::Type::GivenStmt, Enum::Token::Kind::Stmt, "GivenStmt", "given"}, {Enum::Token::Type::DefaultStmt, Enum::Token::Kind::DefaultStmt, "DefaultStmt", "default"}, {Enum::Token::Type::Comma, Enum::Token::Kind::Comma, "Comma", ","}, {Enum::Token::Type::Colon, Enum::Token::Kind::Colon, "Colon", ":"}, {Enum::Token::Type::SemiColon, Enum::Token::Kind::StmtEnd, "SemiColon", ";"}, {Enum::Token::Type::LeftParenthesis, Enum::Token::Kind::Symbol, "LeftParenthesis", "("}, {Enum::Token::Type::RightParenthesis, Enum::Token::Kind::Symbol, "RightParenthesis", ")"}, {Enum::Token::Type::LeftBrace, Enum::Token::Kind::Symbol, "LeftBrace", "{"}, {Enum::Token::Type::RightBrace, Enum::Token::Kind::Symbol, "RightBrace", "}"}, {Enum::Token::Type::LeftBracket, Enum::Token::Kind::Symbol, "LeftBracket", "["}, {Enum::Token::Type::RightBracket, Enum::Token::Kind::Symbol, "RightBracket", "]"}, {Enum::Token::Type::ArrayDereference, Enum::Token::Kind::Modifier, "ArrayDereference", "@{"}, {Enum::Token::Type::HashDereference, Enum::Token::Kind::Modifier, "HashDereference", "%{"}, {Enum::Token::Type::ScalarDereference, Enum::Token::Kind::Modifier, "ScalarDereference", "${"}, {Enum::Token::Type::CodeDereference, Enum::Token::Kind::Modifier, "CodeDereference", "&{"}, {Enum::Token::Type::ShortScalarDereference, Enum::Token::Kind::Modifier, "ShortScalarDereference", ""}, {Enum::Token::Type::ShortArrayDereference, Enum::Token::Kind::Modifier, "ShortArrayDereference", "@$"}, {Enum::Token::Type::ShortHashDereference, Enum::Token::Kind::Modifier, "ShortHashDereference", "%$"}, {Enum::Token::Type::ShortCodeDereference, Enum::Token::Kind::Modifier, "ShortCodeDereference", "&$"}, {Enum::Token::Type::ArraySizeDereference, Enum::Token::Kind::Modifier, "ArraySizeDereference", "$#{"}, {Enum::Token::Type::Key, Enum::Token::Kind::Term, "Key", ""}, {Enum::Token::Type::BareWord, Enum::Token::Kind::Term, "BareWord", ""}, {Enum::Token::Type::Arrow, Enum::Token::Kind::Operator, "Arrow", "=>"}, {Enum::Token::Type::Pointer, Enum::Token::Kind::Operator, "Pointer", "->"}, {Enum::Token::Type::NamespaceResolver, Enum::Token::Kind::Operator, "NamespaceResolver", "::"}, {Enum::Token::Type::Namespace, Enum::Token::Kind::Namespace, "Namespace", ""}, {Enum::Token::Type::Package, Enum::Token::Kind::Package, "Package", "package"}, {Enum::Token::Type::Class, Enum::Token::Kind::Class, "Class", ""}, {Enum::Token::Type::CallDecl, Enum::Token::Kind::Decl, "CallDecl", "&"}, {Enum::Token::Type::CodeRef, Enum::Token::Kind::SingleTerm, "CodeRef", "\\&"}, {Enum::Token::Type::WhileStmt, Enum::Token::Kind::Stmt, "WhileStmt", "while"}, {Enum::Token::Type::ForStmt, Enum::Token::Kind::Stmt, "ForStmt", "for"}, {Enum::Token::Type::ForeachStmt, Enum::Token::Kind::Stmt, "ForeachStmt", "foreach"}, {Enum::Token::Type::Annotation, Enum::Token::Kind::Annotation, "Annotation", "#@"}, {Enum::Token::Type::ArgumentArray, Enum::Token::Kind::Term, "ArgumentArray", "@_"}, {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$_"}, {Enum::Token::Type::ConstValue, 
Enum::Token::Kind::Term, "ConstValue", ""}, {Enum::Token::Type::ProgramArgument, Enum::Token::Kind::Term, "ProgramArgument", "@ARGV"}, {Enum::Token::Type::LibraryDirectories, Enum::Token::Kind::Term, "LibraryDirectories", "@INC"}, {Enum::Token::Type::Environment, Enum::Token::Kind::Term, "Environment", "%ENV"}, {Enum::Token::Type::Include, Enum::Token::Kind::Term, "Include", "%INC"}, {Enum::Token::Type::Signal, Enum::Token::Kind::Term, "Signal", "%SIG"}, {Enum::Token::Type::RegOpt, Enum::Token::Kind::RegOpt, "RegOpt", ""}, {Enum::Token::Type::RegQuote, Enum::Token::Kind::RegPrefix, "RegQuote", "q"}, {Enum::Token::Type::RegDoubleQuote, Enum::Token::Kind::RegPrefix, "RegDoubleQuote", "qq"}, {Enum::Token::Type::RegList, Enum::Token::Kind::RegPrefix, "RegList", "qw"}, {Enum::Token::Type::RegExec, Enum::Token::Kind::RegPrefix, "RegExec", "qx"}, {Enum::Token::Type::RegDecl, Enum::Token::Kind::RegPrefix, "RegDecl", "qr"}, {Enum::Token::Type::RegMatch, Enum::Token::Kind::RegPrefix, "RegMatch", "m"}, {Enum::Token::Type::RegDelim, Enum::Token::Kind::Term, "RegDelim", ""}, {Enum::Token::Type::HandleDelim, Enum::Token::Kind::Term, "HandleDelim", ""}, {Enum::Token::Type::RegMiddleDelim, Enum::Token::Kind::Term, "RegMiddleDelim", ""}, {Enum::Token::Type::RegAllReplace, Enum::Token::Kind::RegReplacePrefix, "RegAllReplace", "tr"}, {Enum::Token::Type::RegReplace, Enum::Token::Kind::RegReplacePrefix, "RegReplace", "s"}, {Enum::Token::Type::RegReplaceFrom, Enum::Token::Kind::Term, "RegReplaceFrom", ""}, {Enum::Token::Type::RegReplaceTo, Enum::Token::Kind::Term, "RegReplaceTo", ""}, {Enum::Token::Type::FieldDecl, Enum::Token::Kind::Decl, "FieldDecl", ""}, {Enum::Token::Type::TypeRef, Enum::Token::Kind::Ref, "TypeRef", ""}, {Enum::Token::Type::LabelRef, Enum::Token::Kind::Ref, "LabelRef", ""}, {Enum::Token::Type::LocalVarDecl, Enum::Token::Kind::Decl, "LocalVarDecl", ""}, {Enum::Token::Type::GlobalVarDecl, Enum::Token::Kind::Decl, "GlobalVarDecl", ""}, {Enum::Token::Type::MultiLocalVarDecl, Enum::Token::Kind::Decl, "MultiLocalVarDecl", ""}, {Enum::Token::Type::MultiGlobalVarDecl, Enum::Token::Kind::Decl, "MultiGlobalVarDecl", ""}, {Enum::Token::Type::Prototype, Enum::Token::Kind::Term, "Prototype", ""}, {Enum::Token::Type::Var, Enum::Token::Kind::Term, "Var", ""}, {Enum::Token::Type::CodeVar, Enum::Token::Kind::Term, "CodeVar", ""}, {Enum::Token::Type::ArrayVar, Enum::Token::Kind::Term, "ArrayVar", ""}, {Enum::Token::Type::HashVar, Enum::Token::Kind::Term, "HashVar", ""}, {Enum::Token::Type::Int, Enum::Token::Kind::Term, "Int", ""}, {Enum::Token::Type::Double, Enum::Token::Kind::Term, "Double", ""}, {Enum::Token::Type::String, Enum::Token::Kind::Term, "String", ""}, {Enum::Token::Type::RawString, Enum::Token::Kind::Term, "RawString", ""}, {Enum::Token::Type::ExecString, Enum::Token::Kind::Term, "ExecString", ""}, {Enum::Token::Type::VersionString, Enum::Token::Kind::Term, "VersionString", ""}, {Enum::Token::Type::HereDocumentTag, Enum::Token::Kind::Term, "HereDocumentTag", ""}, {Enum::Token::Type::HereDocumentRawTag, Enum::Token::Kind::Term, "HereDocumentRawTag", ""}, {Enum::Token::Type::HereDocumentExecTag, Enum::Token::Kind::Term, "HereDocumentExecTag", ""}, {Enum::Token::Type::HereDocumentBareTag, Enum::Token::Kind::Term, "HereDocumentBareTag", ""}, {Enum::Token::Type::RawHereDocument, Enum::Token::Kind::Term, "RawHereDocument", ""}, {Enum::Token::Type::HereDocument, Enum::Token::Kind::Term, "HereDocument", ""}, {Enum::Token::Type::HereDocumentEnd, Enum::Token::Kind::Term, "HereDocumentEnd", ""}, 
{Enum::Token::Type::FormatDecl, Enum::Token::Kind::Decl, "FormatDecl", ""}, {Enum::Token::Type::Format, Enum::Token::Kind::Term, "Format", ""}, {Enum::Token::Type::FormatEnd, Enum::Token::Kind::Term, "FormatEnd", ""}, {Enum::Token::Type::Object, Enum::Token::Kind::Term, "Object", ""}, {Enum::Token::Type::RegExp, Enum::Token::Kind::Term, "RegExp", ""}, {Enum::Token::Type::Array, Enum::Token::Kind::Term, "Array", ""}, {Enum::Token::Type::Hash, Enum::Token::Kind::Term, "Hash", ""}, {Enum::Token::Type::Operator, Enum::Token::Kind::Operator, "Operator", ""}, {Enum::Token::Type::LocalVar, Enum::Token::Kind::Term, "LocalVar", ""}, {Enum::Token::Type::LocalArrayVar, Enum::Token::Kind::Term, "LocalArrayVar", ""}, {Enum::Token::Type::LocalHashVar, Enum::Token::Kind::Term, "LocalHashVar", ""}, {Enum::Token::Type::GlobalVar, Enum::Token::Kind::Term, "GlobalVar", ""}, {Enum::Token::Type::GlobalArrayVar, Enum::Token::Kind::Term, "GlobalArrayVar", ""}, {Enum::Token::Type::GlobalHashVar, Enum::Token::Kind::Term, "GlobalHashVar", ""}, {Enum::Token::Type::ArrayRef, Enum::Token::Kind::Ref, "ArrayRef", ""}, {Enum::Token::Type::HashRef, Enum::Token::Kind::Ref, "HashRef", ""}, {Enum::Token::Type::ArrayAt, Enum::Token::Kind::Get, "ArrayAt", ""}, {Enum::Token::Type::HashAt, Enum::Token::Kind::Get, "HashAt", ""}, {Enum::Token::Type::ArraySet, Enum::Token::Kind::Set, "ArraySet", ""}, {Enum::Token::Type::HashSet, Enum::Token::Kind::Set, "HashSet", ""}, {Enum::Token::Type::Function, Enum::Token::Kind::Decl, "Function", ""}, {Enum::Token::Type::Call, Enum::Token::Kind::Function, "Call", ""}, {Enum::Token::Type::Argument, Enum::Token::Kind::Term, "Argument", ""}, {Enum::Token::Type::List, Enum::Token::Kind::Term, "List", ""}, {Enum::Token::Type::Default, Enum::Token::Kind::Term, "Default", "undef"}, {Enum::Token::Type::Pod, Enum::Token::Kind::Verbose, "Pod", ""}, {Enum::Token::Type::Comment, Enum::Token::Kind::Verbose, "Comment", ""}, {Enum::Token::Type::WhiteSpace, Enum::Token::Kind::Verbose, "WhiteSpace", ""}, {Enum::Token::Type::Undefined, Enum::Token::Kind::Undefined, "Undefined", ""}, {Enum::Token::Type::PostDeref, Enum::Token::Kind::Symbol, "PostDeref", ""}, {Enum::Token::Type::PostDerefStar, Enum::Token::Kind::Symbol, "PostDerefStar", ""}, {Enum::Token::Type::PostDerefArraySliceOpenBracket, Enum::Token::Kind::Symbol, "PostDerefArraySliceOpenBracket", ""}, {Enum::Token::Type::PostDerefArraySliceCloseBracket, Enum::Token::Kind::Symbol, "PostDerefArraySliceCloseBracket", ""}, {Enum::Token::Type::PostDerefHashSliceOpenBrace, Enum::Token::Kind::Symbol, "PostDerefHashSliceOpenBrace", ""}, {Enum::Token::Type::PostDerefHashSliceCloseBrace, Enum::Token::Kind::Symbol, "PostDerefHashSliceCloseBrace", ""}, {Enum::Token::Type::PostDerefCodeOpenParen, Enum::Token::Kind::Symbol, "PostDerefCodeOpenParen", ""}, {Enum::Token::Type::PostDerefCodeCloseParen, Enum::Token::Kind::Symbol, "PostDerefCodeCloseParen", ""} }; Compiler_reserved_keyword.cpp100644000765000024 20704113603257356 25313 0ustar00goccystaff000000000000Compiler-Lexer-0.23/src/compiler/util/* C++ code produced by gperf version 3.0.3 */ /* Command-line: gperf -L C++ -Z ReservedKeywordMap -t gen/reserved_keywords.gperf */ /* Computed positions: -k'1-2,4-5,$' */ #if !((' ' == 32) && ('!' == 33) && ('"' == 34) && ('#' == 35) \ && ('%' == 37) && ('&' == 38) && ('\'' == 39) && ('(' == 40) \ && (')' == 41) && ('*' == 42) && ('+' == 43) && (',' == 44) \ && ('-' == 45) && ('.' 
== 46) && ('/' == 47) && ('0' == 48) \ && ('1' == 49) && ('2' == 50) && ('3' == 51) && ('4' == 52) \ && ('5' == 53) && ('6' == 54) && ('7' == 55) && ('8' == 56) \ && ('9' == 57) && (':' == 58) && (';' == 59) && ('<' == 60) \ && ('=' == 61) && ('>' == 62) && ('?' == 63) && ('A' == 65) \ && ('B' == 66) && ('C' == 67) && ('D' == 68) && ('E' == 69) \ && ('F' == 70) && ('G' == 71) && ('H' == 72) && ('I' == 73) \ && ('J' == 74) && ('K' == 75) && ('L' == 76) && ('M' == 77) \ && ('N' == 78) && ('O' == 79) && ('P' == 80) && ('Q' == 81) \ && ('R' == 82) && ('S' == 83) && ('T' == 84) && ('U' == 85) \ && ('V' == 86) && ('W' == 87) && ('X' == 88) && ('Y' == 89) \ && ('Z' == 90) && ('[' == 91) && ('\\' == 92) && (']' == 93) \ && ('^' == 94) && ('_' == 95) && ('a' == 97) && ('b' == 98) \ && ('c' == 99) && ('d' == 100) && ('e' == 101) && ('f' == 102) \ && ('g' == 103) && ('h' == 104) && ('i' == 105) && ('j' == 106) \ && ('k' == 107) && ('l' == 108) && ('m' == 109) && ('n' == 110) \ && ('o' == 111) && ('p' == 112) && ('q' == 113) && ('r' == 114) \ && ('s' == 115) && ('t' == 116) && ('u' == 117) && ('v' == 118) \ && ('w' == 119) && ('x' == 120) && ('y' == 121) && ('z' == 122) \ && ('{' == 123) && ('|' == 124) && ('}' == 125) && ('~' == 126)) /* The character set is not based on ISO-646. */ #error "gperf generated tables don't work with this execution character set. Please report a bug to ." #endif #include #define TOTAL_KEYWORDS 411 #define MIN_WORD_LENGTH 1 #define MAX_WORD_LENGTH 16 #define MIN_HASH_VALUE 1 #define MAX_HASH_VALUE 1262 /* maximum key range = 1262, duplicates = 0 */ inline unsigned int ReservedKeywordMap::hash (register const char *str, register unsigned int len) { static unsigned short asso_values[] = { 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 140, 400, 195, 25, 185, 20, 395, 325, 320, 200, 235, 315, 420, 190, 285, 380, 370, 40, 365, 360, 350, 345, 335, 245, 95, 80, 305, 250, 5, 215, 290, 60, 140, 65, 20, 200, 70, 85, 40, 25, 5, 1263, 15, 15, 230, 60, 185, 45, 1263, 110, 85, 55, 200, 15, 225, 205, 45, 1263, 260, 165, 210, 125, 120, 65, 245, 270, 50, 150, 0, 280, 25, 125, 150, 20, 160, 120, 115, 20, 80, 35, 95, 40, 15, 10, 155, 65, 240, 220, 345, 125, 295, 280, 0, 170, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263, 1263 }; register int hval = (int)len; switch (hval) { default: hval += asso_values[(unsigned char)str[4]]; /*FALLTHROUGH*/ case 4: hval += asso_values[(unsigned char)str[3]]; /*FALLTHROUGH*/ case 3: case 2: hval += asso_values[(unsigned char)str[1]]; /*FALLTHROUGH*/ case 1: hval += asso_values[(unsigned char)str[0]]; break; } return hval + asso_values[(unsigned char)str[len - 1]]; } ReservedKeyword * 
ReservedKeywordMap::in_word_set (register const char *str, register unsigned int len) { static ReservedKeyword wordlist[] = { {""}, #line 334 "gen/reserved_keywords.gperf" {"}", {Enum::Token::Type::RightBrace, Enum::Token::Kind::Symbol, "RightBrace", "}"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 74 "gen/reserved_keywords.gperf" {"=", {Enum::Token::Type::Assign, Enum::Token::Kind::Assign, "Assign", "="}}, {""}, {""}, {""}, {""}, {""}, #line 41 "gen/reserved_keywords.gperf" {"==", {Enum::Token::Type::EqualEqual, Enum::Token::Kind::Operator, "EqualEqual", "=="}}, {""}, {""}, {""}, {""}, #line 53 "gen/reserved_keywords.gperf" {"ne", {Enum::Token::Type::StringNotEqual, Enum::Token::Kind::Operator, "StringNotEqual", "ne"}}, {""}, {""}, {""}, {""}, #line 51 "gen/reserved_keywords.gperf" {"ge", {Enum::Token::Type::StringGreaterEqual, Enum::Token::Kind::Operator, "StringGreaterEqual", "ge"}}, {""}, {""}, {""}, #line 422 "gen/reserved_keywords.gperf" {"s", {Enum::Token::Type::RegReplace, Enum::Token::Kind::RegReplacePrefix, "RegReplace", "s"}}, #line 64 "gen/reserved_keywords.gperf" {"&=", {Enum::Token::Type::AndBitEqual, Enum::Token::Kind::Assign, "AndBitEqual", "&="}}, {""}, {""}, {""}, {""}, #line 378 "gen/reserved_keywords.gperf" {"$=", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$="}}, {""}, {""}, #line 318 "gen/reserved_keywords.gperf" {"state", {Enum::Token::Type::StateDecl, Enum::Token::Kind::Decl, "StateDecl", "state"}}, {""}, {""}, {""}, #line 283 "gen/reserved_keywords.gperf" {"next", {Enum::Token::Type::Next, Enum::Token::Kind::Control, "Next", "next"}}, {""}, {""}, #line 50 "gen/reserved_keywords.gperf" {"gt", {Enum::Token::Type::StringGreater, Enum::Token::Kind::Operator, "StringGreater", "gt"}}, #line 68 "gen/reserved_keywords.gperf" {"&&=", {Enum::Token::Type::AndEqual, Enum::Token::Kind::Assign, "AndEqual", "&&="}}, #line 158 "gen/reserved_keywords.gperf" {"stat", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "stat"}}, #line 244 "gen/reserved_keywords.gperf" {"setservent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setservent"}}, {""}, {""}, #line 238 "gen/reserved_keywords.gperf" {"getservbyname", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getservbyname"}}, #line 242 "gen/reserved_keywords.gperf" {"setnetent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setnetent"}}, #line 228 "gen/reserved_keywords.gperf" {"endservent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endservent"}}, #line 208 "gen/reserved_keywords.gperf" {"semget", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "semget"}}, #line 233 "gen/reserved_keywords.gperf" {"getnetbyname", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getnetbyname"}}, {""}, #line 216 "gen/reserved_keywords.gperf" {"endnetent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endnetent"}}, #line 240 "gen/reserved_keywords.gperf" {"getservent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getservent"}}, {""}, #line 62 "gen/reserved_keywords.gperf" {"&&", {Enum::Token::Type::And, Enum::Token::Kind::Operator, "And", "&&"}}, #line 239 "gen/reserved_keywords.gperf" {"getservbyport", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getservbyport"}}, #line 234 
"gen/reserved_keywords.gperf" {"getnetent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getnetent"}}, #line 169 "gen/reserved_keywords.gperf" {"reset", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "reset"}}, {""}, #line 366 "gen/reserved_keywords.gperf" {"$&", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$&"}}, {""}, {""}, {""}, #line 193 "gen/reserved_keywords.gperf" {"getpeername", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpeername"}}, #line 343 "gen/reserved_keywords.gperf" {"&$", {Enum::Token::Type::ShortCodeDereference, Enum::Token::Kind::Modifier, "ShortCodeDereference", "&$"}}, {""}, {""}, {""}, {""}, #line 386 "gen/reserved_keywords.gperf" {"$$", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$$"}}, {""}, {""}, {""}, #line 132 "gen/reserved_keywords.gperf" {"select", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "select"}}, {""}, {""}, {""}, {""}, {""}, #line 90 "gen/reserved_keywords.gperf" {"reverse", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "reverse"}}, {""}, {""}, {""}, {""}, #line 420 "gen/reserved_keywords.gperf" {"tr", {Enum::Token::Type::RegAllReplace, Enum::Token::Kind::RegReplacePrefix, "RegAllReplace", "tr"}}, {""}, {""}, {""}, {""}, #line 232 "gen/reserved_keywords.gperf" {"getnetbyaddr", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getnetbyaddr"}}, #line 225 "gen/reserved_keywords.gperf" {"setgrent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setgrent"}}, {""}, #line 272 "gen/reserved_keywords.gperf" {"CHECK", {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "CHECK"}}, {""}, {""}, #line 214 "gen/reserved_keywords.gperf" {"endgrent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endgrent"}}, {""}, {""}, {""}, #line 358 "gen/reserved_keywords.gperf" {"$2", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$2"}}, #line 218 "gen/reserved_keywords.gperf" {"getgrent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getgrent"}}, #line 410 "gen/reserved_keywords.gperf" {"@INC", {Enum::Token::Type::LibraryDirectories, Enum::Token::Kind::Term, "LibraryDirectories", "@INC"}}, {""}, #line 243 "gen/reserved_keywords.gperf" {"setprotoent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setprotoent"}}, #line 341 "gen/reserved_keywords.gperf" {"@$", {Enum::Token::Type::ShortArrayDereference, Enum::Token::Kind::Modifier, "ShortArrayDereference", "@$"}}, #line 77 "gen/reserved_keywords.gperf" {"not", {Enum::Token::Type::AlphabetNot, Enum::Token::Kind::SingleTerm, "AlphabetNot", "not"}}, #line 235 "gen/reserved_keywords.gperf" {"getprotobyname", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getprotobyname"}}, {""}, #line 227 "gen/reserved_keywords.gperf" {"endprotoent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endprotoent"}}, #line 179 "gen/reserved_keywords.gperf" {"setpgrp", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setpgrp"}}, #line 142 "gen/reserved_keywords.gperf" {"vec", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "vec"}}, #line 89 "gen/reserved_keywords.gperf" {"sort", {Enum::Token::Type::BuiltinFunc, 
Enum::Token::Kind::Function, "BuiltinFunc", "sort"}}, #line 124 "gen/reserved_keywords.gperf" {"print", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "print"}}, #line 237 "gen/reserved_keywords.gperf" {"getprotoent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getprotoent"}}, #line 49 "gen/reserved_keywords.gperf" {"le", {Enum::Token::Type::StringLessEqual, Enum::Token::Kind::Operator, "StringLessEqual", "le"}}, {""}, #line 321 "gen/reserved_keywords.gperf" {"else", {Enum::Token::Type::ElseStmt, Enum::Token::Kind::Stmt, "ElseStmt", "else"}}, {""}, {""}, #line 174 "gen/reserved_keywords.gperf" {"getpgrp", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpgrp"}}, #line 139 "gen/reserved_keywords.gperf" {"truncate", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "truncate"}}, #line 123 "gen/reserved_keywords.gperf" {"getc", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getc"}}, #line 199 "gen/reserved_keywords.gperf" {"setsockopt", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setsockopt"}}, #line 194 "gen/reserved_keywords.gperf" {"getsockname", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getsockname"}}, {""}, #line 96 "gen/reserved_keywords.gperf" {"pos", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "pos"}}, #line 261 "gen/reserved_keywords.gperf" {"sqrt", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sqrt"}}, {""}, {""}, #line 66 "gen/reserved_keywords.gperf" {"^=", {Enum::Token::Type::NotBitEqual, Enum::Token::Kind::Assign, "NotBitEqual", "^="}}, {""}, #line 105 "gen/reserved_keywords.gperf" {"grep", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "grep"}}, #line 195 "gen/reserved_keywords.gperf" {"getsockopt", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getsockopt"}}, {""}, #line 48 "gen/reserved_keywords.gperf" {"lt", {Enum::Token::Type::StringLess, Enum::Token::Kind::Operator, "StringLess", "lt"}}, #line 258 "gen/reserved_keywords.gperf" {"oct", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "oct"}}, #line 106 "gen/reserved_keywords.gperf" {"join", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "join"}}, {""}, {""}, #line 385 "gen/reserved_keywords.gperf" {"$@", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$@"}}, #line 253 "gen/reserved_keywords.gperf" {"cos", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "cos"}}, {""}, #line 81 "gen/reserved_keywords.gperf" {"crypt", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "crypt"}}, {""}, #line 47 "gen/reserved_keywords.gperf" {"!=", {Enum::Token::Type::NotEqual, Enum::Token::Kind::Operator, "NotEqual", "!="}}, #line 100 "gen/reserved_keywords.gperf" {"pop", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "pop"}}, {""}, {""}, #line 236 "gen/reserved_keywords.gperf" {"getprotobynumber", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getprotobynumber"}}, #line 367 "gen/reserved_keywords.gperf" {"$`", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$`"}}, #line 400 "gen/reserved_keywords.gperf" {"$^I", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, 
"SpecificValue", "$^I"}}, #line 153 "gen/reserved_keywords.gperf" {"open", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "open"}}, {""}, #line 329 "gen/reserved_keywords.gperf" {":", {Enum::Token::Type::Colon, Enum::Token::Kind::Colon, "Colon", ":"}}, #line 25 "gen/reserved_keywords.gperf" {"or", {Enum::Token::Type::AlphabetOr, Enum::Token::Kind::Operator, "AlphabetOr", "or"}}, #line 187 "gen/reserved_keywords.gperf" {"tie", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "tie"}}, #line 247 "gen/reserved_keywords.gperf" {"time", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "time"}}, {""}, #line 108 "gen/reserved_keywords.gperf" {"delete", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "delete"}}, #line 192 "gen/reserved_keywords.gperf" {"connect", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "connect"}}, #line 401 "gen/reserved_keywords.gperf" {"$^L", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^L"}}, {""}, #line 209 "gen/reserved_keywords.gperf" {"semop", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "semop"}}, #line 204 "gen/reserved_keywords.gperf" {"msgget", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "msgget"}}, {""}, #line 319 "gen/reserved_keywords.gperf" {"use", {Enum::Token::Type::UseDecl, Enum::Token::Kind::Decl, "UseDecl", "use"}}, #line 197 "gen/reserved_keywords.gperf" {"recv", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "recv"}}, {""}, {""}, #line 418 "gen/reserved_keywords.gperf" {"qr", {Enum::Token::Type::RegDecl, Enum::Token::Kind::RegPrefix, "RegDecl", "qr"}}, #line 399 "gen/reserved_keywords.gperf" {"$^H", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^H"}}, #line 273 "gen/reserved_keywords.gperf" {"INIT", {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "INIT"}}, {""}, #line 211 "gen/reserved_keywords.gperf" {"shmget", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shmget"}}, #line 186 "gen/reserved_keywords.gperf" {"no", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "no"}}, #line 256 "gen/reserved_keywords.gperf" {"int", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "int"}}, {""}, {""}, {""}, #line 382 "gen/reserved_keywords.gperf" {"$:", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$:"}}, #line 260 "gen/reserved_keywords.gperf" {"sin", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sin"}}, #line 178 "gen/reserved_keywords.gperf" {"pipe", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "pipe"}}, #line 114 "gen/reserved_keywords.gperf" {"close", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "close"}}, #line 414 "gen/reserved_keywords.gperf" {"q", {Enum::Token::Type::RegQuote, Enum::Token::Kind::RegPrefix, "RegQuote", "q"}}, #line 52 "gen/reserved_keywords.gperf" {"eq", {Enum::Token::Type::StringEqual, Enum::Token::Kind::Operator, "StringEqual", "eq"}}, #line 398 "gen/reserved_keywords.gperf" {"$^G", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^G"}}, #line 111 "gen/reserved_keywords.gperf" {"keys", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "keys"}}, #line 
183 "gen/reserved_keywords.gperf" {"times", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "times"}}, {""}, #line 34 "gen/reserved_keywords.gperf" {"%=", {Enum::Token::Type::ModEqual, Enum::Token::Kind::Assign, "ModEqual", "%="}}, #line 404 "gen/reserved_keywords.gperf" {"$^P", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^P"}}, {""}, {""}, #line 207 "gen/reserved_keywords.gperf" {"semctl", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "semctl"}}, #line 35 "gen/reserved_keywords.gperf" {".=", {Enum::Token::Type::StringAddEqual, Enum::Token::Kind::Assign, "StringAddEqual", ".="}}, #line 54 "gen/reserved_keywords.gperf" {"cmp", {Enum::Token::Type::StringCompare, Enum::Token::Kind::Operator, "StringCompare", "cmp"}}, #line 282 "gen/reserved_keywords.gperf" {"redo", {Enum::Token::Type::Redo, Enum::Token::Kind::Control, "Redo", "redo"}}, {""}, {""}, #line 349 "gen/reserved_keywords.gperf" {"\\&", {Enum::Token::Type::CodeRef, Enum::Token::Kind::SingleTerm, "CodeRef", "\\&"}}, #line 406 "gen/reserved_keywords.gperf" {"$^T", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^T"}}, {""}, #line 181 "gen/reserved_keywords.gperf" {"sleep", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sleep"}}, #line 323 "gen/reserved_keywords.gperf" {"unless", {Enum::Token::Type::UnlessStmt, Enum::Token::Kind::Stmt, "UnlessStmt", "unless"}}, #line 32 "gen/reserved_keywords.gperf" {"*=", {Enum::Token::Type::MulEqual, Enum::Token::Kind::Assign, "MulEqual", "*="}}, #line 220 "gen/reserved_keywords.gperf" {"getgrnam", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getgrnam"}}, {""}, {""}, {""}, #line 365 "gen/reserved_keywords.gperf" {"$9", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$9"}}, #line 80 "gen/reserved_keywords.gperf" {"chr", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chr"}}, {""}, #line 326 "gen/reserved_keywords.gperf" {"given", {Enum::Token::Type::GivenStmt, Enum::Token::Kind::Stmt, "GivenStmt", "given"}}, {""}, #line 83 "gen/reserved_keywords.gperf" {"lc", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "lc"}}, #line 396 "gen/reserved_keywords.gperf" {"$^E", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^E"}}, {""}, #line 98 "gen/reserved_keywords.gperf" {"split", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "split"}}, {""}, #line 39 "gen/reserved_keywords.gperf" {">=", {Enum::Token::Type::GreaterEqual, Enum::Token::Kind::Operator, "GreaterEqual", ">="}}, #line 257 "gen/reserved_keywords.gperf" {"log", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "log"}}, {""}, {""}, #line 419 "gen/reserved_keywords.gperf" {"m", {Enum::Token::Type::RegMatch, Enum::Token::Kind::RegPrefix, "RegMatch", "m"}}, #line 38 "gen/reserved_keywords.gperf" {"x=", {Enum::Token::Type::StringMulEqual, Enum::Token::Kind::Assign, "StringMulEqual", "x="}}, #line 115 "gen/reserved_keywords.gperf" {"closedir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "closedir"}}, #line 412 "gen/reserved_keywords.gperf" {"%INC", {Enum::Token::Type::Include, Enum::Token::Kind::Term, "Include", "%INC"}}, {""}, {""}, #line 342 "gen/reserved_keywords.gperf" {"%$", {Enum::Token::Type::ShortHashDereference, Enum::Token::Kind::Modifier, 
"ShortHashDereference", "%$"}}, #line 397 "gen/reserved_keywords.gperf" {"$^F", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^F"}}, {""}, #line 241 "gen/reserved_keywords.gperf" {"sethostent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sethostent"}}, #line 93 "gen/reserved_keywords.gperf" {"substr", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "substr"}}, #line 347 "gen/reserved_keywords.gperf" {"::", {Enum::Token::Type::NamespaceResolver, Enum::Token::Kind::Operator, "NamespaceResolver", "::"}}, #line 230 "gen/reserved_keywords.gperf" {"gethostbyname", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "gethostbyname"}}, #line 166 "gen/reserved_keywords.gperf" {"exit", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "exit"}}, #line 215 "gen/reserved_keywords.gperf" {"endhostent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endhostent"}}, {""}, #line 30 "gen/reserved_keywords.gperf" {"+=", {Enum::Token::Type::AddEqual, Enum::Token::Kind::Assign, "AddEqual", "+="}}, #line 219 "gen/reserved_keywords.gperf" {"getgrgid", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getgrgid"}}, #line 79 "gen/reserved_keywords.gperf" {"chop", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chop"}}, #line 231 "gen/reserved_keywords.gperf" {"gethostent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "gethostent"}}, #line 27 "gen/reserved_keywords.gperf" {"^", {Enum::Token::Type::BitXOr, Enum::Token::Kind::Operator, "BitXOr", "^"}}, #line 175 "gen/reserved_keywords.gperf" {"getppid", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getppid"}}, #line 221 "gen/reserved_keywords.gperf" {"getlogin", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getlogin"}}, #line 137 "gen/reserved_keywords.gperf" {"tell", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "tell"}}, {""}, #line 102 "gen/reserved_keywords.gperf" {"splice", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "splice"}}, #line 94 "gen/reserved_keywords.gperf" {"uc", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "uc"}}, #line 254 "gen/reserved_keywords.gperf" {"exp", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "exp"}}, #line 129 "gen/reserved_keywords.gperf" {"rewinddir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rewinddir"}}, {""}, #line 13 "gen/reserved_keywords.gperf" {"return", {Enum::Token::Type::Return, Enum::Token::Kind::Return, "Return", "return"}}, #line 40 "gen/reserved_keywords.gperf" {"<=", {Enum::Token::Type::LessEqual, Enum::Token::Kind::Operator, "LessEqual", "<="}}, #line 405 "gen/reserved_keywords.gperf" {"$^R", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^R"}}, {""}, #line 271 "gen/reserved_keywords.gperf" {"BEGIN", {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "BEGIN"}}, #line 110 "gen/reserved_keywords.gperf" {"exists", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "exists"}}, #line 355 "gen/reserved_keywords.gperf" {"$_", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$_"}}, {""}, #line 285 "gen/reserved_keywords.gperf" {"goto", 
{Enum::Token::Type::Goto, Enum::Token::Kind::Control, "Goto", "goto"}}, #line 279 "gen/reserved_keywords.gperf" {"STDIN", {Enum::Token::Type::STDIN, Enum::Token::Kind::Handle, "STDIN", "STDIN"}}, #line 201 "gen/reserved_keywords.gperf" {"socket", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "socket"}}, {""}, #line 86 "gen/reserved_keywords.gperf" {"ord", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "ord"}}, {""}, #line 409 "gen/reserved_keywords.gperf" {"@ARGV", {Enum::Token::Type::ProgramArgument, Enum::Token::Kind::Term, "ProgramArgument", "@ARGV"}}, {""}, #line 381 "gen/reserved_keywords.gperf" {"$^", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^"}}, #line 317 "gen/reserved_keywords.gperf" {"our", {Enum::Token::Type::OurDecl, Enum::Token::Kind::Decl, "OurDecl", "our"}}, {""}, {""}, #line 76 "gen/reserved_keywords.gperf" {"!", {Enum::Token::Type::Not, Enum::Token::Kind::SingleTerm, "Not", "!"}}, {""}, #line 229 "gen/reserved_keywords.gperf" {"gethostbyaddr", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "gethostbyaddr"}}, {""}, #line 162 "gen/reserved_keywords.gperf" {"utime", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "utime"}}, #line 85 "gen/reserved_keywords.gperf" {"length", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "length"}}, #line 415 "gen/reserved_keywords.gperf" {"qq", {Enum::Token::Type::RegDoubleQuote, Enum::Token::Kind::RegPrefix, "RegDoubleQuote", "qq"}}, {""}, #line 411 "gen/reserved_keywords.gperf" {"%ENV", {Enum::Token::Type::Environment, Enum::Token::Kind::Term, "Environment", "%ENV"}}, {""}, #line 205 "gen/reserved_keywords.gperf" {"msgrcv", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "msgrcv"}}, #line 65 "gen/reserved_keywords.gperf" {"|=", {Enum::Token::Type::OrBitEqual, Enum::Token::Kind::Assign, "OrBitEqual", "|="}}, #line 394 "gen/reserved_keywords.gperf" {"$^A", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^A"}}, {""}, #line 141 "gen/reserved_keywords.gperf" {"write", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "write"}}, {""}, #line 33 "gen/reserved_keywords.gperf" {"/=", {Enum::Token::Type::DivEqual, Enum::Token::Kind::Assign, "DivEqual", "/="}}, #line 286 "gen/reserved_keywords.gperf" {"continue", {Enum::Token::Type::Continue, Enum::Token::Kind::Control, "Continue", "continue"}}, {""}, {""}, {""}, #line 354 "gen/reserved_keywords.gperf" {"@_", {Enum::Token::Type::ArgumentArray, Enum::Token::Kind::Term, "ArgumentArray", "@_"}}, #line 118 "gen/reserved_keywords.gperf" {"die", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "die"}}, {""}, #line 202 "gen/reserved_keywords.gperf" {"socketpair", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "socketpair"}}, #line 196 "gen/reserved_keywords.gperf" {"listen", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "listen"}}, #line 384 "gen/reserved_keywords.gperf" {"$!", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$!"}}, #line 226 "gen/reserved_keywords.gperf" {"setpwent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setpwent"}}, #line 165 "gen/reserved_keywords.gperf" {"eval", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "eval"}}, {""}, {""}, #line 
287 "gen/reserved_keywords.gperf" {"do", {Enum::Token::Type::Do, Enum::Token::Kind::Do, "Do", "do"}}, #line 217 "gen/reserved_keywords.gperf" {"endpwent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "endpwent"}}, {""}, {""}, #line 203 "gen/reserved_keywords.gperf" {"msgctl", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "msgctl"}}, #line 353 "gen/reserved_keywords.gperf" {"#@", {Enum::Token::Type::Annotation, Enum::Token::Kind::Annotation, "Annotation", "#@"}}, #line 222 "gen/reserved_keywords.gperf" {"getpwent", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpwent"}}, #line 198 "gen/reserved_keywords.gperf" {"send", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "send"}}, {""}, #line 206 "gen/reserved_keywords.gperf" {"msgsnd", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "msgsnd"}}, {""}, #line 248 "gen/reserved_keywords.gperf" {"ref", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "ref"}}, #line 172 "gen/reserved_keywords.gperf" {"exec", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "exec"}}, {""}, #line 210 "gen/reserved_keywords.gperf" {"shmctl", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shmctl"}}, #line 138 "gen/reserved_keywords.gperf" {"telldir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "telldir"}}, #line 200 "gen/reserved_keywords.gperf" {"shutdown", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shutdown"}}, {""}, #line 189 "gen/reserved_keywords.gperf" {"untie", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "untie"}}, #line 22 "gen/reserved_keywords.gperf" {"\\", {Enum::Token::Type::Ref, Enum::Token::Kind::Operator, "Ref", "\\"}}, #line 154 "gen/reserved_keywords.gperf" {"opendir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "opendir"}}, #line 274 "gen/reserved_keywords.gperf" {"END", {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "END"}}, {""}, {""}, {""}, #line 212 "gen/reserved_keywords.gperf" {"shmread", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shmread"}}, #line 403 "gen/reserved_keywords.gperf" {"$^O", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^O"}}, #line 130 "gen/reserved_keywords.gperf" {"seek", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "seek"}}, {""}, #line 23 "gen/reserved_keywords.gperf" {"~", {Enum::Token::Type::BitNot, Enum::Token::Kind::Operator, "BitNot", "~"}}, {""}, #line 28 "gen/reserved_keywords.gperf" {"xor", {Enum::Token::Type::AlphabetXOr, Enum::Token::Kind::Operator, "AlphabetXOr", "xor"}}, #line 127 "gen/reserved_keywords.gperf" {"read", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "read"}}, {""}, #line 190 "gen/reserved_keywords.gperf" {"accept", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "accept"}}, #line 45 "gen/reserved_keywords.gperf" {"=~", {Enum::Token::Type::RegOK, Enum::Token::Kind::Operator, "RegOK", "=~"}}, #line 255 "gen/reserved_keywords.gperf" {"hex", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "hex"}}, #line 277 "gen/reserved_keywords.gperf" {"CORE", {Enum::Token::Type::CORE, Enum::Token::Kind::CORE, "CORE", "CORE"}}, {""}, #line 146 
"gen/reserved_keywords.gperf" {"chroot", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chroot"}}, #line 263 "gen/reserved_keywords.gperf" {"require", {Enum::Token::Type::RequireDecl, Enum::Token::Kind::Decl, "RequireDecl", "require"}}, #line 395 "gen/reserved_keywords.gperf" {"$^D", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^D"}}, #line 413 "gen/reserved_keywords.gperf" {"%SIG", {Enum::Token::Type::Signal, Enum::Token::Kind::Term, "Signal", "%SIG"}}, {""}, {""}, #line 375 "gen/reserved_keywords.gperf" {"$\\", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$\\"}}, #line 408 "gen/reserved_keywords.gperf" {"$^X", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^X"}}, #line 275 "gen/reserved_keywords.gperf" {"UNITCHECK", {Enum::Token::Type::ModWord, Enum::Token::Kind::ModWord, "ModWord", "UNITCHECK"}}, #line 252 "gen/reserved_keywords.gperf" {"atan2", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "atan2"}}, {""}, {""}, #line 119 "gen/reserved_keywords.gperf" {"eof", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "eof"}}, {""}, #line 78 "gen/reserved_keywords.gperf" {"chomp", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chomp"}}, {""}, #line 380 "gen/reserved_keywords.gperf" {"$~", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$~"}}, {""}, {""}, {""}, #line 17 "gen/reserved_keywords.gperf" {"%", {Enum::Token::Type::Mod, Enum::Token::Kind::Operator, "Mod", "%"}}, #line 131 "gen/reserved_keywords.gperf" {"seekdir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "seekdir"}}, {""}, {""}, {""}, {""}, #line 84 "gen/reserved_keywords.gperf" {"lcfirst", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "lcfirst"}}, #line 407 "gen/reserved_keywords.gperf" {"$^W", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^W"}}, #line 164 "gen/reserved_keywords.gperf" {"dump", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "dump"}}, #line 262 "gen/reserved_keywords.gperf" {"srand", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "srand"}}, #line 21 "gen/reserved_keywords.gperf" {".", {Enum::Token::Type::StringAdd, Enum::Token::Kind::Operator, "StringAdd", "."}}, {""}, #line 402 "gen/reserved_keywords.gperf" {"$^M", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$^M"}}, {""}, {""}, {""}, #line 128 "gen/reserved_keywords.gperf" {"readdir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "readdir"}}, #line 266 "gen/reserved_keywords.gperf" {"__FILE__", {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__FILE__"}}, #line 284 "gen/reserved_keywords.gperf" {"last", {Enum::Token::Type::Last, Enum::Token::Kind::Control, "Last", "last"}}, #line 157 "gen/reserved_keywords.gperf" {"rmdir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rmdir"}}, #line 126 "gen/reserved_keywords.gperf" {"printf", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "printf"}}, {""}, {""}, {""}, {""}, {""}, #line 377 "gen/reserved_keywords.gperf" {"$%", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$%"}}, #line 107 "gen/reserved_keywords.gperf" {"map", 
{Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "map"}}, {""}, {""}, #line 264 "gen/reserved_keywords.gperf" {"import", {Enum::Token::Type::Import, Enum::Token::Kind::Import, "Import", "import"}}, {""}, #line 351 "gen/reserved_keywords.gperf" {"for", {Enum::Token::Type::ForStmt, Enum::Token::Kind::Stmt, "ForStmt", "for"}}, {""}, #line 151 "gen/reserved_keywords.gperf" {"lstat", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "lstat"}}, #line 156 "gen/reserved_keywords.gperf" {"rename", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rename"}}, #line 370 "gen/reserved_keywords.gperf" {"$.", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$."}}, #line 58 "gen/reserved_keywords.gperf" {"**=", {Enum::Token::Type::PowerEqual, Enum::Token::Kind::Assign, "PowerEqual", "**="}}, #line 325 "gen/reserved_keywords.gperf" {"when", {Enum::Token::Type::WhenStmt, Enum::Token::Kind::Stmt, "WhenStmt", "when"}}, #line 143 "gen/reserved_keywords.gperf" {"chdir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chdir"}}, #line 245 "gen/reserved_keywords.gperf" {"gmtime", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "gmtime"}}, #line 95 "gen/reserved_keywords.gperf" {"ucfirst", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "ucfirst"}}, {""}, {""}, {""}, {""}, #line 75 "gen/reserved_keywords.gperf" {"$#", {Enum::Token::Type::ArraySize, Enum::Token::Kind::SingleTerm, "ArraySize", "$#"}}, #line 26 "gen/reserved_keywords.gperf" {"and", {Enum::Token::Type::AlphabetAnd, Enum::Token::Kind::Operator, "AlphabetAnd", "and"}}, {""}, {""}, #line 336 "gen/reserved_keywords.gperf" {"]", {Enum::Token::Type::RightBracket, Enum::Token::Kind::Symbol, "RightBracket", "]"}}, #line 292 "gen/reserved_keywords.gperf" {"-e", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-e"}}, #line 223 "gen/reserved_keywords.gperf" {"getpwnam", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpwnam"}}, {""}, {""}, {""}, #line 373 "gen/reserved_keywords.gperf" {"$*", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$*"}}, #line 213 "gen/reserved_keywords.gperf" {"shmwrite", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shmwrite"}}, {""}, {""}, #line 19 "gen/reserved_keywords.gperf" {">", {Enum::Token::Type::Greater, Enum::Token::Kind::Operator, "Greater", ">"}}, #line 31 "gen/reserved_keywords.gperf" {"-=", {Enum::Token::Type::SubEqual, Enum::Token::Kind::Assign, "SubEqual", "-="}}, #line 267 "gen/reserved_keywords.gperf" {"__LINE__", {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__LINE__"}}, {""}, {""}, #line 281 "gen/reserved_keywords.gperf" {"STDERR", {Enum::Token::Type::STDERR, Enum::Token::Kind::Handle, "STDERR", "STDERR"}}, #line 345 "gen/reserved_keywords.gperf" {"=>", {Enum::Token::Type::Arrow, Enum::Token::Kind::Operator, "Arrow", "=>"}}, #line 37 "gen/reserved_keywords.gperf" {">>=", {Enum::Token::Type::RightShiftEqual, Enum::Token::Kind::Assign, "RightShiftEqual", ">>="}}, {""}, #line 249 "gen/reserved_keywords.gperf" {"bless", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "bless"}}, #line 29 "gen/reserved_keywords.gperf" {"x", {Enum::Token::Type::StringMul, Enum::Token::Kind::Operator, "StringMul", "x"}}, #line 301 "gen/reserved_keywords.gperf" 
{"-t", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-t"}}, #line 73 "gen/reserved_keywords.gperf" {"sub", {Enum::Token::Type::FunctionDecl, Enum::Token::Kind::Decl, "FunctionDecl", "sub"}}, #line 101 "gen/reserved_keywords.gperf" {"push", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "push"}}, #line 103 "gen/reserved_keywords.gperf" {"shift", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "shift"}}, #line 180 "gen/reserved_keywords.gperf" {"setpriority", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "setpriority"}}, #line 392 "gen/reserved_keywords.gperf" {"$]", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$]"}}, {""}, {""}, {""}, {""}, #line 300 "gen/reserved_keywords.gperf" {"-s", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-s"}}, {""}, {""}, {""}, #line 176 "gen/reserved_keywords.gperf" {"getpriority", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpriority"}}, #line 388 "gen/reserved_keywords.gperf" {"$>", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$>"}}, #line 224 "gen/reserved_keywords.gperf" {"getpwuid", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "getpwuid"}}, {""}, #line 145 "gen/reserved_keywords.gperf" {"chown", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chown"}}, #line 163 "gen/reserved_keywords.gperf" {"caller", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "caller"}}, #line 308 "gen/reserved_keywords.gperf" {"-C", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-C"}}, {""}, #line 188 "gen/reserved_keywords.gperf" {"tied", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "tied"}}, {""}, {""}, #line 104 "gen/reserved_keywords.gperf" {"unshift", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "unshift"}}, {""}, {""}, {""}, #line 14 "gen/reserved_keywords.gperf" {"+", {Enum::Token::Type::Add, Enum::Token::Kind::Operator, "Add", "+"}}, #line 294 "gen/reserved_keywords.gperf" {"-g", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-g"}}, #line 43 "gen/reserved_keywords.gperf" {"<=>", {Enum::Token::Type::Compare, Enum::Token::Kind::Operator, "Compare", "<=>"}}, {""}, {""}, #line 170 "gen/reserved_keywords.gperf" {"scalar", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "scalar"}}, #line 250 "gen/reserved_keywords.gperf" {"defined", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "defined"}}, #line 155 "gen/reserved_keywords.gperf" {"readlink", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "readlink"}}, {""}, {""}, {""}, #line 46 "gen/reserved_keywords.gperf" {"!~", {Enum::Token::Type::RegNot, Enum::Token::Kind::Operator, "RegNot", "!~"}}, {""}, {""}, #line 149 "gen/reserved_keywords.gperf" {"ioctl", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "ioctl"}}, #line 112 "gen/reserved_keywords.gperf" {"values", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "values"}}, #line 278 "gen/reserved_keywords.gperf" {"DESTROY", {Enum::Token::Type::DESTROY, Enum::Token::Kind::DESTROY, "DESTROY", "DESTROY"}}, {""}, {""}, #line 350 "gen/reserved_keywords.gperf" {"while", {Enum::Token::Type::WhileStmt, Enum::Token::Kind::Stmt, 
"WhileStmt", "while"}}, #line 182 "gen/reserved_keywords.gperf" {"system", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "system"}}, #line 298 "gen/reserved_keywords.gperf" {"-p", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-p"}}, {""}, {""}, {""}, {""}, #line 369 "gen/reserved_keywords.gperf" {"$+", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$+"}}, {""}, #line 109 "gen/reserved_keywords.gperf" {"each", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "each"}}, {""}, #line 20 "gen/reserved_keywords.gperf" {"<", {Enum::Token::Type::Less, Enum::Token::Kind::Operator, "Less", "<"}}, #line 299 "gen/reserved_keywords.gperf" {"-r", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-r"}}, {""}, {""}, {""}, {""}, #line 92 "gen/reserved_keywords.gperf" {"sprintf", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sprintf"}}, #line 36 "gen/reserved_keywords.gperf" {"<<=", {Enum::Token::Type::LeftShiftEqual, Enum::Token::Kind::Assign, "LeftShiftEqual", "<<="}}, #line 184 "gen/reserved_keywords.gperf" {"wait", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "wait"}}, #line 152 "gen/reserved_keywords.gperf" {"mkdir", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "mkdir"}}, #line 161 "gen/reserved_keywords.gperf" {"unlink", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "unlink"}}, #line 44 "gen/reserved_keywords.gperf" {"~~", {Enum::Token::Type::PolymorphicCompare, Enum::Token::Kind::Operator, "PolymorphicCompare", "~~"}}, {""}, #line 97 "gen/reserved_keywords.gperf" {"quotemeta", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "quotemeta"}}, {""}, {""}, #line 364 "gen/reserved_keywords.gperf" {"$8", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$8"}}, #line 344 "gen/reserved_keywords.gperf" {"$#{", {Enum::Token::Type::ArraySizeDereference, Enum::Token::Kind::Modifier, "ArraySizeDereference", "$#{"}}, {""}, {""}, #line 335 "gen/reserved_keywords.gperf" {"[", {Enum::Token::Type::LeftBracket, Enum::Token::Kind::Symbol, "LeftBracket", "["}}, #line 290 "gen/reserved_keywords.gperf" {"-c", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-c"}}, {""}, {""}, {""}, {""}, #line 387 "gen/reserved_keywords.gperf" {"$<", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$<"}}, {""}, #line 140 "gen/reserved_keywords.gperf" {"warn", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "warn"}}, {""}, #line 265 "gen/reserved_keywords.gperf" {"__PACKAGE__", {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__PACKAGE__"}}, #line 313 "gen/reserved_keywords.gperf" {"-T", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-T"}}, #line 251 "gen/reserved_keywords.gperf" {"abs", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "abs"}}, {""}, {""}, #line 120 "gen/reserved_keywords.gperf" {"fileno", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "fileno"}}, #line 417 "gen/reserved_keywords.gperf" {"qx", {Enum::Token::Type::RegExec, Enum::Token::Kind::RegPrefix, "RegExec", "qx"}}, {""}, {""}, {""}, {""}, #line 135 "gen/reserved_keywords.gperf" {"sysseek", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", 
"sysseek"}}, {""}, {""}, {""}, {""}, #line 391 "gen/reserved_keywords.gperf" {"$[", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$["}}, {""}, {""}, {""}, {""}, #line 307 "gen/reserved_keywords.gperf" {"-B", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-B"}}, {""}, #line 177 "gen/reserved_keywords.gperf" {"kill", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "kill"}}, {""}, {""}, #line 134 "gen/reserved_keywords.gperf" {"sysread", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "sysread"}}, {""}, {""}, #line 144 "gen/reserved_keywords.gperf" {"chmod", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "chmod"}}, #line 24 "gen/reserved_keywords.gperf" {"|", {Enum::Token::Type::BitOr, Enum::Token::Kind::Operator, "BitOr", "|"}}, #line 117 "gen/reserved_keywords.gperf" {"dbmopen", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "dbmopen"}}, #line 269 "gen/reserved_keywords.gperf" {"__DATA__", {Enum::Token::Type::DataWord, Enum::Token::Kind::DataWord, "DataWord", "__DATA__"}}, {""}, {""}, #line 91 "gen/reserved_keywords.gperf" {"rindex", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rindex"}}, #line 327 "gen/reserved_keywords.gperf" {"default", {Enum::Token::Type::DefaultStmt, Enum::Token::Kind::DefaultStmt, "DefaultStmt", "default"}}, #line 67 "gen/reserved_keywords.gperf" {"||=", {Enum::Token::Type::OrEqual, Enum::Token::Kind::Assign, "OrEqual", "||="}}, {""}, #line 324 "gen/reserved_keywords.gperf" {"until", {Enum::Token::Type::UntilStmt, Enum::Token::Kind::Stmt, "UntilStmt", "until"}}, #line 16 "gen/reserved_keywords.gperf" {"/", {Enum::Token::Type::Div, Enum::Token::Kind::Operator, "Div", "/"}}, #line 69 "gen/reserved_keywords.gperf" {"..", {Enum::Token::Type::Slice, Enum::Token::Kind::Operator, "Slice", ".."}}, #line 71 "gen/reserved_keywords.gperf" {"...", {Enum::Token::Type::ToDo, Enum::Token::Kind::Operator, "ToDo", "..."}}, #line 246 "gen/reserved_keywords.gperf" {"localtime", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "localtime"}}, {""}, {""}, #line 416 "gen/reserved_keywords.gperf" {"qw", {Enum::Token::Type::RegList, Enum::Token::Kind::RegPrefix, "RegList", "qw"}}, #line 59 "gen/reserved_keywords.gperf" {"//=", {Enum::Token::Type::DefaultEqual, Enum::Token::Kind::Assign, "DefaultEqual", "//="}}, {""}, {""}, #line 18 "gen/reserved_keywords.gperf" {"?", {Enum::Token::Type::ThreeTermOperator, Enum::Token::Kind::Operator, "ThreeTermOperator", "?"}}, #line 297 "gen/reserved_keywords.gperf" {"-o", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-o"}}, {""}, {""}, #line 147 "gen/reserved_keywords.gperf" {"fcntl", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "fcntl"}}, #line 280 "gen/reserved_keywords.gperf" {"STDOUT", {Enum::Token::Type::STDOUT, Enum::Token::Kind::Handle, "STDOUT", "STDOUT"}}, #line 372 "gen/reserved_keywords.gperf" {"$|", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$|"}}, {""}, #line 259 "gen/reserved_keywords.gperf" {"rand", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "rand"}}, {""}, #line 333 "gen/reserved_keywords.gperf" {"{", {Enum::Token::Type::LeftBrace, Enum::Token::Kind::Symbol, "LeftBrace", "{"}}, #line 312 "gen/reserved_keywords.gperf" {"-S", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-S"}}, 
{""}, #line 150 "gen/reserved_keywords.gperf" {"link", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "link"}}, {""}, {""}, #line 371 "gen/reserved_keywords.gperf" {"$/", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$/"}}, #line 116 "gen/reserved_keywords.gperf" {"dbmclose", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "dbmclose"}}, {""}, {""}, {""}, #line 57 "gen/reserved_keywords.gperf" {"**", {Enum::Token::Type::Exp, Enum::Token::Kind::Operator, "Exp", "**"}}, #line 168 "gen/reserved_keywords.gperf" {"formline", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "formline"}}, #line 87 "gen/reserved_keywords.gperf" {"pack", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "pack"}}, {""}, {""}, #line 383 "gen/reserved_keywords.gperf" {"$?", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$?"}}, #line 125 "gen/reserved_keywords.gperf" {"say", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "say"}}, {""}, #line 160 "gen/reserved_keywords.gperf" {"umask", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "umask"}}, #line 330 "gen/reserved_keywords.gperf" {";", {Enum::Token::Type::SemiColon, Enum::Token::Kind::StmtEnd, "SemiColon", ";"}}, #line 340 "gen/reserved_keywords.gperf" {"&{", {Enum::Token::Type::CodeDereference, Enum::Token::Kind::Modifier, "CodeDereference", "&{"}}, {""}, {""}, #line 82 "gen/reserved_keywords.gperf" {"index", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "index"}}, {""}, #line 339 "gen/reserved_keywords.gperf" {"${", {Enum::Token::Type::ScalarDereference, Enum::Token::Kind::Modifier, "ScalarDereference", "${"}}, {""}, {""}, {""}, {""}, #line 113 "gen/reserved_keywords.gperf" {"binmode", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "binmode"}}, {""}, {""}, {""}, {""}, #line 270 "gen/reserved_keywords.gperf" {"__END__", {Enum::Token::Type::DataWord, Enum::Token::Kind::DataWord, "DataWord", "__END__"}}, {""}, {""}, {""}, #line 328 "gen/reserved_keywords.gperf" {",", {Enum::Token::Type::Comma, Enum::Token::Kind::Comma, "Comma", ","}}, #line 268 "gen/reserved_keywords.gperf" {"__SUB__", {Enum::Token::Type::SpecificKeyword, Enum::Token::Kind::SpecificKeyword, "SpecificKeyword", "__SUB__"}}, {""}, {""}, {""}, #line 88 "gen/reserved_keywords.gperf" {"unpack", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "unpack"}}, #line 393 "gen/reserved_keywords.gperf" {"$;", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$;"}}, {""}, {""}, #line 171 "gen/reserved_keywords.gperf" {"alarm", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "alarm"}}, #line 332 "gen/reserved_keywords.gperf" {")", {Enum::Token::Type::RightParenthesis, Enum::Token::Kind::Symbol, "RightParenthesis", ")"}}, #line 311 "gen/reserved_keywords.gperf" {"-R", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-R"}}, {""}, {""}, {""}, {""}, #line 61 "gen/reserved_keywords.gperf" {">>", {Enum::Token::Type::RightShift, Enum::Token::Kind::Operator, "RightShift", ">>"}}, #line 136 "gen/reserved_keywords.gperf" {"syswrite", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "syswrite"}}, {""}, {""}, #line 331 "gen/reserved_keywords.gperf" {"(", {Enum::Token::Type::LeftParenthesis, 
Enum::Token::Kind::Symbol, "LeftParenthesis", "("}}, #line 337 "gen/reserved_keywords.gperf" {"@{", {Enum::Token::Type::ArrayDereference, Enum::Token::Kind::Modifier, "ArrayDereference", "@{"}}, {""}, {""}, {""}, {""}, #line 374 "gen/reserved_keywords.gperf" {"$,", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$,"}}, {""}, {""}, {""}, {""}, #line 296 "gen/reserved_keywords.gperf" {"-l", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-l"}}, {""}, {""}, {""}, {""}, #line 390 "gen/reserved_keywords.gperf" {"$)", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$)"}}, {""}, {""}, {""}, {""}, #line 305 "gen/reserved_keywords.gperf" {"-z", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-z"}}, {""}, {""}, {""}, {""}, #line 389 "gen/reserved_keywords.gperf" {"$(", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$("}}, {""}, {""}, {""}, {""}, #line 42 "gen/reserved_keywords.gperf" {"<>", {Enum::Token::Type::Diamond, Enum::Token::Kind::Operator, "Diamond", "<>"}}, {""}, #line 173 "gen/reserved_keywords.gperf" {"fork", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "fork"}}, {""}, {""}, #line 185 "gen/reserved_keywords.gperf" {"waitpid", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "waitpid"}}, {""}, #line 148 "gen/reserved_keywords.gperf" {"glob", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "glob"}}, #line 316 "gen/reserved_keywords.gperf" {"local", {Enum::Token::Type::LocalDecl, Enum::Token::Kind::Decl, "LocalDecl", "local"}}, #line 421 "gen/reserved_keywords.gperf" {"y", {Enum::Token::Type::RegAllReplace, Enum::Token::Kind::RegReplacePrefix, "RegAllReplace", "y"}}, #line 348 "gen/reserved_keywords.gperf" {"package", {Enum::Token::Type::Package, Enum::Token::Kind::Package, "Package", "package"}}, {""}, {""}, {""}, {""}, #line 363 "gen/reserved_keywords.gperf" {"$7", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$7"}}, {""}, {""}, {""}, {""}, #line 306 "gen/reserved_keywords.gperf" {"-A", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-A"}}, {""}, {""}, {""}, {""}, #line 55 "gen/reserved_keywords.gperf" {"++", {Enum::Token::Type::Inc, Enum::Token::Kind::Operator, "Inc", "++"}}, {""}, {""}, {""}, {""}, #line 320 "gen/reserved_keywords.gperf" {"if", {Enum::Token::Type::IfStmt, Enum::Token::Kind::Stmt, "IfStmt", "if"}}, {""}, {""}, {""}, {""}, #line 362 "gen/reserved_keywords.gperf" {"$6", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$6"}}, {""}, {""}, {""}, {""}, #line 291 "gen/reserved_keywords.gperf" {"-d", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-d"}}, {""}, #line 191 "gen/reserved_keywords.gperf" {"bind", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "bind"}}, {""}, {""}, #line 361 "gen/reserved_keywords.gperf" {"$5", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$5"}}, {""}, {""}, {""}, {""}, #line 302 "gen/reserved_keywords.gperf" {"-u", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-u"}}, {""}, {""}, {""}, #line 122 "gen/reserved_keywords.gperf" {"format", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "format"}}, #line 352 "gen/reserved_keywords.gperf" {"foreach", {Enum::Token::Type::ForeachStmt, Enum::Token::Kind::Stmt, "ForeachStmt", "foreach"}}, {""}, 
{""}, #line 423 "gen/reserved_keywords.gperf" {"undef", {Enum::Token::Type::Default, Enum::Token::Kind::Term, "Default", "undef"}}, {""}, #line 295 "gen/reserved_keywords.gperf" {"-k", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-k"}}, {""}, {""}, {""}, {""}, #line 360 "gen/reserved_keywords.gperf" {"$4", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$4"}}, #line 276 "gen/reserved_keywords.gperf" {"AUTOLOAD", {Enum::Token::Type::AUTOLOAD, Enum::Token::Kind::AUTOLOAD, "AUTOLOAD", "AUTOLOAD"}}, {""}, {""}, {""}, #line 60 "gen/reserved_keywords.gperf" {"<<", {Enum::Token::Type::LeftShift, Enum::Token::Kind::Operator, "LeftShift", "<<"}}, {""}, {""}, {""}, {""}, #line 359 "gen/reserved_keywords.gperf" {"$3", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$3"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 357 "gen/reserved_keywords.gperf" {"$1", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$1"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 121 "gen/reserved_keywords.gperf" {"flock", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "flock"}}, {""}, #line 338 "gen/reserved_keywords.gperf" {"%{", {Enum::Token::Type::HashDereference, Enum::Token::Kind::Modifier, "HashDereference", "%{"}}, {""}, {""}, {""}, {""}, #line 133 "gen/reserved_keywords.gperf" {"syscall", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "syscall"}}, {""}, {""}, {""}, {""}, #line 356 "gen/reserved_keywords.gperf" {"$0", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$0"}}, {""}, {""}, {""}, {""}, #line 310 "gen/reserved_keywords.gperf" {"-O", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-O"}}, {""}, {""}, {""}, {""}, #line 159 "gen/reserved_keywords.gperf" {"symlink", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "symlink"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 72 "gen/reserved_keywords.gperf" {"my", {Enum::Token::Type::VarDecl, Enum::Token::Kind::Decl, "VarDecl", "my"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 368 "gen/reserved_keywords.gperf" {"$'", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$'"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 376 "gen/reserved_keywords.gperf" {"$\"", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$\""}}, {""}, {""}, {""}, {""}, #line 315 "gen/reserved_keywords.gperf" {"-X", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-X"}}, {""}, {""}, #line 322 "gen/reserved_keywords.gperf" {"elsif", {Enum::Token::Type::ElsifStmt, Enum::Token::Kind::Stmt, "ElsifStmt", "elsif"}}, {""}, {""}, {""}, {""}, {""}, #line 15 "gen/reserved_keywords.gperf" {"-", {Enum::Token::Type::Sub, Enum::Token::Kind::Operator, "Sub", "-"}}, #line 63 "gen/reserved_keywords.gperf" {"||", {Enum::Token::Type::Or, Enum::Token::Kind::Operator, "Or", "||"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 346 "gen/reserved_keywords.gperf" {"->", {Enum::Token::Type::Pointer, Enum::Token::Kind::Operator, "Pointer", "->"}}, {""}, {""}, {""}, {""}, #line 70 "gen/reserved_keywords.gperf" {"//", {Enum::Token::Type::DefaultOperator, Enum::Token::Kind::Operator, "DefaultOperator", "//"}}, {""}, {""}, {""}, {""}, #line 304 "gen/reserved_keywords.gperf" {"-x", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, 
"Handle", "-x"}}, {""}, {""}, {""}, {""}, #line 379 "gen/reserved_keywords.gperf" {"$-", {Enum::Token::Type::SpecificValue, Enum::Token::Kind::Term, "SpecificValue", "$-"}}, {""}, {""}, #line 99 "gen/reserved_keywords.gperf" {"study", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "study"}}, {""}, #line 314 "gen/reserved_keywords.gperf" {"-W", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-W"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 288 "gen/reserved_keywords.gperf" {"break", {Enum::Token::Type::Break, Enum::Token::Kind::Control, "Break", "break"}}, {""}, #line 309 "gen/reserved_keywords.gperf" {"-M", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-M"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 303 "gen/reserved_keywords.gperf" {"-w", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-w"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 289 "gen/reserved_keywords.gperf" {"-b", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-b"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 293 "gen/reserved_keywords.gperf" {"-f", {Enum::Token::Type::Handle, Enum::Token::Kind::Handle, "Handle", "-f"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 167 "gen/reserved_keywords.gperf" {"wantarray", {Enum::Token::Type::BuiltinFunc, Enum::Token::Kind::Function, "BuiltinFunc", "wantarray"}}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, #line 56 "gen/reserved_keywords.gperf" {"--", 
{Enum::Token::Type::Dec, Enum::Token::Kind::Operator, "Dec", "--"}} }; if (len <= MAX_WORD_LENGTH && len >= MIN_WORD_LENGTH) { register int key = hash (str, len); if (key <= MAX_HASH_VALUE && key >= 0) { register const char *s = wordlist[key].name; if (*str == *s && !strcmp (str + 1, s + 1)) return &wordlist[key]; } } return 0; } #line 424 "gen/reserved_keywords.gperf" Compiler_token.cpp100644000765000024 547613603257356 23020 0ustar00goccystaff000000000000Compiler-Lexer-0.23/src/compiler/util#include using namespace std; namespace TokenType = Enum::Token::Type; namespace SyntaxType = Enum::Parser::Syntax; namespace TokenKind = Enum::Token::Kind; Token::Token(string data_, FileInfo finfo_) : token_num(0), total_token_num(0), deparsed_data(""), isDeparsed(false), isDeleted(false) { type = TokenType::Undefined; stype = SyntaxType::Value; info.type = TokenType::Undefined; info.kind = TokenKind::Undefined; info.name = ""; info.data = NULL; info.has_warnings = false; finfo.start_line_num = finfo_.start_line_num; finfo.end_line_num = finfo_.start_line_num; finfo.filename = finfo_.filename; finfo.indent = 0; } Token::Token(Tokens *tokens) : deparsed_data(""), isDeparsed(false), isDeleted(false) { total_token_num = 0; stype = SyntaxType::Value; type = TokenType::Undefined; info.type = TokenType::Undefined; info.kind = TokenKind::Undefined; info.name = ""; info.data = NULL; info.has_warnings = false; _data = ""; size_t size = tokens->size(); TokenPos pos = tokens->begin(); tks = (Token **)safe_malloc(size * PTR_SIZE); token_num = size; size_t i = 0; size_t end_line_num = 0; finfo.indent = 0; for (; i < size; i++) { Token *t = (Token *)*pos; tks[i] = t; if (t->info.has_warnings) { info.has_warnings = true; } if (i == 0) { finfo.start_line_num = tks[i]->finfo.start_line_num; finfo.filename = tks[i]->finfo.filename; } if (t->total_token_num > 1) { total_token_num += t->total_token_num; if (end_line_num < t->finfo.end_line_num) { end_line_num = t->finfo.end_line_num; } } else { total_token_num += 1; if (end_line_num < t->finfo.start_line_num) { end_line_num = t->finfo.start_line_num; } } pos++; } finfo.end_line_num = end_line_num; } const char *Token::deparse(void) { using namespace TokenType; if (isDeparsed) return deparsed_data; string data; isDeparsed = true; if (this->token_num > 0) { if (stype == SyntaxType::Expr) { //deparsed_data += "("; } for (size_t i = 0; i < this->token_num; i++) { data += string(this->tks[i]->deparse()); } if (stype == SyntaxType::Expr) { //deparsed_data += ")"; } } else { switch (info.type) { case String: data += " \"" + string(this->_data) + "\""; break; case RawString: data += " '" + string(this->_data) + "'"; break; case ExecString: data += " `" + string(this->_data) + "`"; break; case RegExp: case Pointer: case RegReplaceFrom: case RegReplaceTo: case RegMiddleDelim: case RegDelim: case RegOpt: data += string(this->_data); break; case HereDocument: data += "\n" + string(this->_data); break; case HereDocumentEnd: data += string(this->_data) + "\n"; break; default: data += " " + string(this->_data); break; } } deparsed_data = (new string(data))->c_str();//cstr(deparsed_data); return deparsed_data; } Compiler_triple_charactor_operator.cpp100644000765000024 425413603257356 27131 0ustar00goccystaff000000000000Compiler-Lexer-0.23/src/compiler/util/* C++ code produced by gperf version 3.0.3 */ /* Command-line: gperf -L C++ gen/triple_charactor_operator.gperf */ /* Computed positions: -k'1,3' */ #include /* maximum key range = 51, duplicates = 0 */ inline unsigned int 
TripleCharactorOperatorMap::hash(register const char *str) { static unsigned char asso_values[] = { 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 10, 54, 1, 54, 54, 54, 8, 54, 54, 54, 25, 3, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 10, 0, 5, 54, 54, 21, 54, 54, 16, 11, 6, 1, 28, 23, 54, 54, 18, 13, 54, 8, 3, 54, 30, 54, 25, 54, 54, 20, 15, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 10, 0, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54 }; return asso_values[(unsigned char)str[2]] + asso_values[(unsigned char)str[0]]; } const char *TripleCharactorOperatorMap::in_word_set(register const char *str) { static const char * triple_charactor_operators[] = { "||=", "&&=", "", "//=", "", ">>=", "", "", "**=", "", "<<=", "$^G", "", "$^P", "", "<=>", "$^F", "", "$^O", "", "$#{", "$^E", "", "$^M", "", "$^X", "$^D", "", "$^L", "", "$^W", "$^A", "", "$^I", "", "$^T", "", "", "$^H", "", "$^R", "", "", "", "", "", "", "", "", "", "..." }; register int key = hash(str); if (key <= TRIPLE_OPERATOR_MAX_HASH_VALUE && key >= 0) { register const char *s = triple_charactor_operators[key]; if (*str == *s && !strcmp (str + 1, s + 1)) return s; } return 0; } Compiler_util.cpp100644000765000024 75113603257356 22624 0ustar00goccystaff000000000000Compiler-Lexer-0.23/src/compiler/util#include static int memory_leaks = 0; void *safe_malloc(size_t size) { void *ret = malloc(size); if (!ret) { fprintf(stderr, "ERROR!!:cannot allocate memory\n"); exit(EXIT_FAILURE); } memset(ret, 0, size); #ifdef DEBUG_MODE memory_leaks += size; #endif return ret; } void safe_free(void *ptr, size_t size) { if (ptr) { free(ptr); ptr = NULL; #ifdef DEBUG_MODE memory_leaks -= size; #else (void)size; #endif } } int leaks(void) { return memory_leaks; } typemap100644000765000024 100513603257356 16135 0ustar00goccystaff000000000000Compiler-Lexer-0.23/srcTYPEMAP Compiler_Lexer T_PTROBJ_SPECIAL INPUT T_PTROBJ_SPECIAL if (sv_derived_from($arg, \"${(my $ntt=$ntype)=~s/_/::/g;\$ntt}\")) { IV tmp = SvIV((SV*)SvRV($arg)); $var = INT2PTR($type, tmp); } else croak(\"$var is not of type ${(my $ntt=$ntype)=~s/_/::/g;\$ntt}\") OUTPUT T_PTROBJ_SPECIAL sv_setref_pv($arg, \"${(my $ntt=$ntype)=~s/_/::/g;\$ntt}\", (void*)$var); Lexer.t100644000765000024 51413603257356 15437 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Data::Dumper; use Test::More tests => 1; BEGIN { use_ok('Compiler::Lexer') }; my $name = $0; #use modules tests print Dumper(Compiler::Lexer->new($name)->get_used_modules(<<'SCRIPT')); use Test::Module; my $hash = { use => "value" }; $hash->{use}; my $a = Test::Module->new(); $a->use(\@args); SCRIPT format.t100644000765000024 2515413603257356 15717 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Data::Dumper; use Test::More; BEGIN { use_ok('Compiler::Lexer') }; subtest 'tokenize' => sub { my $tokens = 
Compiler::Lexer->new('')->tokenize(<<'SCRIPT'); format STDOUT = ok @<<<<<<< $test . my $hoge; SCRIPT is_deeply($tokens, [ bless( { 'kind' => Compiler::Lexer::Kind::T_Decl, 'has_warnings' => 0, 'stype' => 0, 'name' => 'FormatDecl', 'data' => 'format', 'type' => Compiler::Lexer::TokenType::T_FormatDecl, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Handle, 'has_warnings' => 0, 'stype' => 0, 'name' => 'STDOUT', 'data' => 'STDOUT', 'type' => Compiler::Lexer::TokenType::T_STDOUT, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Assign, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Assign', 'data' => '=', 'type' => Compiler::Lexer::TokenType::T_Assign, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Format', 'data' => 'ok @<<<<<<< $test ', 'type' => Compiler::Lexer::TokenType::T_Format, 'line' => 4 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'FormatEnd', 'data' => '.', 'type' => Compiler::Lexer::TokenType::T_FormatEnd, 'line' => 4 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Decl, 'has_warnings' => 0, 'stype' => 0, 'name' => 'VarDecl', 'data' => 'my', 'type' => Compiler::Lexer::TokenType::T_VarDecl, 'line' => 5 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'LocalVar', 'data' => '$hoge', 'type' => Compiler::Lexer::TokenType::T_LocalVar, 'line' => 5 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => 0, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 5 }, 'Compiler::Lexer::Token' ) ]); }; subtest 'omitted handler name' => sub { my $tokens = Compiler::Lexer->new('')->tokenize(<<'SCRIPT'); format = ok @<<<<<<< $test . 
my $hoge; SCRIPT is_deeply($tokens, [ bless( { 'kind' => Compiler::Lexer::Kind::T_Decl, 'has_warnings' => 0, 'stype' => 0, 'name' => 'FormatDecl', 'data' => 'format', 'type' => Compiler::Lexer::TokenType::T_FormatDecl, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Assign, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Assign', 'data' => '=', 'type' => Compiler::Lexer::TokenType::T_Assign, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Format', 'data' => 'ok @<<<<<<< $test ', 'type' => Compiler::Lexer::TokenType::T_Format, 'line' => 4 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'FormatEnd', 'data' => '.', 'type' => Compiler::Lexer::TokenType::T_FormatEnd, 'line' => 4 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Decl, 'has_warnings' => 0, 'stype' => 0, 'name' => 'VarDecl', 'data' => 'my', 'type' => Compiler::Lexer::TokenType::T_VarDecl, 'line' => 5 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'LocalVar', 'data' => '$hoge', 'type' => Compiler::Lexer::TokenType::T_LocalVar, 'line' => 5 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => 0, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 5 }, 'Compiler::Lexer::Token' ) ]); }; subtest 'do not misrecognize when confusing case' => sub { my $tokens = Compiler::Lexer->new('')->tokenize(<<'SCRIPT'); my $foo = { format => 1, }; my $bar = "asdf"; 1; SCRIPT is_deeply($tokens, [ bless( { 'kind' => Compiler::Lexer::Kind::T_Decl, 'has_warnings' => 0, 'stype' => 0, 'name' => 'VarDecl', 'data' => 'my', 'type' => Compiler::Lexer::TokenType::T_VarDecl, 'line' => 1, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'LocalVar', 'data' => '$foo', 'type' => Compiler::Lexer::TokenType::T_LocalVar, 'line' => 1, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Assign, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Assign', 'data' => '=', 'type' => Compiler::Lexer::TokenType::T_Assign, 'line' => 1, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Symbol, 'has_warnings' => 0, 'stype' => 0, 'name' => 'LeftBrace', 'data' => '{', 'type' => Compiler::Lexer::TokenType::T_LeftBrace, 'line' => 1, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Key', 'data' => 'format', 'type' => Compiler::Lexer::TokenType::T_Key, 'line' => 2, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Operator, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Arrow', 'data' => '=>', 'type' => Compiler::Lexer::TokenType::T_Arrow, 'line' => 2, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Int', 'data' => '1', 'type' => Compiler::Lexer::TokenType::T_Int, 'line' => 2, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Comma, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Comma', 'data' => ',', 'type' => Compiler::Lexer::TokenType::T_Comma, 'line' => 2, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Symbol, 'has_warnings' 
=> 0, 'stype' => 0, 'name' => 'RightBrace', 'data' => '}', 'type' => Compiler::Lexer::TokenType::T_RightBrace, 'line' => 3, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => 0, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 3, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Decl, 'has_warnings' => 0, 'stype' => 0, 'name' => 'VarDecl', 'data' => 'my', 'type' => Compiler::Lexer::TokenType::T_VarDecl, 'line' => 5, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'LocalVar', 'data' => '$bar', 'type' => Compiler::Lexer::TokenType::T_LocalVar, 'line' => 5, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Assign, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Assign', 'data' => '=', 'type' => Compiler::Lexer::TokenType::T_Assign, 'line' => 5, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'String', 'data' => 'asdf', 'type' => Compiler::Lexer::TokenType::T_String, 'line' => 6, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => 0, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 6, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Int', 'data' => '1', 'type' => Compiler::Lexer::TokenType::T_Int, 'line' => 7, }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => 0, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 7, }, 'Compiler::Lexer::Token' ) ]); }; done_testing; issue_32.t100644000765000024 146313603257356 16040 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; is_deeply([ map { $_->name } @{Compiler::Lexer->new->tokenize("q'foobar'")} ], [qw/RegQuote RegDelim RegExp RegDelim/]); is_deeply([ map { $_->name } @{Compiler::Lexer->new->tokenize('q"foobar"')} ], [qw/RegQuote RegDelim RegExp RegDelim/]); is_deeply([ map { $_->name } @{Compiler::Lexer->new->tokenize("qq'foobar'")} ], [qw/RegDoubleQuote RegDelim RegExp RegDelim/]); is_deeply([ map { $_->name } @{Compiler::Lexer->new->tokenize('qq"foobar"')} ], [qw/RegDoubleQuote RegDelim RegExp RegDelim/]); is_deeply([ map { $_->name } @{Compiler::Lexer->new->tokenize("qw'foobar'")} ], [qw/RegList RegDelim RegExp RegDelim/]); is_deeply([ map { $_->name } @{Compiler::Lexer->new->tokenize('qw"foobar"')} ], [qw/RegList RegDelim RegExp RegDelim/]); done_testing; issue_35.t100644000765000024 247413603257356 16046 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; use Data::Dumper; my $tokens = Compiler::Lexer->new->tokenize('foo\'Bar;'); is_deeply($tokens, [ bless( { 'kind' => Compiler::Lexer::Kind::T_Namespace, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Namespace', 'data' => 'foo', 'type' => Compiler::Lexer::TokenType::T_Namespace, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Operator, 'has_warnings' => 0, 'stype' => 0, 'name' => 'NamespaceResolver', 'data' => '\'', 'type' => Compiler::Lexer::TokenType::T_NamespaceResolver, 'line' => 1 }, 'Compiler::Lexer::Token' ), 
bless( { 'kind' => Compiler::Lexer::Kind::T_Namespace, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Namespace', 'data' => 'Bar', 'type' => Compiler::Lexer::TokenType::T_Namespace, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => 0, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 1 }, 'Compiler::Lexer::Token' ) ]); done_testing; issue_38.t100644000765000024 45413603257356 16025 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; use Data::Dumper; my $tokens = Compiler::Lexer->new->tokenize('$x->y()->z'); my @methods = map { $_->data } grep { $_->type == Compiler::Lexer::TokenType::T_Method } @$tokens; is $methods[0], 'y'; is $methods[1], 'z'; done_testing; issue_39.t100644000765000024 43313603257356 16023 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; my $src = "( sub { /!/ }, '//' )"; my $lexer = Compiler::Lexer->new('-'); my $tokens = $lexer->tokenize($src); my @dor = grep { $_->name eq 'DefaultOperator' && $_->data eq '//' } @$tokens; is 0+@dor, 0; done_testing; issue_40.t100644000765000024 174613603257356 16043 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; use Data::Dumper; my $tokens = Compiler::Lexer->new->tokenize("'' / 1"); is_deeply($tokens, [ bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'RawString', 'data' => '', 'type' => Compiler::Lexer::TokenType::T_RawString, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Operator, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Div', 'data' => '/', 'type' => Compiler::Lexer::TokenType::T_Div, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Int', 'data' => '1', 'type' => Compiler::Lexer::TokenType::T_Int, 'line' => 1 }, 'Compiler::Lexer::Token' ) ]); done_testing; issue_42.t100644000765000024 176013603257356 16041 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; my $tokens = Compiler::Lexer->new->tokenize('$foo x= 3'); is_deeply($tokens, [ bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'GlobalVar', 'data' => '$foo', 'type' => Compiler::Lexer::TokenType::T_GlobalVar, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Assign, 'has_warnings' => 0, 'stype' => 0, 'name' => 'StringMulEqual', 'data' => 'x=', 'type' => Compiler::Lexer::TokenType::T_StringMulEqual, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Int', 'data' => '3', 'type' => Compiler::Lexer::TokenType::T_Int, 'line' => 1 }, 'Compiler::Lexer::Token' ) ]); done_testing; issue_43.t100644000765000024 45613603257356 16023 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; my $tokens = Compiler::Lexer->new->tokenize(<<'...'); /foo/m; /bar/; ... 
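# Descriptive note (added comment): collect the distinct delimiter characters of every
# RegDelim token produced by tokenizing the two regex literals above; the test below
# asserts that '/' is the only delimiter seen, i.e. the trailing 'm' modifier on the
# first pattern must not introduce any additional RegDelim token.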
my %delim = map { ($_->data => 1) } grep { $_->type == Compiler::Lexer::TokenType::T_RegDelim } @$tokens; is_deeply([keys %delim], ['/']); done_testing; issue_44.t100644000765000024 243213603257356 16040 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; use Data::Dumper; my $tokens = Compiler::Lexer->new->tokenize('not /\d/'); is_deeply($tokens, [ bless( { 'kind' => Compiler::Lexer::Kind::T_SingleTerm, 'has_warnings' => 0, 'stype' => 0, 'name' => 'AlphabetNot', 'data' => 'not', 'type' => Compiler::Lexer::TokenType::T_AlphabetNot, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'RegDelim', 'data' => '/', 'type' => Compiler::Lexer::TokenType::T_RegDelim, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'RegExp', 'data' => '\\d', 'type' => Compiler::Lexer::TokenType::T_RegExp, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'RegDelim', 'data' => '/', 'type' => Compiler::Lexer::TokenType::T_RegDelim, 'line' => 1 }, 'Compiler::Lexer::Token' ) ]); done_testing; issue_45.t100644000765000024 17313603257356 16021 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; Compiler::Lexer->new->tokenize('^/'); ok 1; done_testing; issue_48.t100644000765000024 173013603257356 16044 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; my $tokens = Compiler::Lexer->new->tokenize('$foo-1'); is_deeply($tokens, [ bless( { 'line' => 1, 'kind' => Compiler::Lexer::Kind::T_Term, 'name' => 'GlobalVar', 'type' => Compiler::Lexer::TokenType::T_GlobalVar, 'has_warnings' => 0, 'stype' => 0, 'data' => '$foo' }, 'Compiler::Lexer::Token' ), bless( { 'line' => 1, 'kind' => Compiler::Lexer::Kind::T_Operator, 'name' => 'Sub', 'type' => Compiler::Lexer::TokenType::T_Sub, 'stype' => 0, 'has_warnings' => 0, 'data' => '-' }, 'Compiler::Lexer::Token' ), bless( { 'has_warnings' => 0, 'stype' => 0, 'data' => '1', 'line' => 1, 'type' => Compiler::Lexer::TokenType::T_Int, 'kind' => Compiler::Lexer::Kind::T_Term, 'name' => 'Int' }, 'Compiler::Lexer::Token' ) ]); done_testing; issue_53.t100644000765000024 423213603257356 16040 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; use Data::Dumper; my $tokens = Compiler::Lexer->new->tokenize('s///;'); print Dumper $tokens; is_deeply($tokens, [ bless( { 'type' => Compiler::Lexer::TokenType::T_RegReplace, 'name' => 'RegReplace', 'stype' => 0, 'data' => 's', 'has_warnings' => 0, 'line' => 1, 'kind' => Compiler::Lexer::Kind::T_RegReplacePrefix, }, 'Compiler::Lexer::Token' ), bless( { 'stype' => 0, 'data' => '/', 'type' => Compiler::Lexer::TokenType::T_RegDelim, 'name' => 'RegDelim', 'has_warnings' => 0, 'kind' => Compiler::Lexer::Kind::T_Term, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'has_warnings' => 0, 'name' => 'RegReplaceFrom', 'type' => Compiler::Lexer::TokenType::T_RegReplaceFrom, 'data' => '', 'stype' => 0, 'line' => 1, 'kind' => Compiler::Lexer::Kind::T_Term }, 'Compiler::Lexer::Token' ), bless( { 'line' => 1, 'kind' => Compiler::Lexer::Kind::T_Term, 'type' => Compiler::Lexer::TokenType::T_RegMiddleDelim, 'name' => 'RegMiddleDelim', 'data' => '/', 'stype' => 0, 
'has_warnings' => 0 }, 'Compiler::Lexer::Token' ), bless( { 'line' => 1, 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'type' => Compiler::Lexer::TokenType::T_RegReplaceTo, 'name' => 'RegReplaceTo', 'stype' => 0, 'data' => '' }, 'Compiler::Lexer::Token' ), bless( { 'line' => 1, 'kind' => Compiler::Lexer::Kind::T_Term, 'name' => 'RegDelim', 'type' => Compiler::Lexer::TokenType::T_RegDelim, 'stype' => 0, 'data' => '/', 'has_warnings' => 0 }, 'Compiler::Lexer::Token' ), bless( { 'has_warnings' => 0, 'name' => 'SemiColon', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'stype' => 0, 'data' => ';', 'line' => 1, 'kind' => Compiler::Lexer::Kind::T_StmtEnd }, 'Compiler::Lexer::Token' ) ]); done_testing; issue_69.t100644000765000024 213113603257356 16043 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; my $tokens = Compiler::Lexer->new->recursive_tokenize('{}'); is_deeply($tokens, { 'main' => [ bless( { 'kind' => 22, 'has_warnings' => 0, 'stype' => 0, 'name' => 'LeftBrace', 'data' => '{', 'type' => 109, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => 22, 'has_warnings' => 0, 'stype' => 0, 'name' => 'RightBrace', 'data' => '}', 'type' => 110, 'line' => 1 }, 'Compiler::Lexer::Token' ) ] }); done_testing; issue_reports.t100644000765000024 2373213603257356 17335 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Data::Dumper; use Test::More; BEGIN { use_ok('Compiler::Lexer') }; my $tokens = Compiler::Lexer->new('')->tokenize(<<'SCRIPT'); %-; %+; @-; @+; $-{a}; $+{a}; @-{a}; @+{a}; SCRIPT subtest 'tokenize' => sub { is_deeply($tokens, [ bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'GlobalHashVar', 'data' => '%-', 'type' => Compiler::Lexer::TokenType::T_GlobalHashVar, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'GlobalHashVar', 'data' => '%+', 'type' => Compiler::Lexer::TokenType::T_GlobalHashVar, 'line' => 2 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 2 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'GlobalArrayVar', 'data' => '@-', 'type' => Compiler::Lexer::TokenType::T_GlobalArrayVar, 'line' => 3 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 3 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'GlobalArrayVar', 'data' => '@+', 'type' => Compiler::Lexer::TokenType::T_GlobalArrayVar,, 'line' => 4 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 
'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 4 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'SpecificValue', 'data' => '$-', 'type' => Compiler::Lexer::TokenType::T_SpecificValue, 'line' => 5 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Symbol, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'LeftBrace', 'data' => '{', 'type' => Compiler::Lexer::TokenType::T_LeftBrace, 'line' => 5 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'Key', 'data' => 'a', 'type' => Compiler::Lexer::TokenType::T_Key, 'line' => 5 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Symbol, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'RightBrace', 'data' => '}', 'type' => Compiler::Lexer::TokenType::T_RightBrace, 'line' => 5 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 5 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'SpecificValue', 'data' => '$+', 'type' => Compiler::Lexer::TokenType::T_SpecificValue, 'line' => 6 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Symbol, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'LeftBrace', 'data' => '{', 'type' => Compiler::Lexer::TokenType::T_LeftBrace, 'line' => 6 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'Key', 'data' => 'a', 'type' => Compiler::Lexer::TokenType::T_Key, 'line' => 6 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Symbol, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'RightBrace', 'data' => '}', 'type' => Compiler::Lexer::TokenType::T_RightBrace, 'line' => 6 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 6 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'ArrayVar', 'data' => '@-', 'type' => Compiler::Lexer::TokenType::T_ArrayVar, 'line' => 7 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Symbol, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'LeftBrace', 'data' => '{', 'type' => Compiler::Lexer::TokenType::T_LeftBrace, 'line' => 7 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'Key', 'data' => 'a', 'type' => Compiler::Lexer::TokenType::T_Key, 'line' => 7 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Symbol, 'has_warnings' => 0, 
'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'RightBrace', 'data' => '}', 'type' => Compiler::Lexer::TokenType::T_RightBrace, 'line' => 7 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 7 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'ArrayVar', 'data' => '@+', 'type' => Compiler::Lexer::TokenType::T_ArrayVar, 'line' => 8 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Symbol, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'LeftBrace', 'data' => '{', 'type' => Compiler::Lexer::TokenType::T_LeftBrace, 'line' => 8 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'Key', 'data' => 'a', 'type' => Compiler::Lexer::TokenType::T_Key, 'line' => 8 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Symbol, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'RightBrace', 'data' => '}', 'type' => Compiler::Lexer::TokenType::T_RightBrace, 'line' => 8 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 8 }, 'Compiler::Lexer::Token' ) ]); }; done_testing; package.t100644000765000024 33113603257356 15750 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More tests => 1; BEGIN { use_ok('Compiler::Lexer') }; use Data::Dumper; my $tokens = Compiler::Lexer->new('-')->tokenize('package Foo'); print Dumper $tokens; perl6.t100644000765000024 37213603257356 15412 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use utf8; use v5.10; use Compiler::Lexer; use Test::More; my $codeA = q|{}/'|; my $codeB = q|$g-f~B,'';|; for my $code ($codeA, $codeB) { my $tokens = Compiler::Lexer->new->tokenize($code); ok(1); } done_testing; recursive_tokenize.t100644000765000024 41713603257356 20301 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; my $results = Compiler::Lexer->new('-')->recursive_tokenize(<<'SCRIPT'); use Compiler::Lexer; my $lexer = Compiler::Lexer->new('-'); SCRIPT ok(scalar @{$results->{'Compiler::Lexer'}} > 0); done_testing; verbose.t100644000765000024 1715213603257356 16073 0ustar00goccystaff000000000000Compiler-Lexer-0.23/tuse strict; use warnings; use Compiler::Lexer; use Test::More; use Data::Dumper; my $lexer = Compiler::Lexer->new({ verbose => 1 }); my $tokens = $lexer->tokenize(<<'SCRIPT'); # comment line my $var; =pod =head1 =head2 =cut my $foo = <<" --"; print "Hello"; print "Goodbye"; -- SCRIPT subtest 'tokenize' => sub { is_deeply($tokens, [ bless( { 'kind' => Compiler::Lexer::Kind::T_Verbose, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Comment', 'data' => '# comment line', 'type' => Compiler::Lexer::TokenType::T_Comment, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Verbose, 'has_warnings' => 0, 'stype' => 0, 'name' => 'WhiteSpace', 'data' => ' ', 'type' => 
Compiler::Lexer::TokenType::T_WhiteSpace, 'line' => 1 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Decl, 'has_warnings' => 0, 'stype' => 0, 'name' => 'VarDecl', 'data' => 'my', 'type' => Compiler::Lexer::TokenType::T_VarDecl, 'line' => 2 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Verbose, 'has_warnings' => 0, 'stype' => 0, 'name' => 'WhiteSpace', 'data' => ' ', 'type' => Compiler::Lexer::TokenType::T_WhiteSpace, 'line' => 2 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'LocalVar', 'data' => '$var', 'type' => Compiler::Lexer::TokenType::T_LocalVar, 'line' => 2 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => 0, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 2 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Verbose, 'has_warnings' => 0, 'stype' => 0, 'name' => 'WhiteSpace', 'data' => ' ', 'type' => Compiler::Lexer::TokenType::T_WhiteSpace, 'line' => 2 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Verbose, 'has_warnings' => 0, 'stype' => 0, 'name' => 'Pod', 'data' => '=pod =head1 =head2 =cut', 'type' => Compiler::Lexer::TokenType::T_Pod, 'line' => 9 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Verbose, 'has_warnings' => 0, 'stype' => 0, 'name' => 'WhiteSpace', 'data' => ' ', 'type' => Compiler::Lexer::TokenType::T_WhiteSpace, 'line' => 9 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Decl, 'has_warnings' => 0, 'stype' => 0, 'name' => 'VarDecl', 'data' => 'my', 'type' => Compiler::Lexer::TokenType::T_VarDecl, 'line' => 10 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Verbose, 'has_warnings' => 0, 'stype' => 0, 'name' => 'WhiteSpace', 'data' => ' ', 'type' => Compiler::Lexer::TokenType::T_WhiteSpace, 'line' => 10 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => 0, 'name' => 'LocalVar', 'data' => '$foo', 'type' => Compiler::Lexer::TokenType::T_LocalVar, 'line' => 10 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Verbose, 'has_warnings' => 0, 'stype' => 0, 'name' => 'WhiteSpace', 'data' => ' ', 'type' => Compiler::Lexer::TokenType::T_WhiteSpace, 'line' => 10 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Assign, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'Assign', 'data' => '=', 'type' => Compiler::Lexer::TokenType::T_Assign, 'line' => 10 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Verbose, 'has_warnings' => 0, 'stype' => 0, 'name' => 'WhiteSpace', 'data' => ' ', 'type' => Compiler::Lexer::TokenType::T_WhiteSpace, 'line' => 10 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Operator, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'LeftShift', 'data' => '<<', 'type' => Compiler::Lexer::TokenType::T_LeftShift, 'line' => 10 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'HereDocumentTag', 'data' => ' --', 'type' => Compiler::Lexer::TokenType::T_HereDocumentTag, 'line' => 10 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => 
Compiler::Lexer::Kind::T_StmtEnd, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'SemiColon', 'data' => ';', 'type' => Compiler::Lexer::TokenType::T_SemiColon, 'line' => 10 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Verbose, 'has_warnings' => 0, 'stype' => 0, 'name' => 'WhiteSpace', 'data' => ' ', 'type' => Compiler::Lexer::TokenType::T_WhiteSpace, 'line' => 10 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'HereDocument', 'data' => ' print "Hello"; print "Goodbye"; ', 'type' => Compiler::Lexer::TokenType::T_HereDocument, 'line' => 13 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Term, 'has_warnings' => 0, 'stype' => Compiler::Lexer::SyntaxType::T_Value, 'name' => 'HereDocumentEnd', 'data' => ' --', 'type' => Compiler::Lexer::TokenType::T_HereDocumentEnd, 'line' => 13 }, 'Compiler::Lexer::Token' ), bless( { 'kind' => Compiler::Lexer::Kind::T_Verbose, 'has_warnings' => 0, 'stype' => 0, 'name' => 'WhiteSpace', 'data' => ' ', 'type' => Compiler::Lexer::TokenType::T_WhiteSpace, 'line' => 13 }, 'Compiler::Lexer::Token' ), ]); }; done_testing; typemap100644000765000024 100513603257356 15346 0ustar00goccystaff000000000000Compiler-Lexer-0.23TYPEMAP Compiler_Lexer T_PTROBJ_SPECIAL INPUT T_PTROBJ_SPECIAL if (sv_derived_from($arg, \"${(my $ntt=$ntype)=~s/_/::/g;\$ntt}\")) { IV tmp = SvIV((SV*)SvRV($arg)); $var = INT2PTR($type, tmp); } else croak(\"$var is not of type ${(my $ntt=$ntype)=~s/_/::/g;\$ntt}\") OUTPUT T_PTROBJ_SPECIAL sv_setref_pv($arg, \"${(my $ntt=$ntype)=~s/_/::/g;\$ntt}\", (void*)$var); META.yml100644000765000024 325713603257356 15230 0ustar00goccystaff000000000000Compiler-Lexer-0.23--- abstract: 'Lexical Analyzer for Perl5' author: - 'Masaaki Goshima (goccy) ' build_requires: Devel::PPPort: '3.19' ExtUtils::MakeMaker: '6.59' ExtUtils::ParseXS: '2.21' Test::More: '0.95' configure_requires: Module::Build: '0.4005' Module::Build::XSUtil: '0.02' dynamic_config: 0 generated_by: 'Minilla/v3.1.8, CPAN::Meta::Converter version 2.150010' license: perl meta-spec: url: http://module-build.sourceforge.net/META-spec-v1.4.html version: '1.4' name: Compiler-Lexer no_index: directory: - t - examples - builder - experiments provides: Compiler::Lexer: file: lib/Compiler/Lexer.pm version: '0.23' Compiler::Lexer::Kind: file: lib/Compiler/Lexer/Constants.pm Compiler::Lexer::SyntaxType: file: lib/Compiler/Lexer/Constants.pm Compiler::Lexer::Token: file: lib/Compiler/Lexer/Token.pm Compiler::Lexer::TokenType: file: lib/Compiler/Lexer/Constants.pm requires: XSLoader: '0.02' perl: '5.008001' resources: bugtracker: https://github.com/goccy/p5-Compiler-Lexer/issues homepage: https://github.com/goccy/p5-Compiler-Lexer repository: git://github.com/goccy/p5-Compiler-Lexer.git version: '0.23' x_authority: cpan:GOCCY x_contributors: - 'Fumihiro Itoh ' - 'K ' - 'Masaaki Goshima ' - 'Masaaki Goshima ' - 'Olivier Mengué ' - 'Reini Urban ' - 'Syohei YOSHIDA ' - 'brian d foy ' - 'moznion ' - 'tokuhirom ' x_serialization_backend: 'CPAN::Meta::YAML version 0.018' x_static_install: 0 MANIFEST100644000765000024 232313603257356 15101 0ustar00goccystaff000000000000Compiler-Lexer-0.23Build.PL Changes LICENSE META.json README.md builder/MyBuilder.pm cpanfile cpanfile.snapshot example/benchmark.pl example/sample.pl gen/double_charactor_operator.gperf gen/gen_constants.yaml gen/gen_decl.pl gen/reserved_keywords.gperf 
gen/test_generator.pl gen/triple_charactor_operator.gperf include/common.hpp include/gen_token.hpp include/keyword.hpp include/lexer.hpp include/token.hpp lib/Compiler/Lexer.pm lib/Compiler/Lexer/Constants.pm lib/Compiler/Lexer/Token.pm minil.toml src/Compiler-Lexer.xs src/compiler/lexer/Compiler_annotator.cpp src/compiler/lexer/Compiler_lexer.cpp src/compiler/lexer/Compiler_manager.cpp src/compiler/lexer/Compiler_scanner.cpp src/compiler/util/Compiler_double_charactor_operator.cpp src/compiler/util/Compiler_gen_token_decl.cpp src/compiler/util/Compiler_reserved_keyword.cpp src/compiler/util/Compiler_token.cpp src/compiler/util/Compiler_triple_charactor_operator.cpp src/compiler/util/Compiler_util.cpp src/typemap t/Lexer.t t/format.t t/issue_32.t t/issue_35.t t/issue_38.t t/issue_39.t t/issue_40.t t/issue_42.t t/issue_43.t t/issue_44.t t/issue_45.t t/issue_48.t t/issue_53.t t/issue_69.t t/issue_reports.t t/package.t t/perl6.t t/recursive_tokenize.t t/verbose.t typemap META.yml MANIFEST
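The bundled tests above (t/package.t, t/verbose.t, t/recursive_tokenize.t) exercise the public API of Compiler::Lexer: a constructor that accepts either a filename label or an options hash, a tokenize method returning an array reference of blessed Compiler::Lexer::Token hashes, and recursive_tokenize returning a hash reference keyed by module name. The following is a minimal usage sketch assembled from those tests; it is not part of the distribution, and the printed fields rely only on the token hash keys (name, data, line) that the expected structures in t/verbose.t show verbatim.

    use strict;
    use warnings;
    use Compiler::Lexer;

    # '-' is the filename label the bundled tests pass to the constructor.
    my $lexer  = Compiler::Lexer->new('-');
    my $tokens = $lexer->tokenize('my $x = 1;');

    for my $token (@$tokens) {
        # Tokens are blessed hashes with name/data/type/kind/stype/line keys,
        # as shown by the is_deeply fixtures in t/verbose.t.
        printf "%-12s %-10s (line %d)\n",
            $token->{name}, $token->{data}, $token->{line};
    }

    # With verbose => 1 the lexer also emits WhiteSpace, Comment and Pod
    # tokens (see t/verbose.t).
    my $verbose_tokens = Compiler::Lexer->new({ verbose => 1 })
                                        ->tokenize("# note\nmy \$y;\n");

    # recursive_tokenize follows use'd modules and returns a hash reference
    # keyed by module name (see t/recursive_tokenize.t).
    my $results = Compiler::Lexer->new('-')
                                 ->recursive_tokenize('use Compiler::Lexer;');
    print scalar @{ $results->{'Compiler::Lexer'} }, " tokens for Compiler::Lexer\n";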