.clang-format0100644 0000000 0000000 00000000033 13654546140 012164 0ustar000000000 0000000 --- BasedOnStyle: Chromium .gitignore0100644 0000000 0000000 00000000347 13654546140 011611 0ustar000000000 0000000 kati !cmd/kati/ ckati para *.o *.d out/ repo/android/ repo/glog/ repo/maloader/ testcase_parse_benchmark_test.go bench-old.out bench-new.out find_test ninja_test string_piece_test strutil_bench strutil_test go_src_stamp version.cc .travis.yml0100644 0000000 0000000 00000001176 13654546140 011733 0ustar000000000 0000000 language: cpp dist: trusty sudo: required compiler: - clang addons: apt: update: true sources: - ubuntu-toolchain-r-test - llvm-toolchain-trusty-7 packages: - clang-7 - clang-format-7 - realpath cache: apt before_script: - wget https://github.com/ninja-build/ninja/releases/download/v1.7.2/ninja-linux.zip - unzip ninja-linux.zip -d ~/bin script: - export CXX=clang++-7 - make -j4 ckati ckati_tests - ./clang-format-check - ruby runtest.rb -c - ruby runtest.rb -c -n - ruby runtest.rb -c -n -a - ./ninja_test - ./string_piece_test - ./strutil_test - ./find_test AUTHORS0100644 0000000 0000000 00000000666 13654546140 010675 0ustar000000000 0000000 # This is the official list of kati authors for copyright purposes. # This file is distinct from the CONTRIBUTORS files. # See the latter for an explanation. # # Names should be added to this file as: # Name or Organization # The email address is not required for organizations. # # Please keep the list sorted. Google Inc. Koichi Shiraishi Kouhei Sutou Po Hu Android.bp0100644 0000000 0000000 00000004606 13654546140 011526 0ustar000000000 0000000 // Copyright 2016 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. cc_defaults { name: "ckati_defaults", cflags: [ "-W", "-Wall", "-Werror", "-DNOLOG", ], tidy_checks: [ "-google-global-names-in-headers", "-google-build-using-namespace", "-google-explicit-constructor", ], } cc_library_host_static { name: "libckati", defaults: ["ckati_defaults"], srcs: [ "affinity.cc", "command.cc", "dep.cc", "eval.cc", "exec.cc", "expr.cc", "file.cc", "file_cache.cc", "fileutil.cc", "find.cc", "flags.cc", "func.cc", "io.cc", "log.cc", "ninja.cc", "parser.cc", "regen.cc", "rule.cc", "stats.cc", "stmt.cc", "string_piece.cc", "stringprintf.cc", "strutil.cc", "symtab.cc", "thread_pool.cc", "timeutil.cc", "var.cc", "version_unknown.cc", ], } cc_binary_host { name: "ckati", defaults: ["ckati_defaults"], srcs: ["main.cc"], whole_static_libs: ["libckati"], target: { linux_glibc: { shared_libs: ["libjemalloc"], }, }, } cc_binary_host { name: "ckati_stamp_dump", defaults: ["ckati_defaults"], srcs: ["regen_dump.cc"], static_libs: ["libckati"], } cc_test_host { name: "ckati_test", defaults: ["ckati_defaults"], test_per_src: true, srcs: [ "find_test.cc", "ninja_test.cc", "string_piece_test.cc", "strutil_bench.cc", "strutil_test.cc", ], gtest: false, static_libs: ["libckati"], } cc_benchmark_host { name: "ckati_fileutil_bench", defaults: ["ckati_defaults"], srcs: [ "fileutil_bench.cc", ], static_libs: ["libckati"], } CONTRIBUTING.md0100644 0000000 0000000 00000004665 13654546140 012061 0ustar000000000 0000000 # How to contribute # We'd love to accept your patches and contributions to this project. 
There are a just a few small guidelines you need to follow. ## Contributor License Agreement ## Contributions to any Google project must be accompanied by a Contributor License Agreement. This is not a copyright **assignment**, it simply gives Google permission to use and redistribute your contributions as part of the project. * If you are an individual writing original source code and you're sure you own the intellectual property, then you'll need to sign an [individual CLA][]. * If you work for a company that wants to allow you to contribute your work, then you'll need to sign a [corporate CLA][]. You generally only need to submit a CLA once, so if you've already submitted one (even if it was for a different project), you probably don't need to do it again. [individual CLA]: https://developers.google.com/open-source/cla/individual [corporate CLA]: https://developers.google.com/open-source/cla/corporate Once your CLA is submitted (or if you already submitted one for another Google project), make a commit adding yourself to the [AUTHORS][] and [CONTRIBUTORS][] files. This commit can be part of your first [pull request][]. [AUTHORS]: AUTHORS [CONTRIBUTORS]: CONTRIBUTORS ## Submitting a patch ## 1. It's generally best to start by opening a new issue describing the bug or feature you're intending to fix. Even if you think it's relatively minor, it's helpful to know what people are working on. Mention in the initial issue that you are planning to work on that bug or feature so that it can be assigned to you. 1. Follow the normal process of [forking][] the project, and setup a new branch to work in. It's important that each group of changes be done in separate branches in order to ensure that a pull request only includes the commits related to that bug or feature. 1. Do your best to have [well-formed commit messages][] for each change. 
This provides consistency throughout the project, and ensures that commit messages are able to be formatted properly by various git tools. 1. Finally, push the commits to your fork and submit a [pull request][]. [forking]: https://help.github.com/articles/fork-a-repo [well-formed commit messages]: http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html [pull request]: https://help.github.com/articles/creating-a-pull-request CONTRIBUTORS0100644 0000000 0000000 00000002445 13654546140 011502 0ustar000000000 0000000 # People who have agreed to one of the CLAs and can contribute patches. # The AUTHORS file lists the copyright holders; this file # lists people. For example, Google employees are listed here # but not in AUTHORS, because Google holds the copyright. # # Names should be added to this file only after verifying that # the individual or the individual's organization has agreed to # the appropriate Contributor License Agreement, found here: # # https://developers.google.com/open-source/cla/individual # https://developers.google.com/open-source/cla/corporate # # The agreement for individuals can be filled out on the web. # # When adding J Random Contributor's name to this file, # either J's name or J's organization's name should be # added to the AUTHORS file, depending on whether the # individual or corporate CLA was used. # # Names should be added to this file as: # Name # # Please keep the list sorted. Colin Cross Dan Willemsen Fumitoshi Ukai Koichi Shiraishi Kouhei Sutou Po Hu Ryo Hashimoto Shinichiro Hamaji Stefan Becker Steve McKay Taiju Tsuiki INTERNALS.md0100644 0000000 0000000 00000052610 13654546140 011502 0ustar000000000 0000000 Kati internals ============== This is an informal document about internals of kati. This document is not meant to be a comprehensive document of kati or GNU make. This explains some random topics which other programmers may be interested in. 
Motivation ---------- The motivation of kati was to speed up Android platform build. Especially, its incremental build time was the main focus. Android platform's build system is a very unique system. It provides a DSL, (ab)using Turing-completeness of GNU make. The DSL allows developers to write build rules in a descriptive way, but the downside is it's complicated and slow. When we say a build system is slow, we consider "null build" and "full build". Null build is a build which does nothing, because all output files are already up-to-date. Full build is a build which builds everything, because there were nothing which have been already built. Actual builds in daily development are somewhere between null build and full build. Most benchmarks below were done for null build. For Android with my fairly beefy workstation, null build took ~100 secs with GNU make. This means you needed to wait ~100 secs to see if there's a compile error when you changed a single C file. To be fair, things were not that bad. There are tools called mm/mmm. They allow developers to build an individual module. As they ignore dependencies between modules, they are fast. However, you need to be somewhat experienced to use them properly. You should know which modules will be affected by your change. It would be nicer if you can just type "make" whenever you change something. This is why we started this project. We decided to create a GNU make clone from scratch, but there were some other options. One option was to replace all Android.mk by files with a better format. There is actually a longer-term project for this. Kati was planned to be a short-term project. Another option was to hack GNU make instead of developing a clone. We didn't take this option because we thought the source code of GNU make is somewhat complicated due to historical reason. It's written in old-style C, has a lot of ifdefs for some unknown architectures, etc. Currently, kati's main mode is --ninja mode. 
Instead of executing build commands by itself, kati generates build.ninja file and [ninja](https://github.com/martine/ninja) actually runs commands. There were some back-and-forths before kati became the current form. Some experiments succeeded and some others failed. We even changed the language for kati. At first, we wrote kati in Go. We naively expected we can get enough performance with Go. I guessed at least one of the following statements are true: 1. GNU make is not very optimized for computation heavy Makefiles, 2. Go is fast for our purpose, or 3. we can come up with some optimization tricks for Android's build system. As for 3, some of such optimization succeeded but it's performance gain didn't cancel the slowness of Go. Go's performance would be somewhat interesting topic. I didn't study the performance difference in detail, but it seemed both our use of Go and Go language itself were making the Go version of kati slower. As for our fault, I think Go version has more unnecessary string allocations than C++ version has. As for Go itself, it seemed GC was the main show-stopper. For example, Android's build system defines about one million make variables, and buffers for them will be never freed. IIRC, this kind of allocation pattern isn't good for non-generational GC. Go version and test cases were written by ukai and me, and C++ rewrite was done mostly by me. The rest of this document is mostly about the C++ version. Overall architecture -------------------- Kati consists of the following components: * Parser * Evaluator * Dependency builder * Executor * Ninja generator A Makefile has some statements which consist of zero or more expressions. There are two parsers and two evaluators - one for statements and the other for expressions. Most of users of GNU make may not care about the evaluator much. However, GNU make's evaluator is very powerful and is Turing-complete. For Android's null build, most time is spent in this phase. 
Other tasks, such as building dependency graphs and calling stat function for build targets, are not the bottleneck. This would be a very Android specific characteristics. Android's build system uses a lot of GNU make black magics. The evaluator outputs a list of build rules and a variable table. The dependency builder creates a dependency graph from the list of build rules. Note this step doesn't use the variable table. Then either executor or ninja generator will be used. Either way, kati runs its evaluator again for command lines. The variable table is used again for this step. We'll look at each components closely. GNU make is a somewhat different language from modern languages. Let's see. Parser for statements --------------------- I'm not 100% sure, but I think GNU make parses and evaluates Makefiles simultaneously, but kati has two phases for parsing and evaluation. The reason of this design is for performance. For Android build, kati (or GNU make) needs to read ~3k files ~50k times. The file which is read most often is read ~5k times. It's waste of time to parse such files again and again. Kati can re-use parsed results when it needs to evaluate a Makefile second time. If we stop caching the parsed results, kati will be two times slower for Android's build. Caching parsed statements is done in *file_cache.cc*. The statement parser is defined in *parser.cc*. In kati, there are four kinds of statements: * Rules * Assignments * Commands * Make directives Data structures for them are defined in *stmt.h*. Here are examples of these statements: VAR := yay! # An assignment all: # A rule echo $(VAR) # A command include xxx.mk # A make directive (include) In addition to include directive, there are ifeq/ifneq/ifdef/ifndef directives and export/unexport directives. Also, kati internally uses "parse error statement". As GNU make doesn't show parse errors in branches which are not taken, we need to delay parse errors to evaluation time. 
### Context dependent parser A tricky point of parsing make statements is that the parsing depends on the context of the evaluation. See the following Makefile chunk for example: $(VAR) X=hoge echo $${X} You cannot tell whether the second line is a command or an assignment until *$(VAR)* is evaluated. If *$(VAR)* is a rule statement, the second line is a command and otherwise it's an assignment. If the previous line is VAR := target: the second line will turn out to be a command. For some reason, GNU make expands expressions before it decides the type of a statement only for rules. Storing assignments or directives in a variable won't work as assignments or directives. For example ASSIGN := A=B $(ASSIGN): doesn't assign "*B:*" to *A*, but defines a build rule whose target is *A=B*. Anyway, as a line starts with a tab character can be either a command statement or other statements depending on the evaluation result of the previous line, sometimes kati's parser cannot tell the statement type of a line. In this case, kati's parser speculatively creates a command statement object, keeping the original line. If it turns out the line is actually not a command statement, the evaluator re-runs the parser. ### Line concatenations and comments In most programming languages, line concatenations by a backslash character and comments are handled at a very early stage of a language implementation. However, GNU make changes the behavior for them depending on parse/eval context. For example, the following Makefile outputs "has space" and "hasnospace": VAR := has\ space all: echo $(VAR) echo has\ nospace GNU make usually inserts a whitespace between lines, but for command lines it doesn't. As we've seen in the previous subsection, sometimes kati cannot tell a line is a command statement or not. This means we should handle them after evaluating statements. Similar discussion applies for comments. 
GNU make usually trims characters after '#', but it does nothing for '#' in command lines. We have a bunch of comment/backslash related testcases in the testcase directory of kati's repository. Parser for expressions ---------------------- A statement may have one or more expressions. The number of expressions in a statement depends on the statement's type. For example, A := $(X) This is an assignment statement, which has two expressions - *A* and *$(X)*. Types of expressions and their parser are defined in *expr.cc*. Like other programming languages, an expression is a tree of expressions. The type of a leaf expression is either literal, variable reference, [substitution references](http://www.gnu.org/software/make/manual/make.html#Substitution-Refs), or make functions. As written, backslashes and comments change their behavior depending on the context. Kati handles them in this phase. *ParseExprOpt* is the enum for the contexts. As a nature of old systems, GNU make is very permissive. For some reason, it allows some kind of unmatched pairs of parentheses. For example, GNU make doesn't think *$($(foo)* is an error - this is a reference to variable *$(foo*. If you have some experiences with parsers, you may wonder how one can implement a parser which allows such expressions. It seems GNU make intentionally allows this: http://git.savannah.gnu.org/cgit/make.git/tree/expand.c#n285 No one won't use this feature intentionally. However, as GNU make allows this, some Makefiles have unmatched parentheses, so kati shouldn't raise an error for them, unfortunately. GNU make has a bunch of functions. Most users would use only simple ones such as *$(wildcard ...)* and *$(subst ...)*. There are also more complex functions such as *$(if ...)* and *$(call ...)*, which make GNU make Turing-complete. Make functions are defined in *func.cc*. Though *func.cc* is not short, the implementation is fairly simple. There is only one weirdness I remember around functions. 
GNU make slightly changes its parsing for *$(if ...)*, *$(and ...)*, and *$(or ...)*. See *trim_space* and *trim_right_space_1st* in *func.h* and how they are used in *expr.cc*. Evaluator for statements ------------------------ Evaluator for statements are defined in *eval.cc*. As written, there are four kinds of statements: * Rules * Assignments * Commands * Make directives There is nothing tricky around commands and make directives. A rule statement have some forms and should be parsed after evaluating expression by the third parser. This will be discussed in the next section. Assignments in GNU make is tricky a bit. There are two kinds of variables in GNU make - simple variables and recursive variables. See the following code snippet: A = $(info world!) # recursive B := $(info Hello,) # simple $(A) $(B) This code outputs "Hello," and "world!", in this order. The evaluation of a recursive variable is delayed until the variable is referenced. So the first line, which is an assignment of a recursive variable, outputs nothing. The content of the variable *$(A)* will be *$(info world!)* after the first line. The assignment in the second line uses *:=* which means this is a simple variable assignment. For simple variables, the right hand side is evaluated immediately. So "Hello," will be output and the value of *$(B)* will be an empty string ($(info ...) returns an empty string). Then, "world!" will be shown when the third line is evaluated as *$(A)* is evaluated, and lastly the forth line does nothing, as *$(B)* is an empty string. There are two more kinds of assignments (i.e., *+=* and *?=*). These assignments keep the type of the original variable. Evaluation of them will be done immediately only when the left hand side of the assignment is already defined and is a simple variable. Parser for rules ---------------- After evaluating a rule statement, kati needs to parse the evaluated result. 
A rule statement can actually be the following four things: * A rule * A [target specific variable](http://www.gnu.org/software/make/manual/make.html#Target_002dspecific) * An empty line * An error (there're non-whitespace characters without a colon) Parsing them is mostly done in *rule.cc*. ### Rules A rule is something like *all: hello.exe*. You should be familiar with it. There are several kinds of rules such as pattern rules, double colon rules, and order only dependencies, but they don't complicate the rule parser. A feature which complicates the parser is semicolon. You can write the first build command on the same line as the rule. For example, target: echo hi! and target: ; echo hi! have the same meaning. This is tricky because kati shouldn't evaluate expressions in a command until the command is actually invoked. As a semicolon can appear as the result of expression evaluation, there are some corner cases. A tricky example: all: $(info foo) ; $(info bar) $(info baz) should output *foo*, *baz*, and then *bar*, in this order, but VAR := all: $(info foo) ; $(info bar) $(VAR) $(info baz) outputs *foo*, *bar*, and then *baz*. Again, for the command line after a semicolon, kati should also change how backslashes and comments are handled. target: has\ space ; echo no\ space The above example says *target* depends on two targets, *has* and *space*, and to build *target*, *echo nospace* should be executed. ### Target specific variables You may not familiar with target specific variables. This feature allows you to define variable which can be referenced only from commands in a specified target. See the following code: VAR := X target1: VAR := Y target1: echo $(VAR) target2: echo $(VAR) In this example, *target1* shows *Y* and *target2* shows *X*. I think this feature is somewhat similar to namespaces in other programming languages. 
If a target specific variable is specified for a non-leaf target, the variable will be used even in build commands of prerequisite targets. In general, I like GNU make, but this is the only GNU make's feature I don't like. See the following Makefile: hello: CFLAGS := -g hello: hello.o gcc $(CFLAGS) $< -o $@ hello.o: hello.c gcc $(CFLAGS) -c $< -o $@ If you run make for the target *hello*, *CFLAGS* is applied for both commands: $ make hello gcc -g -c hello.c -o hello.o gcc -g hello.o -o hello However, *CFLAGS* for *hello* won't be used when you build only *hello.o*: $ make hello.o gcc -c hello.c -o hello.o Things could be even worse when two targets with different target specific variables depend on a same target. The build result will be inconsistent. I think there is no valid usage of this feature for non-leaf targets. Let's go back to the parsing. Like for semicolons, we need to delay the evaluation of the right hand side of the assignment for recursive variables. Its implementation is very similar to the one for semicolons, but the combination of the assignment and the semicolon makes parsing a bit trickier. An example: target1: ;X=Y echo $(X) # A rule with a command target2: X=;Y echo $(X) # A target specific variable Evaluator for expressions ------------------------- Evaluation of expressions is done in *expr.cc*, *func.cc*, and *command.cc*. The amount of code for this step is fairly large especially because of the number of GNU make functions. However, their implementations are fairly straightforward. One tricky function is $(wildcard ...). It seems GNU make is doing some kind of optimization only for this function and $(wildcard ...) in commands seem to be evaluated before the evaluation phase for commands. Both C++ kati and Go kati are different from GNU make's behavior in different ways, but it seems this incompatibility is OK for Android build. There is an important optimization done for Android. Android's build system has a lot of $(shell find ...) 
calls to create a list of all .java/.mk files under a directory, and they are slow. For this, kati has a builtin emulator of GNU find. The find emulator traverses the directory tree and creates an in-memory directory tree. Then the find emulator returns results of find commands using the cached tree. For my environment, the find command emulator makes kati ~1.6x faster for AOSP. The implementations of some IO-related functions in commands are tricky in the ninja generation mode. This will be described later. Dependency builder ------------------ Now we get a list of rules and a variable table. *dep.cc* builds a dependency graph using the list of rules. I think this step is what GNU make is supposed to do for normal users. This step is fairly complex like other components but there's nothing strange. There are three types of rules in GNU make: * explicit rule * implicit rule * suffix rule The following code shows the three types: all: foo.o foo.o: echo explicit %.o: echo implicit .c.o: echo suffix In the above example, all of these three rules match the target *foo.o*. GNU make prioritizes explicit rules first. When there's no explicit rule for a target, it uses an implicit rule with longer pattern string. Suffix rules are used only when there are no explicit/implicit rules. Android has more than one thousand implicit rules and there are ten thousands of targets. It's too slow to do matching for them with a naive O(NM) algorithm. Kati uses a trie to speed up this step. Multiple rules without commands should be merged into the rule with a command. For example: foo.o: foo.h %.o: %.c $(CC) -c $< -o $@ *foo.o* depends not only on *foo.c*, but also on *foo.h*. Executor -------- C++ kati's executor is fairly simple. This is defined in *exec.cc*. This is useful only for testing because this lacks some important features for a build system (e.g., parallel build). Expressions in commands are evaluated at this stage. 
When they are evaluated, target specific variables and some special variables (e.g., $< and $@) should be considered. *command.cc* is handling them. This file is used by both the executor and the ninja generator. Evaluation at this stage is tricky when both *+=* and target specific variables are involved. Here is an example code: all: test1 test2 test3 test4 A:=X B=X X:=foo test1: A+=$(X) test1: @echo $(A) # X bar test2: B+=$(X) test2: @echo $(B) # X bar test3: A:= test3: A+=$(X) test3: @echo $(A) # foo test4: B= test4: B+=$(X) test4: @echo $(B) # bar X:=bar *$(A)* in *test3* is a simple variable. Though *$(A)* in the global scope is simple, *$(A)* in *test1* is a recursive variable. This means types of global variables don't affect types of target specific variables. However, The result of *test1* ("X bar") shows the value of a target specific variable is concatenated to the value of a global variable. Ninja generator --------------- *ninja.cc* generates a ninja file using the results of other components. This step is actually fairly complicated because kati needs to map GNU make's features to ninja's. A build rule in GNU make may have multiple commands, while ninja's has always a single command. To mitigate this, the ninja generator translates multiple commands into something like *(cmd1) && (cmd2) && ...*. Kati should also escape some special characters for ninja and shell. The tougher thing is $(shell ...) in commands. Current kati's implementation translates it into shell's $(...). This works for many cases. But this approach won't work when the result of $(shell ...) is passed to another make function. For example all: echo $(if $(shell echo),FAIL,PASS) should output PASS, because the result of $(shell echo) is an empty string. GNU make and kati's executor mode output PASS correctly. However, kati's ninja generator emits a ninja file which shows FAIL. I wrote a few experimental patches for this issue, but they didn't work well. 
The current kati's implementation has an Android specific workaround for this. See *HasNoIoInShellScript* in *func.cc* for detail. Ninja regeneration ------------------ C++ kati has --regen flag. If this flag is specified, kati checks if anything in your environment was changed after the previous run. If kati thinks it doesn't need to regenerate the ninja file, it finishes quickly. For Android, running kati takes ~30 secs at the first run but the second run takes only ~1 sec. Kati thinks it needs to regenerate the ninja file when one of the followings is changed: * The command line flags passed to kati * A timestamp of a Makefile used to generate the previous ninja file * An environment variable used while evaluating Makefiles * A result of $(wildcard ...) * A result of $(shell ...) Quickly doing the last check is not trivial. It takes ~18 secs to run all $(shell ...) in Android's build system due to the slowness of $(shell find ...). So, for find commands executed by kati's find emulator, kati stores the timestamps of traversed directories with the find command itself. For each find commands, kati checks the timestamps of them. If they are not changed, kati skips re-running the find command. Kati doesn't run $(shell date ...) and $(shell echo ...) during this check. The former always changes so there's no sense to re-run them. Android uses the latter to create a file and the result of them are empty strings. We don't want to update these files to get empty strings. TODO ---- A big TODO is sub-makes invoked by $(MAKE). I wrote some experimental patches but nothing is ready to be used as of writing. LICENSE0100644 0000000 0000000 00000026136 13654546140 010632 0ustar000000000 0000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. 
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Makefile0100644 0000000 0000000 00000001547 13654546140 011264 0ustar000000000 0000000 # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
all: ckati ckati_tests include Makefile.kati include Makefile.ckati test: run_tests test_quietly: run_tests test_quietly: RUN_TESTS_QUIETLY := -q run_tests: all ckati_tests ruby runtest.rb -c -n $(RUN_TESTS_QUIETLY) clean: ckati_clean .PHONY: test clean ckati_tests Makefile.ckati0100644 0000000 0000000 00000007706 13654546140 012361 0ustar000000000 0000000 # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Find source file location from path to this Makefile KATI_SRC_PATH := $(patsubst %/,%,$(dir $(lastword $(MAKEFILE_LIST)))) # Set defaults if they weren't set by the including Makefile KATI_CXX ?= $(CXX) KATI_LD ?= $(CXX) KATI_INTERMEDIATES_PATH ?= . KATI_BIN_PATH ?= . 
KATI_CXX_SRCS := \ affinity.cc \ command.cc \ dep.cc \ eval.cc \ exec.cc \ expr.cc \ file.cc \ file_cache.cc \ fileutil.cc \ find.cc \ flags.cc \ func.cc \ io.cc \ log.cc \ main.cc \ ninja.cc \ parser.cc \ regen.cc \ rule.cc \ stats.cc \ stmt.cc \ string_piece.cc \ stringprintf.cc \ strutil.cc \ symtab.cc \ thread_pool.cc \ timeutil.cc \ var.cc KATI_CXX_GENERATED_SRCS := \ version.cc KATI_CXX_SRCS := $(addprefix $(KATI_SRC_PATH)/,$(KATI_CXX_SRCS)) KATI_CXX_TEST_SRCS := \ $(wildcard $(KATI_SRC_PATH)/*_test.cc) \ $(filter-out $(KATI_SRC_PATH)/fileutil_bench.cc,\ $(wildcard $(KATI_SRC_PATH)/*_bench.cc)) KATI_CXX_OBJS := $(patsubst $(KATI_SRC_PATH)/%.cc,$(KATI_INTERMEDIATES_PATH)/%.o,\ $(KATI_CXX_SRCS)) KATI_CXX_GENERATED_OBJS := $(patsubst %.cc,$(KATI_INTERMEDIATES_PATH)/%.o,\ $(KATI_CXX_GENERATED_SRCS)) KATI_CXX_TEST_OBJS := $(patsubst $(KATI_SRC_PATH)/%.cc,$(KATI_INTERMEDIATES_PATH)/%.o,\ $(KATI_CXX_TEST_SRCS)) KATI_CXX_TEST_EXES := $(patsubst $(KATI_INTERMEDIATES_PATH)/%.o,$(KATI_BIN_PATH)/%,\ $(KATI_CXX_TEST_OBJS)) KATI_CXXFLAGS := -g -W -Wall -MMD -MP KATI_CXXFLAGS += -O -DNOLOG KATI_CXXFLAGS += -march=native #KATI_CXXFLAGS += -pg ifeq ($(shell uname),Linux) KATI_LIBS := -lrt -lpthread endif # Rule to build ckati into KATI_BIN_PATH $(KATI_BIN_PATH)/ckati: $(KATI_CXX_OBJS) $(KATI_CXX_GENERATED_OBJS) @mkdir -p $(dir $@) $(KATI_LD) -std=c++11 $(KATI_CXXFLAGS) -o $@ $^ $(KATI_LIBS) # Rule to build normal source files into object files in KATI_INTERMEDIATES_PATH $(KATI_CXX_OBJS) $(KATI_CXX_TEST_OBJS): $(KATI_INTERMEDIATES_PATH)/%.o: $(KATI_SRC_PATH)/%.cc @mkdir -p $(dir $@) $(KATI_CXX) -c -std=c++11 $(KATI_CXXFLAGS) -o $@ $< # Rule to build generated source files into object files in KATI_INTERMEDIATES_PATH $(KATI_CXX_GENERATED_OBJS): $(KATI_INTERMEDIATES_PATH)/%.o: $(KATI_INTERMEDIATES_PATH)/%.cc @mkdir -p $(dir $@) $(KATI_CXX) -c -std=c++11 $(KATI_CXXFLAGS) -o $@ $< ckati_tests: $(KATI_CXX_TEST_EXES) # Rule to build tests using *_test.cc and all normal *.cc files 
except main.cc $(KATI_CXX_TEST_EXES): $(filter-out $(KATI_INTERMEDIATES_PATH)/main.o,$(KATI_CXX_OBJS)) $(KATI_CXX_GENERATED_OBJS) $(KATI_CXX_TEST_EXES): $(KATI_BIN_PATH)/%: $(KATI_INTERMEDIATES_PATH)/%.o $(KATI_LD) $^ -o $@ $(KATI_LIBS) # Rule to generate version.cc KATI_GIT_DIR := $(shell cd $(KATI_SRC_PATH); realpath `git rev-parse --git-dir`) ifneq ($(KATI_GIT_DIR),) KATI_VERSION_DEPS := $(KATI_GIT_DIR)/HEAD $(KATI_GIT_DIR)/index KATI_VERSION := $(shell git -C $(KATI_GIT_DIR) rev-parse HEAD) else KATI_VERSION_DEPS := KATI_VERSION := unknown endif $(KATI_INTERMEDIATES_PATH)/version.cc: $(KATI_VERSION_DEPS) @mkdir -p $(dir $@) echo '// +build ignore' > $@ echo >> $@ echo 'const char* kGitVersion = "$(KATI_VERSION)";' >> $@ tsan_ckati: $(MAKE) clean $(MAKE) all CXX='clang++-3.6 -fsanitize=thread' cp ckati $@ ckati_clean: rm -rf $(KATI_INTERMEDIATES_PATH)/ckati rm -rf $(KATI_INTERMEDIATES_PATH)/*.o rm -rf $(KATI_INTERMEDIATES_PATH)/*.d rm -rf $(KATI_INTERMEDIATES_PATH)/version.cc rm -rf $(KATI_CXX_TEST_EXES) rm -rf out .PHONY: ckati_clean -include $(KATI_INTERMEDIATES_PATH)/*.d Makefile.kati0100644 0000000 0000000 00000002401 13654546140 012201 0ustar000000000 0000000 # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
GO_SRCS:=$(wildcard *.go) ifeq (${GOPATH},) KATI_GOPATH:=$$(pwd)/out else KATI_GOPATH:=$$(pwd)/out:$${GOPATH} endif kati: go_src_stamp -rm -f out/bin/kati GOPATH=${KATI_GOPATH} go install -ldflags "-X github.com/google/kati.gitVersion=$(shell git rev-parse HEAD)" github.com/google/kati/cmd/kati cp out/bin/kati $@ go_src_stamp: $(GO_SRCS) $(wildcard cmd/*/*.go) -rm -rf out/src out/pkg mkdir -p out/src/github.com/google/kati cp -a $(GO_SRCS) cmd out/src/github.com/google/kati GOPATH=${KATI_GOPATH} go get github.com/google/kati/cmd/kati touch $@ go_test: $(GO_SRCS) GOPATH=${KATI_GOPATH} go test *.go go_clean: rm -rf out kati go_src_stamp .PHONY: go_clean go_test OWNERS0100644 0000000 0000000 00000000073 13654546140 010555 0ustar000000000 0000000 ccross@android.com dwillemsen@google.com hamaji@google.com README.md0100644 0000000 0000000 00000002444 13654546140 011100 0ustar000000000 0000000 kati ==== [![Build Status](https://travis-ci.org/google/kati.svg?branch=master)](http://travis-ci.org/google/kati) kati is an experimental GNU make clone. The main goal of this tool is to speed-up incremental build of Android. Currently, kati does not offer a faster build by itself. It instead converts your Makefile to a ninja file. How to use for Android ---------------------- For Android-N+, ckati and ninja is used automatically. There is a prebuilt checked in under prebuilts/build-tools that is used. All Android's build commands (m, mmm, mmma, etc.) should just work. How to use for Android (deprecated -- only for Android M or earlier) ---------------------- Set up kati: % cd ~/src % git clone https://github.com/google/kati % cd kati % make Build Android: % cd % source build/envsetup.sh % lunch % ~/src/kati/m2n --kati_stats % ./ninja.sh You need ninja in your $PATH. More usage examples (deprecated way) ------------------- ### "make clean" % ./ninja.sh -t clean Note ./ninja.sh passes all parameters to ninja. 
### Build a specific target For example, the following is equivalent to "make cts": % ./ninja.sh cts Or, if you know the path you want, you can do: % ./ninja.sh out/host/linux-x86/bin/adb affinity.cc0100644 0000000 0000000 00000003542 13654546140 011741 0ustar000000000 0000000 // Copyright 2016 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "affinity.h" #include "flags.h" #include "log.h" #ifdef __linux__ #include #include #include #include void SetAffinityForSingleThread() { cpu_set_t cs; CPU_ZERO(&cs); std::random_device generator; std::uniform_int_distribution distribution(0, g_flags.num_cpus - 1); int cpu = distribution(generator); // Try to come up with a CPU and one close to it. This should work on most // hyperthreaded system, but may be less optimal under stranger setups. // Choosing two completely different CPUs would work here as well, it's just a // couple percent faster if they're close (and still faster than letting the // scheduler do whatever it wants). 
cpu = cpu - (cpu % 2); CPU_SET(cpu, &cs); if (g_flags.num_cpus > 1) CPU_SET(cpu + 1, &cs); if (sched_setaffinity(0, sizeof(cs), &cs) < 0) WARN("sched_setaffinity: %s", strerror(errno)); } void SetAffinityForMultiThread() { cpu_set_t cs; CPU_ZERO(&cs); for (int i = 0; i < g_flags.num_cpus; i++) { CPU_SET(i, &cs); } if (sched_setaffinity(0, sizeof(cs), &cs) < 0) WARN("sched_setaffinity: %s", strerror(errno)); } #else void SetAffinityForSingleThread() {} void SetAffinityForMultiThread() {} #endif affinity.h0100644 0000000 0000000 00000001330 13654546140 011574 0ustar000000000 0000000 // Copyright 2016 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef AFFINITY_H_ #define AFFINITY_H_ void SetAffinityForSingleThread(); void SetAffinityForMultiThread(); #endif ast.go0100644 0000000 0000000 00000007273 13654546140 010744 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package kati import ( "strings" "github.com/golang/glog" ) type ast interface { eval(*Evaluator) error show() } type assignAST struct { srcpos lhs Value rhs Value op string opt string // "override", "export" } func (ast *assignAST) eval(ev *Evaluator) error { return ev.evalAssign(ast) } func (ast *assignAST) evalRHS(ev *Evaluator, lhs string) (Var, error) { origin := "file" if ast.filename == bootstrapMakefileName { origin = "default" } if ast.opt == "override" { origin = "override" } // TODO(ukai): handle ast.opt == "export" switch ast.op { case ":=": switch v := ast.rhs.(type) { case literal: return &simpleVar{value: []string{v.String()}, origin: origin}, nil case tmpval: return &simpleVar{value: []string{v.String()}, origin: origin}, nil default: var buf evalBuffer buf.resetSep() err := v.Eval(&buf, ev) if err != nil { return nil, err } return &simpleVar{value: []string{buf.String()}, origin: origin}, nil } case "=": return &recursiveVar{expr: ast.rhs, origin: origin}, nil case "+=": prev := ev.lookupVarInCurrentScope(lhs) if !prev.IsDefined() { return &recursiveVar{expr: ast.rhs, origin: origin}, nil } return prev.AppendVar(ev, ast.rhs) case "?=": prev := ev.lookupVarInCurrentScope(lhs) if prev.IsDefined() { return prev, nil } return &recursiveVar{expr: ast.rhs, origin: origin}, nil } return nil, ast.errorf("unknown assign op: %q", ast.op) } func (ast *assignAST) show() { glog.Infof("%s %s %s %q", ast.opt, ast.lhs, ast.op, ast.rhs) } // maybeRuleAST is an ast for rule line. // Note we cannot be sure what this is, until all variables in |expr| // are expanded. 
type maybeRuleAST struct { srcpos isRule bool // found literal ':' expr Value assign *assignAST // target specific var semi []byte // after ';' if ';' exists } func (ast *maybeRuleAST) eval(ev *Evaluator) error { return ev.evalMaybeRule(ast) } func (ast *maybeRuleAST) show() { glog.Info(ast.expr) } type commandAST struct { srcpos cmd string } func (ast *commandAST) eval(ev *Evaluator) error { return ev.evalCommand(ast) } func (ast *commandAST) show() { glog.Infof("\t%s", strings.Replace(ast.cmd, "\n", `\n`, -1)) } type includeAST struct { srcpos expr string op string } func (ast *includeAST) eval(ev *Evaluator) error { return ev.evalInclude(ast) } func (ast *includeAST) show() { glog.Infof("include %s", ast.expr) } type ifAST struct { srcpos op string lhs Value rhs Value // Empty if |op| is ifdef or ifndef. trueStmts []ast falseStmts []ast } func (ast *ifAST) eval(ev *Evaluator) error { return ev.evalIf(ast) } func (ast *ifAST) show() { // TODO glog.Info("if") } type exportAST struct { srcpos expr []byte hasEqual bool export bool } func (ast *exportAST) eval(ev *Evaluator) error { return ev.evalExport(ast) } func (ast *exportAST) show() { // TODO glog.Info("export") } type vpathAST struct { srcpos expr Value } func (ast *vpathAST) eval(ev *Evaluator) error { return ev.evalVpath(ast) } func (ast *vpathAST) show() { glog.Infof("vpath %s", ast.expr.String()) } bootstrap.go0100644 0000000 0000000 00000003063 13654546140 012163 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// See the License for the specific language governing permissions and // limitations under the License. package kati import ( "fmt" "path/filepath" "strings" ) const bootstrapMakefileName = "*bootstrap*" func bootstrapMakefile(targets []string) (makefile, error) { bootstrap := ` CC?=cc CXX?=g++ AR?=ar MAKE?=kati # Pretend to be GNU make 3.81, for compatibility. MAKE_VERSION?=3.81 KATI?=kati SHELL=/bin/sh # TODO: Add more builtin vars. # http://www.gnu.org/software/make/manual/make.html#Catalogue-of-Rules # The document above is actually not correct. See default.c: # http://git.savannah.gnu.org/cgit/make.git/tree/default.c?id=4.1 .c.o: $(CC) $(CFLAGS) $(CPPFLAGS) $(TARGET_ARCH) -c -o $@ $< .cc.o: $(CXX) $(CXXFLAGS) $(CPPFLAGS) $(TARGET_ARCH) -c -o $@ $< # TODO: Add more builtin rules. ` bootstrap += fmt.Sprintf("MAKECMDGOALS:=%s\n", strings.Join(targets, " ")) cwd, err := filepath.Abs(".") if err != nil { return makefile{}, err } bootstrap += fmt.Sprintf("CURDIR:=%s\n", cwd) return parseMakefileString(bootstrap, srcpos{bootstrapMakefileName, 0}) } buf.go0100644 0000000 0000000 00000010115 13654546140 010716 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package kati import ( "io" "sync" ) var ( ebufFree = sync.Pool{ New: func() interface{} { return new(evalBuffer) }, } wbufFree = sync.Pool{ New: func() interface{} { return new(wordBuffer) }, } ) func writeByte(w io.Writer, b byte) error { if bw, ok := w.(io.ByteWriter); ok { return bw.WriteByte(b) } _, err := w.Write([]byte{b}) return err } // use io.WriteString to stringWrite. type ssvWriter struct { io.Writer sep bool } func (w *ssvWriter) writeWord(word []byte) { if w.sep { writeByte(w.Writer, ' ') } w.sep = true w.Writer.Write(word) } func (w *ssvWriter) writeWordString(word string) { if w.sep { writeByte(w.Writer, ' ') } w.sep = true io.WriteString(w.Writer, word) } func (w *ssvWriter) resetSep() { w.sep = false } type buffer struct { buf []byte bootstrap [64]byte // memory to hold first slice } func (b *buffer) Write(data []byte) (int, error) { b.buf = append(b.buf, data...) return len(data), nil } func (b *buffer) WriteByte(c byte) error { b.buf = append(b.buf, c) return nil } func (b *buffer) WriteString(s string) (int, error) { b.buf = append(b.buf, []byte(s)...) 
return len(s), nil } func (b *buffer) Bytes() []byte { return b.buf } func (b *buffer) Len() int { return len(b.buf) } func (b *buffer) String() string { return string(b.buf) } func (b *buffer) Reset() { if b.buf == nil { b.buf = b.bootstrap[:0] } b.buf = b.buf[:0] } type evalBuffer struct { buffer ssvWriter args [][]byte } func newEbuf() *evalBuffer { buf := ebufFree.Get().(*evalBuffer) buf.Reset() return buf } func (buf *evalBuffer) release() { if cap(buf.Bytes()) > 1024 { return } buf.Reset() buf.args = buf.args[:0] ebufFree.Put(buf) } func (b *evalBuffer) Reset() { b.buffer.Reset() b.resetSep() } func (b *evalBuffer) resetSep() { if b.ssvWriter.Writer == nil { b.ssvWriter.Writer = &b.buffer } b.ssvWriter.resetSep() } type wordBuffer struct { buf buffer words [][]byte } func newWbuf() *wordBuffer { buf := wbufFree.Get().(*wordBuffer) buf.Reset() return buf } func (buf *wordBuffer) release() { if cap(buf.Bytes()) > 1024 { return } buf.Reset() wbufFree.Put(buf) } func (wb *wordBuffer) Write(data []byte) (int, error) { if len(data) == 0 { return 0, nil } off := len(wb.buf.buf) var cont bool if !isWhitespace(rune(data[0])) && len(wb.buf.buf) > 0 { cont = !isWhitespace(rune(wb.buf.buf[off-1])) } ws := newWordScanner(data) for ws.Scan() { if cont { word := wb.words[len(wb.words)-1] wb.words = wb.words[:len(wb.words)-1] wb.buf.buf = wb.buf.buf[:len(wb.buf.buf)-len(word)] var w []byte w = append(w, word...) w = append(w, ws.Bytes()...) wb.writeWord(w) cont = false continue } wb.writeWord(ws.Bytes()) } if isWhitespace(rune(data[len(data)-1])) { wb.buf.buf = append(wb.buf.buf, ' ') } return len(data), nil } func (wb *wordBuffer) WriteByte(c byte) error { _, err := wb.Write([]byte{c}) return err } func (wb *wordBuffer) WriteString(s string) (int, error) { return wb.Write([]byte(s)) } func (wb *wordBuffer) writeWord(word []byte) { if len(wb.buf.buf) > 0 { wb.buf.buf = append(wb.buf.buf, ' ') } off := len(wb.buf.buf) wb.buf.buf = append(wb.buf.buf, word...) 
wb.words = append(wb.words, wb.buf.buf[off:off+len(word)]) } func (wb *wordBuffer) writeWordString(word string) { wb.writeWord([]byte(word)) } func (wb *wordBuffer) Reset() { wb.buf.Reset() wb.words = nil } func (wb *wordBuffer) resetSep() {} func (wb *wordBuffer) Bytes() []byte { return wb.buf.Bytes() } buf_test.go0100644 0000000 0000000 00000003025 13654546140 011757 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "reflect" "testing" ) func TestWordBuffer(t *testing.T) { for _, tc := range []struct { in []string want []string }{ { in: []string{"foo"}, want: []string{"foo"}, }, { in: []string{"foo bar"}, want: []string{"foo", "bar"}, }, { in: []string{" foo bar\tbaz "}, want: []string{"foo", "bar", "baz"}, }, { in: []string{"foo", "bar"}, want: []string{"foobar"}, }, { in: []string{"foo ", "bar"}, want: []string{"foo", "bar"}, }, { in: []string{"foo", " bar"}, want: []string{"foo", "bar"}, }, { in: []string{"foo ", " bar"}, want: []string{"foo", "bar"}, }, } { var wb wordBuffer for _, s := range tc.in { wb.Write([]byte(s)) } var got []string for _, word := range wb.words { got = append(got, string(word)) } if !reflect.DeepEqual(got, tc.want) { t.Errorf("%q => %q; want %q", tc.in, got, tc.want) } } } clang-format-check0100755 0000000 0000000 00000000536 13654546140 013174 0ustar000000000 0000000 #!/usr/bin/env bash CLANG_FORMAT="clang-format-7" if [ -z "$(which 
$CLANG_FORMAT)" ]; then CLANG_FORMAT="clang-format" fi for f in $(ls *.cc *.h); do if ! diff -U 1 <($CLANG_FORMAT $f) $f; then echo echo "!!!!!!" echo "!! Failed clang-format check, please run 'clang-format -i *.cc *.h'" 1>&2 echo "!!!!!!" exit 1 fi done cmd/0040755 0000000 0000000 00000000000 13654546140 010363 5ustar000000000 0000000 cmd/kati/0040755 0000000 0000000 00000000000 13654546140 011313 5ustar000000000 0000000 cmd/kati/main.go0100644 0000000 0000000 00000017430 13654546140 012570 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package main import ( "bytes" "flag" "fmt" "os" "os/exec" "path/filepath" "runtime" "runtime/pprof" "text/template" "time" "github.com/golang/glog" "github.com/google/kati" ) const shellDateTimeformat = time.RFC3339 var ( makefileFlag string jobsFlag int loadJSON string saveJSON string loadGOB string saveGOB string useCache bool m2n bool goma bool cpuprofile string heapprofile string memstats string traceEventFile string syntaxCheckOnlyFlag bool queryFlag string eagerCmdEvalFlag bool generateNinja bool regenNinja bool ninjaSuffix string gomaDir string detectAndroidEcho bool shellDate string ) func init() { // TODO: Make this default and replace this by -d flag. 
flag.StringVar(&makefileFlag, "f", "", "Use it as a makefile") flag.IntVar(&jobsFlag, "j", 1, "Allow N jobs at once.") flag.StringVar(&loadGOB, "load", "", "") flag.StringVar(&saveGOB, "save", "", "") flag.StringVar(&loadJSON, "load_json", "", "") flag.StringVar(&saveJSON, "save_json", "", "") flag.BoolVar(&useCache, "use_cache", false, "Use cache.") flag.BoolVar(&m2n, "m2n", false, "m2n mode") flag.BoolVar(&goma, "goma", false, "ensure goma start") flag.StringVar(&cpuprofile, "kati_cpuprofile", "", "write cpu profile to `file`") flag.StringVar(&heapprofile, "kati_heapprofile", "", "write heap profile to `file`") flag.StringVar(&memstats, "kati_memstats", "", "Show memstats with given templates") flag.StringVar(&traceEventFile, "kati_trace_event", "", "write trace event to `file`") flag.BoolVar(&syntaxCheckOnlyFlag, "c", false, "Syntax check only.") flag.StringVar(&queryFlag, "query", "", "Show the target info") flag.BoolVar(&eagerCmdEvalFlag, "eager_cmd_eval", false, "Eval commands first.") flag.BoolVar(&generateNinja, "ninja", false, "Generate build.ninja.") flag.BoolVar(®enNinja, "gen_regen_rule", false, "Generate regenerate build.ninja rule.") flag.StringVar(&ninjaSuffix, "ninja_suffix", "", "suffix for ninja files.") flag.StringVar(&gomaDir, "goma_dir", "", "If specified, use goma to build C/C++ files.") // TODO(ukai): implement --regen flag.BoolVar(&detectAndroidEcho, "detect_android_echo", false, "detect echo as ninja description.") flag.StringVar(&shellDate, "shell_date", "", "specify $(shell date) time as "+shellDateTimeformat) flag.BoolVar(&kati.StatsFlag, "kati_stats", false, "Show a bunch of statistics") flag.BoolVar(&kati.PeriodicStatsFlag, "kati_periodic_stats", false, "Show a bunch of periodic statistics") flag.BoolVar(&kati.EvalStatsFlag, "kati_eval_stats", false, "Show eval statistics") flag.BoolVar(&kati.DryRunFlag, "n", false, "Only print the commands that would be executed") // TODO: Make this default. 
flag.BoolVar(&kati.UseFindEmulator, "use_find_emulator", false, "use find emulator") flag.BoolVar(&kati.UseShellBuiltins, "use_shell_builtins", true, "Use shell builtins") flag.StringVar(&kati.IgnoreOptionalInclude, "ignore_optional_include", "", "If specified, skip reading -include directives start with the specified path.") } func writeHeapProfile() { f, err := os.Create(heapprofile) if err != nil { panic(err) } pprof.WriteHeapProfile(f) f.Close() } type memStatsDumper struct { *template.Template } func (t memStatsDumper) dump() { var ms runtime.MemStats runtime.ReadMemStats(&ms) var buf bytes.Buffer err := t.Template.Execute(&buf, ms) fmt.Println(buf.String()) if err != nil { panic(err) } } func load(req kati.LoadReq) (*kati.DepGraph, error) { if loadGOB != "" { g, err := kati.GOB.Load(loadGOB) return g, err } if loadJSON != "" { g, err := kati.JSON.Load(loadJSON) return g, err } g, err := kati.Load(req) return g, err } func save(g *kati.DepGraph, targets []string) error { var err error if saveGOB != "" { err = kati.GOB.Save(g, saveGOB, targets) } if saveJSON != "" { serr := kati.JSON.Save(g, saveJSON, targets) if err == nil { err = serr } } return err } func m2nsetup() { fmt.Println("kati: m2n mode") generateNinja = true kati.IgnoreOptionalInclude = "out/%.P" kati.UseFindEmulator = true } func gomasetup() { for _, k := range []string{"CC_WRAPPER", "CXX_WRAPPER", "JAVAC_WRAPPER"} { v := os.Getenv(k) if v != "" { fmt.Printf("Note: %s=%s may confuse m2n --goma, unsetting", k, v) os.Unsetenv(k) } } if gomaDir == "" { gomaDir = os.Getenv("GOMA_DIR") if gomaDir == "" { gomaDir = os.ExpandEnv("${HOME}/goma") } } fmt.Printf("kati: setup goma: %s\n", gomaDir) cmd := exec.Command(filepath.Join(gomaDir, "goma_ctl.py"), "ensure_start") cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr err := cmd.Run() if err != nil { fmt.Printf("goma failed to start: %v", err) os.Exit(1) } } func main() { runtime.GOMAXPROCS(runtime.NumCPU()) m2ncmd := false if filepath.Base(os.Args[0]) == 
"m2n" { m2nsetup() m2ncmd = true } flag.Parse() args := flag.Args() if m2n { generateNinja = true if !m2ncmd { m2nsetup() } if len(args) > 1 { fmt.Println("use only first argument as ONE_SHOT_MAKEFILE. ignore rest") } if len(args) > 0 { err := os.Setenv("ONE_SHOT_MAKEFILE", filepath.Join(args[0], "Android.mk")) if err != nil { fmt.Println(err) os.Exit(1) } fmt.Printf("ONE_SHOT_MAKEFILE=%s\n", os.ExpandEnv("${ONE_SHOT_MAKEFILE}")) } args = args[:0] } if goma { gomasetup() } err := katiMain(args) if err != nil { fmt.Println(err) // http://www.gnu.org/software/make/manual/html_node/Running.html os.Exit(2) } } func katiMain(args []string) error { defer glog.Flush() if cpuprofile != "" { f, err := os.Create(cpuprofile) if err != nil { return err } pprof.StartCPUProfile(f) defer pprof.StopCPUProfile() } if heapprofile != "" { defer writeHeapProfile() } defer kati.DumpStats() if memstats != "" { ms := memStatsDumper{ Template: template.Must(template.New("memstats").Parse(memstats)), } ms.dump() defer ms.dump() } if traceEventFile != "" { f, err := os.Create(traceEventFile) if err != nil { panic(err) } kati.TraceEventStart(f) defer kati.TraceEventStop() } if shellDate != "" { if shellDate == "ref" { shellDate = shellDateTimeformat[:20] // until Z, drop 07:00 } t, err := time.Parse(shellDateTimeformat, shellDate) if err != nil { panic(err) } kati.ShellDateTimestamp = t } req := kati.FromCommandLine(args) if makefileFlag != "" { req.Makefile = makefileFlag } req.EnvironmentVars = os.Environ() req.UseCache = useCache req.EagerEvalCommand = eagerCmdEvalFlag g, err := load(req) if err != nil { return err } err = save(g, req.Targets) if err != nil { return err } if generateNinja { var args []string if regenNinja { args = os.Args } n := kati.NinjaGenerator{ Args: args, Suffix: ninjaSuffix, GomaDir: gomaDir, DetectAndroidEcho: detectAndroidEcho, } return n.Save(g, "", req.Targets) } if syntaxCheckOnlyFlag { return nil } if queryFlag != "" { kati.Query(os.Stdout, queryFlag, g) 
return nil } execOpt := &kati.ExecutorOpt{ NumJobs: jobsFlag, } ex, err := kati.NewExecutor(execOpt) if err != nil { return err } err = ex.Exec(g, req.Targets) if err != nil { return err } return nil } command.cc0100644 0000000 0000000 00000015734 13654546140 011554 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "command.h" #include #include #include "dep.h" #include "eval.h" #include "flags.h" #include "log.h" #include "strutil.h" #include "var.h" namespace { class AutoVar : public Var { public: AutoVar() : Var(VarOrigin::AUTOMATIC) {} virtual const char* Flavor() const override { return "undefined"; } virtual void AppendVar(Evaluator*, Value*) override { CHECK(false); } virtual StringPiece String() const override { ERROR("$(value %s) is not implemented yet", sym_); return ""; } virtual string DebugString() const override { return string("AutoVar(") + sym_ + ")"; } protected: AutoVar(CommandEvaluator* ce, const char* sym) : ce_(ce), sym_(sym) {} virtual ~AutoVar() = default; CommandEvaluator* ce_; const char* sym_; }; #define DECLARE_AUTO_VAR_CLASS(name) \ class name : public AutoVar { \ public: \ name(CommandEvaluator* ce, const char* sym) : AutoVar(ce, sym) {} \ virtual ~name() = default; \ virtual void Eval(Evaluator* ev, string* s) const override; \ } DECLARE_AUTO_VAR_CLASS(AutoAtVar); DECLARE_AUTO_VAR_CLASS(AutoLessVar); DECLARE_AUTO_VAR_CLASS(AutoHatVar); 
DECLARE_AUTO_VAR_CLASS(AutoPlusVar); DECLARE_AUTO_VAR_CLASS(AutoStarVar); DECLARE_AUTO_VAR_CLASS(AutoNotImplementedVar); class AutoSuffixDVar : public AutoVar { public: AutoSuffixDVar(CommandEvaluator* ce, const char* sym, Var* wrapped) : AutoVar(ce, sym), wrapped_(wrapped) {} virtual ~AutoSuffixDVar() = default; virtual void Eval(Evaluator* ev, string* s) const override; private: Var* wrapped_; }; class AutoSuffixFVar : public AutoVar { public: AutoSuffixFVar(CommandEvaluator* ce, const char* sym, Var* wrapped) : AutoVar(ce, sym), wrapped_(wrapped) {} virtual ~AutoSuffixFVar() = default; virtual void Eval(Evaluator* ev, string* s) const override; private: Var* wrapped_; }; void AutoAtVar::Eval(Evaluator*, string* s) const { *s += ce_->current_dep_node()->output.str(); } void AutoLessVar::Eval(Evaluator*, string* s) const { auto& ai = ce_->current_dep_node()->actual_inputs; if (!ai.empty()) *s += ai[0].str(); } void AutoHatVar::Eval(Evaluator*, string* s) const { unordered_set seen; WordWriter ww(s); for (Symbol ai : ce_->current_dep_node()->actual_inputs) { if (seen.insert(ai.str()).second) ww.Write(ai.str()); } } void AutoPlusVar::Eval(Evaluator*, string* s) const { WordWriter ww(s); for (Symbol ai : ce_->current_dep_node()->actual_inputs) { ww.Write(ai.str()); } } void AutoStarVar::Eval(Evaluator*, string* s) const { const DepNode* n = ce_->current_dep_node(); if (!n->output_pattern.IsValid()) return; Pattern pat(n->output_pattern.str()); pat.Stem(n->output.str()).AppendToString(s); } void AutoNotImplementedVar::Eval(Evaluator* ev, string*) const { ev->Error(StringPrintf("Automatic variable `$%s' isn't supported yet", sym_)); } void AutoSuffixDVar::Eval(Evaluator* ev, string* s) const { string buf; wrapped_->Eval(ev, &buf); WordWriter ww(s); for (StringPiece tok : WordScanner(buf)) { ww.Write(Dirname(tok)); } } void AutoSuffixFVar::Eval(Evaluator* ev, string* s) const { string buf; wrapped_->Eval(ev, &buf); WordWriter ww(s); for (StringPiece tok : 
WordScanner(buf)) { ww.Write(Basename(tok)); } } void ParseCommandPrefixes(StringPiece* s, bool* echo, bool* ignore_error) { *s = TrimLeftSpace(*s); while (true) { char c = s->get(0); if (c == '@') { *echo = false; } else if (c == '-') { *ignore_error = true; } else if (c == '+') { // ignore recursion marker } else { break; } *s = TrimLeftSpace(s->substr(1)); } } } // namespace CommandEvaluator::CommandEvaluator(Evaluator* ev) : ev_(ev) { #define INSERT_AUTO_VAR(name, sym) \ do { \ Var* v = new name(this, sym); \ Intern(sym).SetGlobalVar(v); \ Intern(sym "D").SetGlobalVar(new AutoSuffixDVar(this, sym "D", v)); \ Intern(sym "F").SetGlobalVar(new AutoSuffixFVar(this, sym "F", v)); \ } while (0) INSERT_AUTO_VAR(AutoAtVar, "@"); INSERT_AUTO_VAR(AutoLessVar, "<"); INSERT_AUTO_VAR(AutoHatVar, "^"); INSERT_AUTO_VAR(AutoPlusVar, "+"); INSERT_AUTO_VAR(AutoStarVar, "*"); // TODO: Implement them. INSERT_AUTO_VAR(AutoNotImplementedVar, "%"); INSERT_AUTO_VAR(AutoNotImplementedVar, "?"); INSERT_AUTO_VAR(AutoNotImplementedVar, "|"); } void CommandEvaluator::Eval(DepNode* n, vector* commands) { ev_->set_loc(n->loc); ev_->set_current_scope(n->rule_vars); current_dep_node_ = n; for (Value* v : n->cmds) { const string&& cmds_buf = v->Eval(ev_); StringPiece cmds = cmds_buf; bool global_echo = !g_flags.is_silent_mode; bool global_ignore_error = false; ParseCommandPrefixes(&cmds, &global_echo, &global_ignore_error); if (cmds == "") continue; while (true) { size_t lf_cnt; size_t index = FindEndOfLine(cmds, 0, &lf_cnt); if (index == cmds.size()) index = string::npos; StringPiece cmd = TrimLeftSpace(cmds.substr(0, index)); cmds = cmds.substr(index + 1); bool echo = global_echo; bool ignore_error = global_ignore_error; ParseCommandPrefixes(&cmd, &echo, &ignore_error); if (!cmd.empty()) { Command* command = new Command(n->output); command->cmd = cmd.as_string(); command->echo = echo; command->ignore_error = ignore_error; commands->push_back(command); } if (index == string::npos) break; } 
continue; } if (!ev_->delayed_output_commands().empty()) { vector output_commands; for (const string& cmd : ev_->delayed_output_commands()) { Command* c = new Command(n->output); c->cmd = cmd; c->echo = false; c->ignore_error = false; output_commands.push_back(c); } // Prepend |output_commands|. commands->swap(output_commands); copy(output_commands.begin(), output_commands.end(), back_inserter(*commands)); ev_->clear_delayed_output_commands(); } ev_->set_current_scope(NULL); } command.h0100644 0000000 0000000 00000002254 13654546140 011407 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef COMMAND_H_ #define COMMAND_H_ #include #include "symtab.h" using namespace std; struct DepNode; class Evaluator; struct Command { explicit Command(Symbol o) : output(o), echo(true), ignore_error(false) {} Symbol output; string cmd; bool echo; bool ignore_error; }; class CommandEvaluator { public: explicit CommandEvaluator(Evaluator* ev); void Eval(DepNode* n, vector* commands); const DepNode* current_dep_node() const { return current_dep_node_; } private: Evaluator* ev_; DepNode* current_dep_node_; }; #endif // COMMAND_H_ dep.cc0100644 0000000 0000000 00000061275 13654546140 010707 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "dep.h" #include #include #include #include #include #include #include "eval.h" #include "fileutil.h" #include "flags.h" #include "log.h" #include "rule.h" #include "stats.h" #include "strutil.h" #include "symtab.h" #include "timeutil.h" #include "var.h" namespace { static vector* g_dep_node_pool; static Symbol ReplaceSuffix(Symbol s, Symbol newsuf) { string r; AppendString(StripExt(s.str()), &r); r += '.'; AppendString(newsuf.str(), &r); return Intern(r); } void ApplyOutputPattern(const Rule& r, Symbol output, const vector& inputs, vector* out_inputs) { if (inputs.empty()) return; if (r.is_suffix_rule) { for (Symbol input : inputs) { out_inputs->push_back(ReplaceSuffix(output, input)); } return; } if (r.output_patterns.empty()) { copy(inputs.begin(), inputs.end(), back_inserter(*out_inputs)); return; } CHECK(r.output_patterns.size() == 1); Pattern pat(r.output_patterns[0].str()); for (Symbol input : inputs) { string buf; pat.AppendSubst(output.str(), input.str(), &buf); out_inputs->push_back(Intern(buf)); } } class RuleTrie { struct Entry { Entry(const Rule* r, StringPiece s) : rule(r), suffix(s) {} const Rule* rule; StringPiece suffix; }; public: RuleTrie() {} ~RuleTrie() { for (auto& p : children_) delete p.second; } void Add(StringPiece name, const Rule* rule) { if (name.empty() || name[0] == '%') { rules_.push_back(Entry(rule, name)); return; } const char c = name[0]; auto p = children_.emplace(c, nullptr); if (p.second) { p.first->second = new RuleTrie(); } p.first->second->Add(name.substr(1), rule); } void Get(StringPiece 
name, vector* rules) const { for (const Entry& ent : rules_) { if ((ent.suffix.empty() && name.empty()) || HasSuffix(name, ent.suffix.substr(1))) { rules->push_back(ent.rule); } } if (name.empty()) return; auto found = children_.find(name[0]); if (found != children_.end()) { found->second->Get(name.substr(1), rules); } } size_t size() const { size_t r = rules_.size(); for (const auto& c : children_) r += c.second->size(); return r; } private: vector rules_; unordered_map children_; }; bool IsSuffixRule(Symbol output) { if (output.empty() || output.str()[0] != '.') return false; const StringPiece rest = StringPiece(output.str()).substr(1); size_t dot_index = rest.find('.'); // If there is only a single dot or the third dot, this is not a // suffix rule. if (dot_index == string::npos || rest.substr(dot_index + 1).find('.') != string::npos) { return false; } return true; } struct RuleMerger { vector rules; vector> implicit_outputs; const Rule* primary_rule; const RuleMerger* parent; Symbol parent_sym; bool is_double_colon; RuleMerger() : primary_rule(nullptr), parent(nullptr), is_double_colon(false) {} void AddImplicitOutput(Symbol output, RuleMerger* merger) { implicit_outputs.push_back(make_pair(output, merger)); } void SetImplicitOutput(Symbol output, Symbol p, const RuleMerger* merger) { if (!merger->primary_rule) { ERROR("*** implicit output `%s' on phony target `%s'", output.c_str(), p.c_str()); } if (parent) { ERROR_LOC(merger->primary_rule->cmd_loc(), "*** implicit output `%s' of `%s' was already defined by `%s' " "at %s:%d", output.c_str(), p.c_str(), parent_sym.c_str(), LOCF(parent->primary_rule->cmd_loc())); } if (primary_rule) { ERROR_LOC(primary_rule->cmd_loc(), "*** implicit output `%s' may not have commands", output.c_str()); } parent = merger; parent_sym = p; } void AddRule(Symbol output, const Rule* r) { if (rules.empty()) { is_double_colon = r->is_double_colon; } else if (is_double_colon != r->is_double_colon) { ERROR_LOC(r->loc, "*** target file 
`%s' has both : and :: entries.", output.c_str()); } if (primary_rule && !r->cmds.empty() && !IsSuffixRule(output) && !r->is_double_colon) { if (g_flags.werror_overriding_commands) { ERROR_LOC(r->cmd_loc(), "*** overriding commands for target `%s', previously defined " "at %s:%d", output.c_str(), LOCF(primary_rule->cmd_loc())); } else { WARN_LOC(r->cmd_loc(), "warning: overriding commands for target `%s'", output.c_str()); WARN_LOC(primary_rule->cmd_loc(), "warning: ignoring old commands for target `%s'", output.c_str()); } primary_rule = r; } if (!primary_rule && !r->cmds.empty()) { primary_rule = r; } rules.push_back(r); } void FillDepNodeFromRule(Symbol output, const Rule* r, DepNode* n) const { if (is_double_colon) copy(r->cmds.begin(), r->cmds.end(), back_inserter(n->cmds)); ApplyOutputPattern(*r, output, r->inputs, &n->actual_inputs); ApplyOutputPattern(*r, output, r->order_only_inputs, &n->actual_order_only_inputs); if (r->output_patterns.size() >= 1) { CHECK(r->output_patterns.size() == 1); n->output_pattern = r->output_patterns[0]; } } void FillDepNodeLoc(const Rule* r, DepNode* n) const { n->loc = r->loc; if (!r->cmds.empty() && r->cmd_lineno) n->loc.lineno = r->cmd_lineno; } void FillDepNode(Symbol output, const Rule* pattern_rule, DepNode* n) const { if (primary_rule) { CHECK(!pattern_rule); FillDepNodeFromRule(output, primary_rule, n); FillDepNodeLoc(primary_rule, n); n->cmds = primary_rule->cmds; } else if (pattern_rule) { FillDepNodeFromRule(output, pattern_rule, n); FillDepNodeLoc(pattern_rule, n); n->cmds = pattern_rule->cmds; } for (const Rule* r : rules) { if (r == primary_rule) continue; FillDepNodeFromRule(output, r, n); if (n->loc.filename == NULL) n->loc = r->loc; } for (auto& implicit_output : implicit_outputs) { n->implicit_outputs.push_back(implicit_output.first); for (const Rule* r : implicit_output.second->rules) { FillDepNodeFromRule(output, r, n); } } } }; } // namespace DepNode::DepNode(Symbol o, bool p, bool r) : output(o), 
has_rule(false), is_default_target(false), is_phony(p), is_restat(r), rule_vars(NULL), depfile_var(NULL), ninja_pool_var(NULL) { g_dep_node_pool->push_back(this); } class DepBuilder { public: DepBuilder(Evaluator* ev, const vector& rules, const unordered_map& rule_vars) : ev_(ev), rule_vars_(rule_vars), implicit_rules_(new RuleTrie()), depfile_var_name_(Intern(".KATI_DEPFILE")), implicit_outputs_var_name_(Intern(".KATI_IMPLICIT_OUTPUTS")), ninja_pool_var_name_(Intern(".KATI_NINJA_POOL")) { ScopedTimeReporter tr("make dep (populate)"); PopulateRules(rules); // TODO? // LOG_STAT("%zu variables", ev->mutable_vars()->size()); LOG_STAT("%zu explicit rules", rules_.size()); LOG_STAT("%zu implicit rules", implicit_rules_->size()); LOG_STAT("%zu suffix rules", suffix_rules_.size()); HandleSpecialTargets(); } void HandleSpecialTargets() { Loc loc; vector targets; if (GetRuleInputs(Intern(".PHONY"), &targets, &loc)) { for (Symbol t : targets) phony_.insert(t); } if (GetRuleInputs(Intern(".KATI_RESTAT"), &targets, &loc)) { for (Symbol t : targets) restat_.insert(t); } if (GetRuleInputs(Intern(".SUFFIXES"), &targets, &loc)) { if (targets.empty()) { suffix_rules_.clear(); } else { WARN_LOC(loc, "kati doesn't support .SUFFIXES with prerequisites"); } } // Note we can safely ignore .DELETE_ON_ERROR for --ninja mode. 
static const char* kUnsupportedBuiltinTargets[] = {".DEFAULT", ".PRECIOUS", ".INTERMEDIATE", ".SECONDARY", ".SECONDEXPANSION", ".IGNORE", ".LOW_RESOLUTION_TIME", ".SILENT", ".EXPORT_ALL_VARIABLES", ".NOTPARALLEL", ".ONESHELL", NULL}; for (const char** p = kUnsupportedBuiltinTargets; *p; p++) { if (GetRuleInputs(Intern(*p), &targets, &loc)) { WARN_LOC(loc, "kati doesn't support %s", *p); } } } ~DepBuilder() {} void Build(vector targets, vector* nodes) { if (!first_rule_.IsValid()) { ERROR("*** No targets."); } if (!g_flags.gen_all_targets && targets.empty()) { targets.push_back(first_rule_); } if (g_flags.gen_all_targets) { SymbolSet non_root_targets; for (const auto& p : rules_) { if (p.first.get(0) == '.') continue; for (const Rule* r : p.second.rules) { for (Symbol t : r->inputs) non_root_targets.insert(t); for (Symbol t : r->order_only_inputs) non_root_targets.insert(t); } } for (const auto& p : rules_) { Symbol t = p.first; if (!non_root_targets.exists(t) && t.get(0) != '.') { targets.push_back(p.first); } } } // TODO: LogStats? 
for (Symbol target : targets) { cur_rule_vars_.reset(new Vars); ev_->set_current_scope(cur_rule_vars_.get()); DepNode* n = BuildPlan(target, Intern("")); nodes->push_back({target, n}); ev_->set_current_scope(NULL); cur_rule_vars_.reset(NULL); } } private: bool Exists(Symbol target) { return (rules_.find(target) != rules_.end()) || phony_.exists(target) || ::Exists(target.str()); } bool GetRuleInputs(Symbol s, vector* o, Loc* l) { auto found = rules_.find(s); if (found == rules_.end()) return false; o->clear(); CHECK(!found->second.rules.empty()); *l = found->second.rules.front()->loc; for (const Rule* r : found->second.rules) { for (Symbol i : r->inputs) o->push_back(i); } return true; } void PopulateRules(const vector& rules) { for (const Rule* rule : rules) { if (rule->outputs.empty()) { PopulateImplicitRule(rule); } else { PopulateExplicitRule(rule); } } for (auto& p : suffix_rules_) { reverse(p.second.begin(), p.second.end()); } for (auto& p : rules_) { auto vars = LookupRuleVars(p.first); if (!vars) { continue; } auto var = vars->Lookup(implicit_outputs_var_name_); if (!var->IsDefined()) { continue; } string implicit_outputs; var->Eval(ev_, &implicit_outputs); for (StringPiece output : WordScanner(implicit_outputs)) { Symbol sym = Intern(TrimLeadingCurdir(output)); rules_[sym].SetImplicitOutput(sym, p.first, &p.second); p.second.AddImplicitOutput(sym, &rules_[sym]); } } } bool PopulateSuffixRule(const Rule* rule, Symbol output) { if (!IsSuffixRule(output)) return false; if (g_flags.werror_suffix_rules) { ERROR_LOC(rule->loc, "*** suffix rules are obsolete: %s", output.c_str()); } else if (g_flags.warn_suffix_rules) { WARN_LOC(rule->loc, "warning: suffix rules are deprecated: %s", output.c_str()); } const StringPiece rest = StringPiece(output.str()).substr(1); size_t dot_index = rest.find('.'); StringPiece input_suffix = rest.substr(0, dot_index); StringPiece output_suffix = rest.substr(dot_index + 1); shared_ptr r = make_shared(*rule); r->inputs.clear(); 
r->inputs.push_back(Intern(input_suffix)); r->is_suffix_rule = true; suffix_rules_[output_suffix].push_back(r); return true; } void PopulateExplicitRule(const Rule* rule) { for (Symbol output : rule->outputs) { if (!first_rule_.IsValid() && output.get(0) != '.') { first_rule_ = output; } rules_[output].AddRule(output, rule); PopulateSuffixRule(rule, output); } } static bool IsIgnorableImplicitRule(const Rule* rule) { // As kati doesn't have RCS/SCCS related default rules, we can // safely ignore suppression for them. if (rule->inputs.size() != 1) return false; if (!rule->order_only_inputs.empty()) return false; if (!rule->cmds.empty()) return false; const string& i = rule->inputs[0].str(); return (i == "RCS/%,v" || i == "RCS/%" || i == "%,v" || i == "s.%" || i == "SCCS/s.%"); } void PopulateImplicitRule(const Rule* rule) { for (Symbol output_pattern : rule->output_patterns) { if (output_pattern.str() != "%" || !IsIgnorableImplicitRule(rule)) { if (g_flags.werror_implicit_rules) { ERROR_LOC(rule->loc, "*** implicit rules are obsolete: %s", output_pattern.c_str()); } else if (g_flags.warn_implicit_rules) { WARN_LOC(rule->loc, "warning: implicit rules are deprecated: %s", output_pattern.c_str()); } implicit_rules_->Add(output_pattern.str(), rule); } } } const RuleMerger* LookupRuleMerger(Symbol o) { auto found = rules_.find(o); if (found != rules_.end()) { return &found->second; } return nullptr; } Vars* LookupRuleVars(Symbol o) { auto found = rule_vars_.find(o); if (found != rule_vars_.end()) return found->second; return nullptr; } bool CanPickImplicitRule(const Rule* rule, Symbol output, DepNode* n, shared_ptr* out_rule) { Symbol matched; for (Symbol output_pattern : rule->output_patterns) { Pattern pat(output_pattern.str()); if (pat.Match(output.str())) { bool ok = true; for (Symbol input : rule->inputs) { string buf; pat.AppendSubst(output.str(), input.str(), &buf); if (!Exists(Intern(buf))) { ok = false; break; } } if (ok) { matched = output_pattern; break; } } } 
if (!matched.IsValid()) return false; *out_rule = make_shared(*rule); if ((*out_rule)->output_patterns.size() > 1) { // We should mark all other output patterns as used. Pattern pat(matched.str()); for (Symbol output_pattern : rule->output_patterns) { if (output_pattern == matched) continue; string buf; pat.AppendSubst(output.str(), output_pattern.str(), &buf); done_[Intern(buf)] = n; } (*out_rule)->output_patterns.clear(); (*out_rule)->output_patterns.push_back(matched); } return true; } Vars* MergeImplicitRuleVars(Symbol output, Vars* vars) { auto found = rule_vars_.find(output); if (found == rule_vars_.end()) return vars; if (vars == NULL) return found->second; // TODO: leak. Vars* r = new Vars(*found->second); for (auto p : *vars) { (*r)[p.first] = p.second; } return r; } bool PickRule(Symbol output, DepNode* n, const RuleMerger** out_rule_merger, shared_ptr* pattern_rule, Vars** out_var) { const RuleMerger* rule_merger = LookupRuleMerger(output); Vars* vars = LookupRuleVars(output); *out_rule_merger = rule_merger; *out_var = vars; if (rule_merger && rule_merger->primary_rule) { for (auto implicit_output : rule_merger->implicit_outputs) { vars = MergeImplicitRuleVars(implicit_output.first, vars); } *out_var = vars; return true; } vector irules; implicit_rules_->Get(output.str(), &irules); for (auto iter = irules.rbegin(); iter != irules.rend(); ++iter) { if (!CanPickImplicitRule(*iter, output, n, pattern_rule)) continue; if (rule_merger) { return true; } CHECK((*pattern_rule)->output_patterns.size() == 1); vars = MergeImplicitRuleVars((*pattern_rule)->output_patterns[0], vars); *out_var = vars; return true; } StringPiece output_suffix = GetExt(output.str()); if (output_suffix.get(0) != '.') return rule_merger; output_suffix = output_suffix.substr(1); SuffixRuleMap::const_iterator found = suffix_rules_.find(output_suffix); if (found == suffix_rules_.end()) return rule_merger; for (const shared_ptr& irule : found->second) { CHECK(irule->inputs.size() == 1); 
Symbol input = ReplaceSuffix(output, irule->inputs[0]); if (!Exists(input)) continue; *pattern_rule = irule; if (rule_merger) return true; if (vars) { CHECK(irule->outputs.size() == 1); vars = MergeImplicitRuleVars(irule->outputs[0], vars); *out_var = vars; } return true; } return rule_merger; } DepNode* BuildPlan(Symbol output, Symbol needed_by UNUSED) { LOG("BuildPlan: %s for %s", output.c_str(), needed_by.c_str()); auto found = done_.find(output); if (found != done_.end()) { return found->second; } DepNode* n = new DepNode(output, phony_.exists(output), restat_.exists(output)); done_[output] = n; const RuleMerger* rule_merger = nullptr; shared_ptr pattern_rule; Vars* vars; if (!PickRule(output, n, &rule_merger, &pattern_rule, &vars)) { return n; } if (rule_merger && rule_merger->parent) { output = rule_merger->parent_sym; done_[output] = n; n->output = output; if (!PickRule(output, n, &rule_merger, &pattern_rule, &vars)) { return n; } } if (rule_merger) rule_merger->FillDepNode(output, pattern_rule.get(), n); else RuleMerger().FillDepNode(output, pattern_rule.get(), n); vector> sv; if (vars) { for (const auto& p : *vars) { Symbol name = p.first; Var* var = p.second; CHECK(var); Var* new_var = var; if (var->op() == AssignOp::PLUS_EQ) { Var* old_var = ev_->LookupVar(name); if (old_var->IsDefined()) { // TODO: This would be incorrect and has a leak. 
shared_ptr s = make_shared(); old_var->Eval(ev_, s.get()); if (!s->empty()) *s += ' '; new_var->Eval(ev_, s.get()); new_var = new SimpleVar(*s, old_var->Origin()); } } else if (var->op() == AssignOp::QUESTION_EQ) { Var* old_var = ev_->LookupVar(name); if (old_var->IsDefined()) { continue; } } if (name == depfile_var_name_) { n->depfile_var = new_var; } else if (name == implicit_outputs_var_name_) { } else if (name == ninja_pool_var_name_) { n->ninja_pool_var = new_var; } else { sv.emplace_back(new ScopedVar(cur_rule_vars_.get(), name, new_var)); } } } if (g_flags.warn_phony_looks_real && n->is_phony && output.str().find("/") != string::npos) { if (g_flags.werror_phony_looks_real) { ERROR_LOC( n->loc, "*** PHONY target \"%s\" looks like a real file (contains a \"/\")", output.c_str()); } else { WARN_LOC(n->loc, "warning: PHONY target \"%s\" looks like a real file " "(contains a \"/\")", output.c_str()); } } if (!g_flags.writable.empty() && !n->is_phony) { bool found = false; for (const auto& w : g_flags.writable) { if (StringPiece(output.str()).starts_with(w)) { found = true; break; } } if (!found) { if (g_flags.werror_writable) { ERROR_LOC(n->loc, "*** writing to readonly directory: \"%s\"", output.c_str()); } else { WARN_LOC(n->loc, "warning: writing to readonly directory: \"%s\"", output.c_str()); } } } for (Symbol output : n->implicit_outputs) { done_[output] = n; if (g_flags.warn_phony_looks_real && n->is_phony && output.str().find("/") != string::npos) { if (g_flags.werror_phony_looks_real) { ERROR_LOC(n->loc, "*** PHONY target \"%s\" looks like a real file (contains " "a \"/\")", output.c_str()); } else { WARN_LOC(n->loc, "warning: PHONY target \"%s\" looks like a real file " "(contains a \"/\")", output.c_str()); } } if (!g_flags.writable.empty() && !n->is_phony) { bool found = false; for (const auto& w : g_flags.writable) { if (StringPiece(output.str()).starts_with(w)) { found = true; break; } } if (!found) { if (g_flags.werror_writable) { ERROR_LOC(n->loc, 
"*** writing to readonly directory: \"%s\"", output.c_str()); } else { WARN_LOC(n->loc, "warning: writing to readonly directory: \"%s\"", output.c_str()); } } } } for (Symbol input : n->actual_inputs) { DepNode* c = BuildPlan(input, output); n->deps.push_back({input, c}); bool is_phony = c->is_phony; if (!is_phony && !c->has_rule && g_flags.top_level_phony) { is_phony = input.str().find("/") == string::npos; } if (!n->is_phony && is_phony) { if (g_flags.werror_real_to_phony) { ERROR_LOC(n->loc, "*** real file \"%s\" depends on PHONY target \"%s\"", output.c_str(), input.c_str()); } else if (g_flags.warn_real_to_phony) { WARN_LOC(n->loc, "warning: real file \"%s\" depends on PHONY target \"%s\"", output.c_str(), input.c_str()); } } } for (Symbol input : n->actual_order_only_inputs) { DepNode* c = BuildPlan(input, output); n->order_onlys.push_back({input, c}); } n->has_rule = true; n->is_default_target = first_rule_ == output; if (cur_rule_vars_->empty()) { n->rule_vars = NULL; } else { n->rule_vars = new Vars; for (auto p : *cur_rule_vars_) { n->rule_vars->insert(p); } } return n; } Evaluator* ev_; map rules_; const unordered_map& rule_vars_; unique_ptr cur_rule_vars_; unique_ptr implicit_rules_; typedef unordered_map>> SuffixRuleMap; SuffixRuleMap suffix_rules_; Symbol first_rule_; unordered_map done_; SymbolSet phony_; SymbolSet restat_; Symbol depfile_var_name_; Symbol implicit_outputs_var_name_; Symbol ninja_pool_var_name_; }; void MakeDep(Evaluator* ev, const vector& rules, const unordered_map& rule_vars, const vector& targets, vector* nodes) { DepBuilder db(ev, rules, rule_vars); ScopedTimeReporter tr("make dep (build)"); db.Build(targets, nodes); } void InitDepNodePool() { g_dep_node_pool = new vector; } void QuitDepNodePool() { for (DepNode* n : *g_dep_node_pool) delete n; delete g_dep_node_pool; } dep.go0100644 0000000 0000000 00000035702 13654546140 010723 0ustar000000000 0000000 // Copyright 2015 Google Inc. 
All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "fmt" "path/filepath" "sort" "strings" "github.com/golang/glog" ) // DepNode represents a makefile rule for an output. type DepNode struct { Output string Cmds []string Deps []*DepNode OrderOnlys []*DepNode Parents []*DepNode HasRule bool IsPhony bool ActualInputs []string TargetSpecificVars Vars Filename string Lineno int } func (n *DepNode) String() string { return fmt.Sprintf("Dep{output=%s cmds=%d deps=%d orders=%d hasRule=%t phony=%t filename=%s lineno=%d}", n.Output, len(n.Cmds), len(n.Deps), len(n.OrderOnlys), n.HasRule, n.IsPhony, n.Filename, n.Lineno) } type depBuilder struct { rules map[string]*rule ruleVars map[string]Vars implicitRules *ruleTrie suffixRules map[string][]*rule firstRule *rule vars Vars ev *Evaluator vpaths searchPaths done map[string]*DepNode phony map[string]bool trace []string nodeCnt int pickExplicitRuleCnt int pickImplicitRuleCnt int pickSuffixRuleCnt int pickExplicitRuleWithoutCmdCnt int } type ruleTrieEntry struct { rule *rule suffix string } type ruleTrie struct { rules []ruleTrieEntry children map[byte]*ruleTrie } func newRuleTrie() *ruleTrie { return &ruleTrie{ children: make(map[byte]*ruleTrie), } } func (rt *ruleTrie) add(name string, r *rule) { glog.V(1).Infof("rule trie: add %q %v %s", name, r.outputPatterns[0], r) if name == "" || name[0] == '%' { glog.V(1).Infof("rule trie: add entry %q %v %s", name, r.outputPatterns[0], r) rt.rules = 
append(rt.rules, ruleTrieEntry{ rule: r, suffix: name, }) return } c, found := rt.children[name[0]] if !found { c = newRuleTrie() rt.children[name[0]] = c } c.add(name[1:], r) } func (rt *ruleTrie) lookup(name string) []*rule { glog.V(1).Infof("rule trie: lookup %q", name) if rt == nil { return nil } var rules []*rule for _, entry := range rt.rules { if (entry.suffix == "" && name == "") || strings.HasSuffix(name, entry.suffix[1:]) { rules = append(rules, entry.rule) } } if name == "" { return rules } rules = append(rules, rt.children[name[0]].lookup(name[1:])...) glog.V(1).Infof("rule trie: lookup %q => %v", name, rules) return rules } func (rt *ruleTrie) size() int { if rt == nil { return 0 } size := len(rt.rules) for _, c := range rt.children { size += c.size() } return size } func replaceSuffix(s string, newsuf string) string { // TODO: Factor out the logic around suffix rules and use // it from substitution references. // http://www.gnu.org/software/make/manual/make.html#Substitution-Refs return fmt.Sprintf("%s.%s", stripExt(s), newsuf) } func (db *depBuilder) exists(target string) bool { _, present := db.rules[target] if present { return true } if db.phony[target] { return true } _, ok := db.vpaths.exists(target) return ok } func (db *depBuilder) canPickImplicitRule(r *rule, output string) bool { outputPattern := r.outputPatterns[0] if !outputPattern.match(output) { return false } for _, input := range r.inputs { input = outputPattern.subst(input, output) if !db.exists(input) { return false } } return true } func (db *depBuilder) mergeImplicitRuleVars(outputs []string, vars Vars) Vars { if len(outputs) != 1 { // TODO(ukai): should return error? panic(fmt.Sprintf("FIXME: Implicit rule should have only one output but %q", outputs)) } glog.V(1).Infof("merge? %q", db.ruleVars) glog.V(1).Infof("merge? 
%q", outputs[0]) ivars, present := db.ruleVars[outputs[0]] if !present { return vars } if vars == nil { return ivars } glog.V(1).Info("merge!") v := make(Vars) v.Merge(ivars) v.Merge(vars) return v } func (db *depBuilder) pickRule(output string) (*rule, Vars, bool) { r, present := db.rules[output] vars := db.ruleVars[output] if present { db.pickExplicitRuleCnt++ if len(r.cmds) > 0 { return r, vars, true } // If none of the explicit rules for a target has commands, // then `make' searches for an applicable implicit rule to // find some commands. db.pickExplicitRuleWithoutCmdCnt++ } irules := db.implicitRules.lookup(output) for i := len(irules) - 1; i >= 0; i-- { irule := irules[i] if !db.canPickImplicitRule(irule, output) { glog.Infof("ignore implicit rule %q %s", output, irule) continue } glog.Infof("pick implicit rule %q => %q %s", output, irule.outputPatterns, irule) db.pickImplicitRuleCnt++ if r != nil { ir := &rule{} *ir = *r ir.outputPatterns = irule.outputPatterns // implicit rule's prerequisites will be used for $< ir.inputs = append(irule.inputs, ir.inputs...) ir.cmds = irule.cmds // TODO(ukai): filename, lineno? ir.cmdLineno = irule.cmdLineno return ir, vars, true } if vars != nil { var outputs []string for _, op := range irule.outputPatterns { outputs = append(outputs, op.String()) } vars = db.mergeImplicitRuleVars(outputs, vars) } // TODO(ukai): check len(irule.cmd) ? return irule, vars, true } outputSuffix := filepath.Ext(output) if !strings.HasPrefix(outputSuffix, ".") { return r, vars, r != nil } rules, present := db.suffixRules[outputSuffix[1:]] if !present { return r, vars, r != nil } for _, irule := range rules { if len(irule.inputs) != 1 { // TODO(ukai): should return error? panic(fmt.Sprintf("FIXME: unexpected number of input for a suffix rule (%d)", len(irule.inputs))) } if !db.exists(replaceSuffix(output, irule.inputs[0])) { continue } db.pickSuffixRuleCnt++ if r != nil { sr := &rule{} *sr = *r // TODO(ukai): input order is correct? 
			// Prepend the resolved suffix-rule input so it becomes the
			// first prerequisite; r's explicit inputs keep their order.
			sr.inputs = append([]string{replaceSuffix(output, irule.inputs[0])}, r.inputs...)
			sr.cmds = irule.cmds
			// TODO(ukai): filename, lineno?
			sr.cmdLineno = irule.cmdLineno
			return sr, vars, true
		}
		if vars != nil {
			vars = db.mergeImplicitRuleVars(irule.outputs, vars)
		}
		// TODO(ukai): check len(irule.cmd) ?
		return irule, vars, true
	}
	return r, vars, r != nil
}

// expandInputs resolves rule.inputs for the given output: a pattern
// rule substitutes the stem via outputPatterns[0], a suffix rule
// rewrites the output's extension. Results are interned strings.
func expandInputs(rule *rule, output string) []string {
	var inputs []string
	for _, input := range rule.inputs {
		if len(rule.outputPatterns) > 0 {
			if len(rule.outputPatterns) != 1 {
				panic(fmt.Sprintf("FIXME: multiple output pattern is not supported yet"))
			}
			input = intern(rule.outputPatterns[0].subst(input, output))
		} else if rule.isSuffixRule {
			input = intern(replaceSuffix(output, input))
		}
		inputs = append(inputs, input)
	}
	return inputs
}

// buildPlan builds the DepNode graph rooted at output, memoizing
// nodes in db.done so shared dependencies are visited only once.
// neededBy is the parent target (currently unused in this body);
// tsvs accumulates target-specific variables inherited down the
// dependency chain.
func (db *depBuilder) buildPlan(output string, neededBy string, tsvs Vars) (*DepNode, error) {
	glog.V(1).Infof("Evaluating command: %s", output)
	db.nodeCnt++
	// Emit periodic progress stats every 100 nodes.
	if db.nodeCnt%100 == 0 {
		db.reportStats()
	}
	if n, present := db.done[output]; present {
		return n, nil
	}
	// Register the node before recursing so dependency cycles
	// terminate via the db.done lookup above.
	n := &DepNode{Output: output, IsPhony: db.phony[output]}
	db.done[output] = n

	// create depnode for phony targets?
	rule, vars, present := db.pickRule(output)
	if !present {
		return n, nil
	}

	var restores []func()
	if vars != nil {
		for name, v := range vars {
			// TODO: Consider not updating db.vars.
tsv := v.(*targetSpecificVar) restores = append(restores, db.vars.save(name)) restores = append(restores, tsvs.save(name)) switch tsv.op { case ":=", "=": db.vars[name] = tsv tsvs[name] = v case "+=": oldVar, present := db.vars[name] if !present || oldVar.String() == "" { db.vars[name] = tsv } else { var err error v, err = oldVar.AppendVar(db.ev, tsv) if err != nil { return nil, err } db.vars[name] = v } tsvs[name] = v case "?=": if _, present := db.vars[name]; !present { db.vars[name] = tsv tsvs[name] = v } } } defer func() { for _, restore := range restores { restore() } }() } inputs := expandInputs(rule, output) glog.Infof("Evaluating command: %s inputs:%q => %q", output, rule.inputs, inputs) for _, input := range inputs { db.trace = append(db.trace, input) ni, err := db.buildPlan(input, output, tsvs) db.trace = db.trace[0 : len(db.trace)-1] if err != nil { return nil, err } if ni != nil { n.Deps = append(n.Deps, ni) ni.Parents = append(ni.Parents, n) } } for _, input := range rule.orderOnlyInputs { db.trace = append(db.trace, input) ni, err := db.buildPlan(input, output, tsvs) db.trace = db.trace[0 : len(db.trace)-1] if err != nil { return nil, err } if n != nil { n.OrderOnlys = append(n.OrderOnlys, ni) ni.Parents = append(ni.Parents, n) } } n.HasRule = true n.Cmds = rule.cmds n.ActualInputs = inputs n.TargetSpecificVars = make(Vars) for k, v := range tsvs { if glog.V(1) { glog.Infof("output=%s tsv %s=%s", output, k, v) } n.TargetSpecificVars[k] = v } n.Filename = rule.filename if len(rule.cmds) > 0 { if rule.cmdLineno > 0 { n.Lineno = rule.cmdLineno } else { n.Lineno = rule.lineno } } return n, nil } func (db *depBuilder) populateSuffixRule(r *rule, output string) bool { if len(output) == 0 || output[0] != '.' { return false } rest := output[1:] dotIndex := strings.IndexByte(rest, '.') // If there is only a single dot or the third dot, this is not a // suffix rule. 
	if dotIndex < 0 || strings.IndexByte(rest[dotIndex+1:], '.') >= 0 {
		return false
	}
	// This is a suffix rule.
	inputSuffix := rest[:dotIndex]
	outputSuffix := rest[dotIndex+1:]
	sr := &rule{}
	*sr = *r
	sr.inputs = []string{inputSuffix}
	sr.isSuffixRule = true
	// Prepend so the most recently defined suffix rule is tried first.
	db.suffixRules[outputSuffix] = append([]*rule{sr}, db.suffixRules[outputSuffix]...)
	return true
}

// mergeRules combines a previously registered rule for output with a
// newly seen rule r, following make semantics for double-colon (::)
// rules, command overriding, and prerequisite ordering.
func mergeRules(oldRule, r *rule, output string, isSuffixRule bool) (*rule, error) {
	if oldRule.isDoubleColon != r.isDoubleColon {
		return nil, r.errorf("*** target file %q has both : and :: entries.", output)
	}
	if len(oldRule.cmds) > 0 && len(r.cmds) > 0 && !isSuffixRule && !r.isDoubleColon {
		warn(r.cmdpos(), "overriding commands for target %q", output)
		warn(oldRule.cmdpos(), "ignoring old commands for target %q", output)
	}
	mr := &rule{}
	*mr = *r
	if r.isDoubleColon {
		// Double-colon rules accumulate command lists.
		// NOTE(review): append may write into oldRule.cmds' backing
		// array if it has spare capacity — confirm oldRule is never
		// read after this merge.
		mr.cmds = append(oldRule.cmds, mr.cmds...)
	} else if len(oldRule.cmds) > 0 && len(r.cmds) == 0 {
		mr.cmds = oldRule.cmds
	}
	// If the latter rule has a command (regardless of the
	// commands in oldRule), inputs in the latter rule has a
	// priority.
	if len(r.cmds) > 0 {
		mr.inputs = append(mr.inputs, oldRule.inputs...)
		mr.orderOnlyInputs = append(mr.orderOnlyInputs, oldRule.orderOnlyInputs...)
	} else {
		mr.inputs = append(oldRule.inputs, mr.inputs...)
		mr.orderOnlyInputs = append(oldRule.orderOnlyInputs, mr.orderOnlyInputs...)
	}
	mr.outputPatterns = append(mr.outputPatterns, oldRule.outputPatterns...)
	return mr, nil
}

// expandPattern expands static pattern (target: target-pattern: prereq-pattern).
func expandPattern(r *rule) []*rule {
	// Not a static pattern rule: no plain outputs, or not exactly
	// one target-pattern.
	if len(r.outputs) == 0 {
		return []*rule{r}
	}
	if len(r.outputPatterns) != 1 {
		return []*rule{r}
	}
	var rules []*rule
	pat := r.outputPatterns[0]
	// One concrete rule per target, with the stem substituted into
	// each prerequisite.
	for _, output := range r.outputs {
		nr := new(rule)
		*nr = *r
		nr.outputs = []string{output}
		nr.outputPatterns = nil
		nr.inputs = nil
		for _, input := range r.inputs {
			nr.inputs = append(nr.inputs, intern(pat.subst(input, output)))
		}
		rules = append(rules, nr)
	}
	glog.V(1).Infof("expand static pattern: outputs=%q inputs=%q -> %q", r.outputs, r.inputs, rules)
	return rules
}

// populateExplicitRule registers r under each of its outputs, merging
// with any previously registered rule for the same output, and
// remembers the first non-special rule as the default target.
func (db *depBuilder) populateExplicitRule(r *rule) error {
	// It seems rules with no outputs are silently ignored.
	if len(r.outputs) == 0 {
		return nil
	}
	for _, output := range r.outputs {
		output = trimLeadingCurdir(output)
		isSuffixRule := db.populateSuffixRule(r, output)
		if oldRule, present := db.rules[output]; present {
			mr, err := mergeRules(oldRule, r, output, isSuffixRule)
			if err != nil {
				return err
			}
			db.rules[output] = mr
		} else {
			db.rules[output] = r
			// The first rule whose output is not a dot-prefixed
			// special target becomes the default goal (see Eval).
			if db.firstRule == nil && !strings.HasPrefix(output, ".") {
				db.firstRule = r
			}
		}
	}
	return nil
}

// populateImplicitRule adds one trie entry per output pattern of r.
func (db *depBuilder) populateImplicitRule(r *rule) {
	for _, outputPattern := range r.outputPatterns {
		ir := &rule{}
		*ir = *r
		ir.outputPatterns = []pattern{outputPattern}
		db.implicitRules.add(outputPattern.String(), ir)
	}
}

// populateRules normalizes prerequisites (strips leading "./") and
// routes every evaluated rule into the explicit and implicit tables.
func (db *depBuilder) populateRules(er *evalResult) error {
	for _, r := range er.rules {
		for i, input := range r.inputs {
			r.inputs[i] = trimLeadingCurdir(input)
		}
		for i, orderOnlyInput := range r.orderOnlyInputs {
			r.orderOnlyInputs[i] = trimLeadingCurdir(orderOnlyInput)
		}
		for _, r := range expandPattern(r) {
			err := db.populateExplicitRule(r)
			if err != nil {
				return err
			}
			// Rules left with no concrete outputs are pattern
			// (implicit) rules.
			if len(r.outputs) == 0 {
				db.populateImplicitRule(r)
			}
		}
	}
	return nil
}

// reportStats logs periodic rule-picking counters when enabled.
func (db *depBuilder) reportStats() {
	if !PeriodicStatsFlag {
		return
	}
	logStats("node=%d explicit=%d implicit=%d suffix=%d explicitWOCmd=%d",
		db.nodeCnt, db.pickExplicitRuleCnt, db.pickImplicitRuleCnt, db.pickSuffixRuleCnt,
db.pickExplicitRuleWithoutCmdCnt) if len(db.trace) > 1 { logStats("trace=%q", db.trace) } } func newDepBuilder(er *evalResult, vars Vars) (*depBuilder, error) { db := &depBuilder{ rules: make(map[string]*rule), ruleVars: er.ruleVars, implicitRules: newRuleTrie(), suffixRules: make(map[string][]*rule), vars: vars, ev: NewEvaluator(vars), vpaths: er.vpaths, done: make(map[string]*DepNode), phony: make(map[string]bool), } err := db.populateRules(er) if err != nil { return nil, err } rule, present := db.rules[".PHONY"] if present { for _, input := range rule.inputs { db.phony[input] = true } } return db, nil } func (db *depBuilder) Eval(targets []string) ([]*DepNode, error) { if len(targets) == 0 { if db.firstRule == nil { return nil, fmt.Errorf("*** No targets.") } targets = append(targets, db.firstRule.outputs[0]) var phonys []string for t := range db.phony { phonys = append(phonys, t) } sort.Strings(phonys) targets = append(targets, phonys...) } if StatsFlag { logStats("%d variables", len(db.vars)) logStats("%d explicit rules", len(db.rules)) logStats("%d implicit rules", db.implicitRules.size()) logStats("%d suffix rules", len(db.suffixRules)) logStats("%d dirs %d files", fsCache.dirs(), fsCache.files()) } var nodes []*DepNode for _, target := range targets { db.trace = []string{target} n, err := db.buildPlan(target, "", make(Vars)) if err != nil { return nil, err } nodes = append(nodes, n) } db.reportStats() return nodes, nil } dep.h0100644 0000000 0000000 00000003246 13654546140 010543 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef DEP_H_ #define DEP_H_ #include #include #include #include "loc.h" #include "string_piece.h" #include "symtab.h" class Evaluator; class Rule; class Value; class Var; class Vars; typedef pair NamedDepNode; struct DepNode { DepNode(Symbol output, bool is_phony, bool is_restat); string DebugString(); Symbol output; vector cmds; vector deps; vector order_onlys; vector parents; bool has_rule; bool is_default_target; bool is_phony; bool is_restat; vector implicit_outputs; vector actual_inputs; vector actual_order_only_inputs; Vars* rule_vars; Var* depfile_var; Var* ninja_pool_var; Symbol output_pattern; Loc loc; }; void InitDepNodePool(); void QuitDepNodePool(); void MakeDep(Evaluator* ev, const vector& rules, const unordered_map& rule_vars, const vector& targets, vector* nodes); #endif // DEP_H_ depgraph.go0100644 0000000 0000000 00000012236 13654546140 011742 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package kati import ( "crypto/sha1" "fmt" "io/ioutil" "strings" "time" "github.com/golang/glog" ) // DepGraph represents rules defined in makefiles. type DepGraph struct { nodes []*DepNode vars Vars accessedMks []*accessedMakefile exports map[string]bool vpaths searchPaths } // Nodes returns all rules. func (g *DepGraph) Nodes() []*DepNode { return g.nodes } // Vars returns all variables. func (g *DepGraph) Vars() Vars { return g.vars } func (g *DepGraph) resolveVPATH() { seen := make(map[*DepNode]bool) var fix func(n *DepNode) fix = func(n *DepNode) { if seen[n] { return } seen[n] = true glog.V(3).Infof("vpath check %s [%#v]", n.Output, g.vpaths) if output, ok := g.vpaths.exists(n.Output); ok { glog.V(2).Infof("vpath fix %s=>%s", n.Output, output) n.Output = output } for _, d := range n.Deps { fix(d) } for _, d := range n.OrderOnlys { fix(d) } for _, d := range n.Parents { fix(d) } // fix ActualInputs? } for _, n := range g.nodes { fix(n) } } // LoadReq is a request to load makefile. type LoadReq struct { Makefile string Targets []string CommandLineVars []string EnvironmentVars []string UseCache bool EagerEvalCommand bool } // FromCommandLine creates LoadReq from given command line. func FromCommandLine(cmdline []string) LoadReq { var vars []string var targets []string for _, arg := range cmdline { if strings.IndexByte(arg, '=') >= 0 { vars = append(vars, arg) continue } targets = append(targets, arg) } mk, err := defaultMakefile() if err != nil { glog.Warningf("default makefile: %v", err) } return LoadReq{ Makefile: mk, Targets: targets, CommandLineVars: vars, } } func initVars(vars Vars, kvlist []string, origin string) error { for _, v := range kvlist { kv := strings.SplitN(v, "=", 2) glog.V(1).Infof("%s var %q", origin, v) if len(kv) < 2 { return fmt.Errorf("A weird %s variable %q", origin, kv) } vars.Assign(kv[0], &recursiveVar{ expr: literal(kv[1]), origin: origin, }) } return nil } // Load loads makefile. 
func Load(req LoadReq) (*DepGraph, error) {
	startTime := time.Now()
	var err error
	if req.Makefile == "" {
		req.Makefile, err = defaultMakefile()
		if err != nil {
			return nil, err
		}
	}
	if req.UseCache {
		// Best effort: on any cache-load failure, fall through to a
		// full parse and evaluation.
		g, err := loadCache(req.Makefile, req.Targets)
		if err == nil {
			return g, nil
		}
	}
	bmk, err := bootstrapMakefile(req.Targets)
	if err != nil {
		return nil, err
	}
	content, err := ioutil.ReadFile(req.Makefile)
	if err != nil {
		return nil, err
	}
	mk, err := parseMakefile(content, req.Makefile)
	if err != nil {
		return nil, err
	}
	for _, stmt := range mk.stmts {
		stmt.show()
	}
	// Bootstrap statements (built-in defaults) are evaluated before
	// the user's makefile.
	mk.stmts = append(bmk.stmts, mk.stmts...)

	vars := make(Vars)
	err = initVars(vars, req.EnvironmentVars, "environment")
	if err != nil {
		return nil, err
	}
	// Command-line variables are assigned after environment ones —
	// presumably so they take precedence; verify Vars.Assign
	// semantics.
	err = initVars(vars, req.CommandLineVars, "command line")
	if err != nil {
		return nil, err
	}
	er, err := eval(mk, vars, req.UseCache)
	if err != nil {
		return nil, err
	}
	vars.Merge(er.vars)
	logStats("eval time: %q", time.Since(startTime))
	logStats("shell func time: %q %d", shellStats.Duration(), shellStats.Count())

	startTime = time.Now()
	db, err := newDepBuilder(er, vars)
	if err != nil {
		return nil, err
	}
	logStats("dep build prepare time: %q", time.Since(startTime))

	startTime = time.Now()
	nodes, err := db.Eval(req.Targets)
	if err != nil {
		return nil, err
	}
	logStats("dep build time: %q", time.Since(startTime))

	var accessedMks []*accessedMakefile
	// Always put the root Makefile as the first element.
	accessedMks = append(accessedMks, &accessedMakefile{
		Filename: req.Makefile,
		Hash:     sha1.Sum(content),
		State:    fileExists,
	})
	accessedMks = append(accessedMks, er.accessedMks...)
gd := &DepGraph{ nodes: nodes, vars: vars, accessedMks: accessedMks, exports: er.exports, vpaths: er.vpaths, } if req.EagerEvalCommand { startTime := time.Now() err = evalCommands(nodes, vars) if err != nil { return nil, err } logStats("eager eval command time: %q", time.Since(startTime)) } if req.UseCache { startTime := time.Now() saveCache(gd, req.Targets) logStats("serialize time: %q", time.Since(startTime)) } return gd, nil } // Loader is the interface that loads DepGraph. type Loader interface { Load(string) (*DepGraph, error) } // Saver is the interface that saves DepGraph. type Saver interface { Save(*DepGraph, string, []string) error } // LoadSaver is the interface that groups Load and Save methods. type LoadSaver interface { Loader Saver } doc.go0100644 0000000 0000000 00000001412 13654546140 010707 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /* Package kati provides GNU make compatible functions, especially to speed up the continuous build of Android. */ package kati // TODO(ukai): add more doc comments. eval.cc0100644 0000000 0000000 00000037242 13654546140 011063 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "eval.h" #include #include #include #include "expr.h" #include "file.h" #include "file_cache.h" #include "fileutil.h" #include "parser.h" #include "rule.h" #include "stmt.h" #include "strutil.h" #include "symtab.h" #include "var.h" Evaluator::Evaluator() : last_rule_(NULL), current_scope_(NULL), avoid_io_(false), eval_depth_(0), posix_sym_(Intern(".POSIX")), is_posix_(false), export_error_(false) { #if defined(__APPLE__) stack_size_ = pthread_get_stacksize_np(pthread_self()); stack_addr_ = (char*)pthread_get_stackaddr_np(pthread_self()) - stack_size_; #else pthread_attr_t attr; CHECK(pthread_getattr_np(pthread_self(), &attr) == 0); CHECK(pthread_attr_getstack(&attr, &stack_addr_, &stack_size_) == 0); CHECK(pthread_attr_destroy(&attr) == 0); #endif lowest_stack_ = (char*)stack_addr_ + stack_size_; LOG_STAT("Stack size: %zd bytes", stack_size_); } Evaluator::~Evaluator() { // delete vars_; // for (auto p : rule_vars) { // delete p.second; // } } Var* Evaluator::EvalRHS(Symbol lhs, Value* rhs_v, StringPiece orig_rhs, AssignOp op, bool is_override, bool* needs_assign) { VarOrigin origin = ((is_bootstrap_ ? VarOrigin::DEFAULT : is_commandline_ ? VarOrigin::COMMAND_LINE : is_override ? 
VarOrigin::OVERRIDE : VarOrigin::FILE)); Var* result = NULL; Var* prev = NULL; *needs_assign = true; switch (op) { case AssignOp::COLON_EQ: { prev = PeekVarInCurrentScope(lhs); result = new SimpleVar(origin, this, rhs_v); break; } case AssignOp::EQ: prev = PeekVarInCurrentScope(lhs); result = new RecursiveVar(rhs_v, origin, orig_rhs); break; case AssignOp::PLUS_EQ: { prev = LookupVarInCurrentScope(lhs); if (!prev->IsDefined()) { result = new RecursiveVar(rhs_v, origin, orig_rhs); } else if (prev->ReadOnly()) { Error(StringPrintf("*** cannot assign to readonly variable: %s", lhs.c_str())); } else { result = prev; result->AppendVar(this, rhs_v); *needs_assign = false; } break; } case AssignOp::QUESTION_EQ: { prev = LookupVarInCurrentScope(lhs); if (!prev->IsDefined()) { result = new RecursiveVar(rhs_v, origin, orig_rhs); } else { result = prev; *needs_assign = false; } break; } } if (prev != NULL) { prev->Used(this, lhs); if (prev->Deprecated() && *needs_assign) { result->SetDeprecated(prev->DeprecatedMessage()); } } LOG("Assign: %s=%s", lhs.c_str(), result->DebugString().c_str()); return result; } void Evaluator::EvalAssign(const AssignStmt* stmt) { loc_ = stmt->loc(); last_rule_ = NULL; Symbol lhs = stmt->GetLhsSymbol(this); if (lhs.empty()) Error("*** empty variable name."); if (lhs == kKatiReadonlySym) { string rhs; stmt->rhs->Eval(this, &rhs); for (auto const& name : WordScanner(rhs)) { Var* var = Intern(name).GetGlobalVar(); if (!var->IsDefined()) { Error( StringPrintf("*** unknown variable: %s", name.as_string().c_str())); } var->SetReadOnly(); } return; } bool needs_assign; Var* var = EvalRHS(lhs, stmt->rhs, stmt->orig_rhs, stmt->op, stmt->directive == AssignDirective::OVERRIDE, &needs_assign); if (needs_assign) { bool readonly; lhs.SetGlobalVar(var, stmt->directive == AssignDirective::OVERRIDE, &readonly); if (readonly) { Error(StringPrintf("*** cannot assign to readonly variable: %s", lhs.c_str())); } } if (stmt->is_final) { var->SetReadOnly(); } } // With 
rule broken into // // parses into Symbol instances until encountering ':' // Returns the remainder of . static StringPiece ParseRuleTargets(const Loc& loc, const StringPiece& before_term, vector* targets, bool* is_pattern_rule) { size_t pos = before_term.find(':'); if (pos == string::npos) { ERROR_LOC(loc, "*** missing separator."); } StringPiece targets_string = before_term.substr(0, pos); size_t pattern_rule_count = 0; for (auto const& word : WordScanner(targets_string)) { StringPiece target = TrimLeadingCurdir(word); targets->push_back(Intern(target)); if (Rule::IsPatternRule(target)) { ++pattern_rule_count; } } // Check consistency: either all outputs are patterns or none. if (pattern_rule_count && (pattern_rule_count != targets->size())) { ERROR_LOC(loc, "*** mixed implicit and normal rules: deprecated syntax"); } *is_pattern_rule = pattern_rule_count; return before_term.substr(pos + 1); } void Evaluator::MarkVarsReadonly(Value* vars_list) { string vars_list_string; vars_list->Eval(this, &vars_list_string); for (auto const& name : WordScanner(vars_list_string)) { Var* var = current_scope_->Lookup(Intern(name)); if (!var->IsDefined()) { Error(StringPrintf("*** unknown variable: %s", name.as_string().c_str())); } var->SetReadOnly(); } } void Evaluator::EvalRuleSpecificAssign(const vector& targets, const RuleStmt* stmt, const StringPiece& after_targets, size_t separator_pos) { StringPiece var_name; StringPiece rhs_string; AssignOp assign_op; ParseAssignStatement(after_targets, separator_pos, &var_name, &rhs_string, &assign_op); Symbol var_sym = Intern(var_name); bool is_final = (stmt->sep == RuleStmt::SEP_FINALEQ); for (Symbol target : targets) { auto p = rule_vars_.emplace(target, nullptr); if (p.second) { p.first->second = new Vars; } Value* rhs; if (rhs_string.empty()) { rhs = stmt->rhs; } else if (stmt->rhs) { StringPiece sep(stmt->sep == RuleStmt::SEP_SEMICOLON ? 
" ; " : " = "); rhs = Value::NewExpr(Value::NewLiteral(rhs_string), Value::NewLiteral(sep), stmt->rhs); } else { rhs = Value::NewLiteral(rhs_string); } current_scope_ = p.first->second; if (var_sym == kKatiReadonlySym) { MarkVarsReadonly(rhs); } else { bool needs_assign; Var* rhs_var = EvalRHS(var_sym, rhs, StringPiece("*TODO*"), assign_op, false, &needs_assign); if (needs_assign) { bool readonly; rhs_var->SetAssignOp(assign_op); current_scope_->Assign(var_sym, rhs_var, &readonly); if (readonly) { Error(StringPrintf("*** cannot assign to readonly variable: %s", var_name)); } } if (is_final) { rhs_var->SetReadOnly(); } } current_scope_ = NULL; } } void Evaluator::EvalRule(const RuleStmt* stmt) { loc_ = stmt->loc(); last_rule_ = NULL; const string&& before_term = stmt->lhs->Eval(this); // See semicolon.mk. if (before_term.find_first_not_of(" \t;") == string::npos) { if (stmt->sep == RuleStmt::SEP_SEMICOLON) Error("*** missing rule before commands."); return; } vector targets; bool is_pattern_rule; StringPiece after_targets = ParseRuleTargets(loc_, before_term, &targets, &is_pattern_rule); bool is_double_colon = (after_targets[0] == ':'); if (is_double_colon) { after_targets = after_targets.substr(1); } // Figure out if this is a rule-specific variable assignment. // It is an assignment when either after_targets contains an assignment token // or separator is an assignment token, but only if there is no ';' before the // first assignment token. size_t separator_pos = after_targets.find_first_of("=;"); char separator = '\0'; if (separator_pos != string::npos) { separator = after_targets[separator_pos]; } else if (separator_pos == string::npos && (stmt->sep == RuleStmt::SEP_EQ || stmt->sep == RuleStmt::SEP_FINALEQ)) { separator_pos = after_targets.size(); separator = '='; } // If variable name is not empty, we have rule- or target-specific // variable assignment. 
if (separator == '=' && separator_pos) { EvalRuleSpecificAssign(targets, stmt, after_targets, separator_pos); return; } // "test: =foo" is questionable but a valid rule definition (not a // target specific variable). // See https://github.com/google/kati/issues/83 string buf; if (!separator_pos) { KATI_WARN_LOC(loc_, "defining a target which starts with `=', " "which is not probably what you meant"); buf = after_targets.as_string(); if (stmt->sep == RuleStmt::SEP_SEMICOLON) { buf += ';'; } else if (stmt->sep == RuleStmt::SEP_EQ || stmt->sep == RuleStmt::SEP_FINALEQ) { buf += '='; } if (stmt->rhs) { buf += stmt->rhs->Eval(this); } after_targets = buf; separator_pos = string::npos; } Rule* rule = new Rule(); rule->loc = loc_; rule->is_double_colon = is_double_colon; if (is_pattern_rule) { rule->output_patterns.swap(targets); } else { rule->outputs.swap(targets); } rule->ParsePrerequisites(after_targets, separator_pos, stmt); if (stmt->sep == RuleStmt::SEP_SEMICOLON) { rule->cmds.push_back(stmt->rhs); } for (Symbol o : rule->outputs) { if (o == posix_sym_) is_posix_ = true; } LOG("Rule: %s", rule->DebugString().c_str()); rules_.push_back(rule); last_rule_ = rule; } void Evaluator::EvalCommand(const CommandStmt* stmt) { loc_ = stmt->loc(); if (!last_rule_) { vector stmts; ParseNotAfterRule(stmt->orig, stmt->loc(), &stmts); for (Stmt* a : stmts) a->Eval(this); return; } last_rule_->cmds.push_back(stmt->expr); if (last_rule_->cmd_lineno == 0) last_rule_->cmd_lineno = stmt->loc().lineno; LOG("Command: %s", Value::DebugString(stmt->expr).c_str()); } void Evaluator::EvalIf(const IfStmt* stmt) { loc_ = stmt->loc(); bool is_true; switch (stmt->op) { case CondOp::IFDEF: case CondOp::IFNDEF: { string var_name; stmt->lhs->Eval(this, &var_name); Symbol lhs = Intern(TrimRightSpace(var_name)); if (lhs.str().find_first_of(" \t") != string::npos) Error("*** invalid syntax in conditional."); Var* v = LookupVarInCurrentScope(lhs); v->Used(this, lhs); is_true = (v->String().empty() == 
(stmt->op == CondOp::IFNDEF)); break; } case CondOp::IFEQ: case CondOp::IFNEQ: { const string&& lhs = stmt->lhs->Eval(this); const string&& rhs = stmt->rhs->Eval(this); is_true = ((lhs == rhs) == (stmt->op == CondOp::IFEQ)); break; } default: CHECK(false); abort(); } const vector* stmts; if (is_true) { stmts = &stmt->true_stmts; } else { stmts = &stmt->false_stmts; } for (Stmt* a : *stmts) { LOG("%s", a->DebugString().c_str()); a->Eval(this); } } void Evaluator::DoInclude(const string& fname) { CheckStack(); Makefile* mk = MakefileCacheManager::Get()->ReadMakefile(fname); if (!mk->Exists()) { Error(StringPrintf("%s does not exist", fname.c_str())); } Var* var_list = LookupVar(Intern("MAKEFILE_LIST")); var_list->AppendVar( this, Value::NewLiteral(Intern(TrimLeadingCurdir(fname)).str())); for (Stmt* stmt : mk->stmts()) { LOG("%s", stmt->DebugString().c_str()); stmt->Eval(this); } } void Evaluator::EvalInclude(const IncludeStmt* stmt) { loc_ = stmt->loc(); last_rule_ = NULL; const string&& pats = stmt->expr->Eval(this); for (StringPiece pat : WordScanner(pats)) { ScopedTerminator st(pat); vector* files; Glob(pat.data(), &files); if (stmt->should_exist) { if (files->empty()) { // TODO: Kati does not support building a missing include file. Error(StringPrintf("%s: %s", pat.data(), strerror(errno))); } } for (const string& fname : *files) { if (!stmt->should_exist && g_flags.ignore_optional_include_pattern && Pattern(g_flags.ignore_optional_include_pattern).Match(fname)) { continue; } DoInclude(fname); } } } void Evaluator::EvalExport(const ExportStmt* stmt) { loc_ = stmt->loc(); last_rule_ = NULL; const string&& exports = stmt->expr->Eval(this); for (StringPiece tok : WordScanner(exports)) { size_t equal_index = tok.find('='); StringPiece lhs; if (equal_index == string::npos) { lhs = tok; } else if (equal_index == 0 || (equal_index == 1 && (tok[0] == ':' || tok[0] == '?' || tok[0] == '+'))) { // Do not export tokens after an assignment. 
break; } else { StringPiece rhs; AssignOp op; ParseAssignStatement(tok, equal_index, &lhs, &rhs, &op); } Symbol sym = Intern(lhs); exports_[sym] = stmt->is_export; if (export_message_) { const char* prefix = ""; if (!stmt->is_export) { prefix = "un"; } if (export_error_) { Error(StringPrintf("*** %s: %sexport is obsolete%s.", sym.c_str(), prefix, export_message_->c_str())); } else { WARN_LOC(loc(), "%s: %sexport has been deprecated%s.", sym.c_str(), prefix, export_message_->c_str()); } } } } Var* Evaluator::LookupVarGlobal(Symbol name) { Var* v = name.GetGlobalVar(); if (v->IsDefined()) return v; used_undefined_vars_.insert(name); return v; } Var* Evaluator::LookupVar(Symbol name) { if (current_scope_) { Var* v = current_scope_->Lookup(name); if (v->IsDefined()) return v; } return LookupVarGlobal(name); } Var* Evaluator::PeekVar(Symbol name) { if (current_scope_) { Var* v = current_scope_->Peek(name); if (v->IsDefined()) return v; } return name.PeekGlobalVar(); } Var* Evaluator::LookupVarInCurrentScope(Symbol name) { if (current_scope_) { return current_scope_->Lookup(name); } return LookupVarGlobal(name); } Var* Evaluator::PeekVarInCurrentScope(Symbol name) { if (current_scope_) { return current_scope_->Peek(name); } return name.PeekGlobalVar(); } string Evaluator::EvalVar(Symbol name) { return LookupVar(name)->Eval(this); } string Evaluator::GetShell() { return EvalVar(kShellSym); } string Evaluator::GetShellFlag() { // TODO: Handle $(.SHELLFLAGS) return is_posix_ ? 
"-ec" : "-c"; } string Evaluator::GetShellAndFlag() { string shell = GetShell(); shell += ' '; shell += GetShellFlag(); return shell; } void Evaluator::Error(const string& msg) { ERROR_LOC(loc_, "%s", msg.c_str()); } void Evaluator::DumpStackStats() const { LOG_STAT("Max stack use: %zd bytes at %s:%d", ((char*)stack_addr_ - (char*)lowest_stack_) + stack_size_, LOCF(lowest_loc_)); } SymbolSet Evaluator::used_undefined_vars_; eval.go0100644 0000000 0000000 00000037647 13654546140 011114 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "bytes" "crypto/sha1" "fmt" "os" "path/filepath" "strconv" "strings" "sync" "github.com/golang/glog" ) type fileState int const ( fileExists fileState = iota fileNotExists fileInconsistent // Modified during kati is running. 
) type accessedMakefile struct { Filename string Hash [sha1.Size]byte State fileState } type accessCache struct { mu sync.Mutex m map[string]*accessedMakefile } func newAccessCache() *accessCache { return &accessCache{ m: make(map[string]*accessedMakefile), } } func (ac *accessCache) update(fn string, hash [sha1.Size]byte, st fileState) string { if ac == nil { return "" } ac.mu.Lock() defer ac.mu.Unlock() rm, present := ac.m[fn] if present { switch rm.State { case fileExists: if st != fileExists { return fmt.Sprintf("%s was removed after the previous read", fn) } else if !bytes.Equal(hash[:], rm.Hash[:]) { ac.m[fn].State = fileInconsistent return fmt.Sprintf("%s was modified after the previous read", fn) } return "" case fileNotExists: if st != fileNotExists { ac.m[fn].State = fileInconsistent return fmt.Sprintf("%s was created after the previous read", fn) } case fileInconsistent: return "" } return "" } ac.m[fn] = &accessedMakefile{ Filename: fn, Hash: hash, State: st, } return "" } func (ac *accessCache) Slice() []*accessedMakefile { if ac == nil { return nil } ac.mu.Lock() defer ac.mu.Unlock() r := []*accessedMakefile{} for _, v := range ac.m { r = append(r, v) } return r } type evalResult struct { vars Vars rules []*rule ruleVars map[string]Vars accessedMks []*accessedMakefile exports map[string]bool vpaths searchPaths } type srcpos struct { filename string lineno int } func (p srcpos) String() string { return fmt.Sprintf("%s:%d", p.filename, p.lineno) } // EvalError is an error in kati evaluation. 
type EvalError struct { Filename string Lineno int Err error } func (e EvalError) Error() string { return fmt.Sprintf("%s:%d: %v", e.Filename, e.Lineno, e.Err) } func (p srcpos) errorf(f string, args ...interface{}) error { return EvalError{ Filename: p.filename, Lineno: p.lineno, Err: fmt.Errorf(f, args...), } } func (p srcpos) error(err error) error { if _, ok := err.(EvalError); ok { return err } return EvalError{ Filename: p.filename, Lineno: p.lineno, Err: err, } } // Evaluator manages makefile evaluation. type Evaluator struct { paramVars []tmpval // $1 => paramVars[1] outVars Vars outRules []*rule outRuleVars map[string]Vars vars Vars lastRule *rule currentScope Vars cache *accessCache exports map[string]bool vpaths []vpath avoidIO bool hasIO bool // delayedOutputs are commands which should run at ninja-time // (i.e., info, warning, and error). delayedOutputs []string srcpos } // NewEvaluator creates new Evaluator. func NewEvaluator(vars map[string]Var) *Evaluator { return &Evaluator{ outVars: make(Vars), vars: vars, outRuleVars: make(map[string]Vars), exports: make(map[string]bool), } } func (ev *Evaluator) args(buf *evalBuffer, args ...Value) ([][]byte, error) { pos := make([]int, 0, len(args)) for _, arg := range args { buf.resetSep() err := arg.Eval(buf, ev) if err != nil { return nil, err } pos = append(pos, buf.Len()) } v := buf.Bytes() buf.args = buf.args[:0] s := 0 for _, p := range pos { buf.args = append(buf.args, v[s:p]) s = p } return buf.args, nil } func (ev *Evaluator) evalAssign(ast *assignAST) error { ev.lastRule = nil lhs, rhs, err := ev.evalAssignAST(ast) if err != nil { return err } if glog.V(1) { glog.Infof("ASSIGN: %s=%q (flavor:%q)", lhs, rhs, rhs.Flavor()) } if lhs == "" { return ast.errorf("*** empty variable name.") } ev.outVars.Assign(lhs, rhs) return nil } func (ev *Evaluator) evalAssignAST(ast *assignAST) (string, Var, error) { ev.srcpos = ast.srcpos var lhs string switch v := ast.lhs.(type) { case literal: lhs = string(v) case 
tmpval: lhs = string(v) default: buf := newEbuf() err := v.Eval(buf, ev) if err != nil { return "", nil, err } lhs = string(trimSpaceBytes(buf.Bytes())) buf.release() } rhs, err := ast.evalRHS(ev, lhs) if err != nil { return "", nil, err } return lhs, rhs, nil } func (ev *Evaluator) setTargetSpecificVar(assign *assignAST, output string) error { vars, present := ev.outRuleVars[output] if !present { vars = make(Vars) ev.outRuleVars[output] = vars } ev.currentScope = vars lhs, rhs, err := ev.evalAssignAST(assign) if err != nil { return err } if glog.V(1) { glog.Infof("rule outputs:%q assign:%q%s%q (flavor:%q)", output, lhs, assign.op, rhs, rhs.Flavor()) } vars.Assign(lhs, &targetSpecificVar{v: rhs, op: assign.op}) ev.currentScope = nil return nil } func (ev *Evaluator) evalMaybeRule(ast *maybeRuleAST) error { ev.lastRule = nil ev.srcpos = ast.srcpos if glog.V(1) { glog.Infof("maybe rule %s: %q assign:%v", ev.srcpos, ast.expr, ast.assign) } abuf := newEbuf() aexpr := toExpr(ast.expr) var rhs expr semi := ast.semi for i, v := range aexpr { var hashFound bool var buf evalBuffer buf.resetSep() switch v.(type) { case literal, tmpval: s := v.String() i := strings.Index(s, "#") if i >= 0 { hashFound = true v = tmpval(trimRightSpaceBytes([]byte(s[:i]))) } } err := v.Eval(&buf, ev) if err != nil { return err } b := buf.Bytes() if ast.isRule { abuf.Write(b) continue } eq := findLiteralChar(b, '=', 0, skipVar) if eq >= 0 { abuf.Write(b[:eq+1]) if eq+1 < len(b) { rhs = append(rhs, tmpval(trimLeftSpaceBytes(b[eq+1:]))) } if i+1 < len(aexpr) { rhs = append(rhs, aexpr[i+1:]...) } if ast.semi != nil { rhs = append(rhs, literal(';')) sexpr, _, err := parseExpr(ast.semi, nil, parseOp{}) if err != nil { return err } rhs = append(rhs, toExpr(sexpr)...) semi = nil } break } abuf.Write(b) if hashFound { break } } line := abuf.Bytes() r := &rule{srcpos: ast.srcpos} if glog.V(1) { glog.Infof("rule? 
%s: %q assign:%v rhs:%s", r.srcpos, line, ast.assign, rhs) } assign, err := r.parse(line, ast.assign, rhs) if err != nil { ws := newWordScanner(line) if ws.Scan() { if string(ws.Bytes()) == "override" { warnNoPrefix(ast.srcpos, "invalid `override' directive") return nil } } return ast.error(err) } abuf.release() if glog.V(1) { glog.Infof("rule %q assign:%v rhs:%v=> outputs:%q, inputs:%q", ast.expr, ast.assign, rhs, r.outputs, r.inputs) } // TODO: Pretty print. // glog.V(1).Infof("RULE: %s=%s (%d commands)", lhs, rhs, len(cmds)) if assign != nil { glog.V(1).Infof("target specific var: %#v", assign) for _, output := range r.outputs { ev.setTargetSpecificVar(assign, output) } for _, output := range r.outputPatterns { ev.setTargetSpecificVar(assign, output.String()) } return nil } if semi != nil { r.cmds = append(r.cmds, string(semi)) } if glog.V(1) { glog.Infof("rule outputs:%q cmds:%q", r.outputs, r.cmds) } ev.lastRule = r ev.outRules = append(ev.outRules, r) return nil } func (ev *Evaluator) evalCommand(ast *commandAST) error { ev.srcpos = ast.srcpos if ev.lastRule == nil || ev.lastRule.outputs == nil { // This could still be an assignment statement. See // assign_after_tab.mk. if strings.IndexByte(ast.cmd, '=') >= 0 { line := trimLeftSpace(ast.cmd) mk, err := parseMakefileString(line, ast.srcpos) if err != nil { return ast.errorf("parse failed: %q: %v", line, err) } if len(mk.stmts) >= 1 && mk.stmts[len(mk.stmts)-1].(*assignAST) != nil { for _, stmt := range mk.stmts { err = ev.eval(stmt) if err != nil { return err } } } return nil } // Or, a comment is OK. 
if strings.TrimSpace(ast.cmd)[0] == '#' { return nil } return ast.errorf("*** commands commence before first target.") } ev.lastRule.cmds = append(ev.lastRule.cmds, ast.cmd) if ev.lastRule.cmdLineno == 0 { ev.lastRule.cmdLineno = ast.lineno } return nil } func (ev *Evaluator) paramVar(name string) (Var, error) { idx, err := strconv.ParseInt(name, 10, 32) if err != nil { return nil, fmt.Errorf("param: %s: %v", name, err) } i := int(idx) if i < 0 || i >= len(ev.paramVars) { return nil, fmt.Errorf("param: %s out of %d", name, len(ev.paramVars)) } return &automaticVar{value: []byte(ev.paramVars[i])}, nil } // LookupVar looks up named variable. func (ev *Evaluator) LookupVar(name string) Var { if ev.currentScope != nil { v := ev.currentScope.Lookup(name) if v.IsDefined() { return v } } v := ev.outVars.Lookup(name) if v.IsDefined() { return v } v, err := ev.paramVar(name) if err == nil { return v } return ev.vars.Lookup(name) } func (ev *Evaluator) lookupVarInCurrentScope(name string) Var { if ev.currentScope != nil { v := ev.currentScope.Lookup(name) return v } v := ev.outVars.Lookup(name) if v.IsDefined() { return v } v, err := ev.paramVar(name) if err == nil { return v } return ev.vars.Lookup(name) } // EvaluateVar evaluates variable named name. // Only for a few special uses such as getting SHELL and handling // export/unexport. 
func (ev *Evaluator) EvaluateVar(name string) (string, error) { var buf evalBuffer buf.resetSep() err := ev.LookupVar(name).Eval(&buf, ev) if err != nil { return "", err } return buf.String(), nil } func (ev *Evaluator) evalIncludeFile(fname string, mk makefile) error { te := traceEvent.begin("include", literal(fname), traceEventMain) defer func() { traceEvent.end(te) }() var err error makefileList := ev.outVars.Lookup("MAKEFILE_LIST") makefileList, err = makefileList.Append(ev, mk.filename) if err != nil { return err } ev.outVars.Assign("MAKEFILE_LIST", makefileList) for _, stmt := range mk.stmts { err = ev.eval(stmt) if err != nil { return err } } return nil } func (ev *Evaluator) evalInclude(ast *includeAST) error { ev.lastRule = nil ev.srcpos = ast.srcpos glog.Infof("%s include %q", ev.srcpos, ast.expr) v, _, err := parseExpr([]byte(ast.expr), nil, parseOp{}) if err != nil { return ast.errorf("parse failed: %q: %v", ast.expr, err) } var buf evalBuffer buf.resetSep() err = v.Eval(&buf, ev) if err != nil { return ast.errorf("%v", err) } pats := splitSpaces(buf.String()) buf.Reset() var files []string for _, pat := range pats { if strings.Contains(pat, "*") || strings.Contains(pat, "?") { matched, err := filepath.Glob(pat) if err != nil { return ast.errorf("glob error: %s: %v", pat, err) } files = append(files, matched...) 
} else { files = append(files, pat) } } for _, fn := range files { fn = trimLeadingCurdir(fn) if IgnoreOptionalInclude != "" && ast.op == "-include" && matchPattern(fn, IgnoreOptionalInclude) { continue } mk, hash, err := makefileCache.parse(fn) if os.IsNotExist(err) { if ast.op == "include" { return ev.errorf("%v\nNOTE: kati does not support generating missing makefiles", err) } msg := ev.cache.update(fn, hash, fileNotExists) if msg != "" { warn(ev.srcpos, "%s", msg) } continue } msg := ev.cache.update(fn, hash, fileExists) if msg != "" { warn(ev.srcpos, "%s", msg) } err = ev.evalIncludeFile(fn, mk) if err != nil { return err } } return nil } func (ev *Evaluator) evalIf(iast *ifAST) error { var isTrue bool switch iast.op { case "ifdef", "ifndef": expr := iast.lhs buf := newEbuf() err := expr.Eval(buf, ev) if err != nil { return iast.errorf("%v\n expr:%s", err, expr) } v := ev.LookupVar(buf.String()) buf.Reset() err = v.Eval(buf, ev) if err != nil { return iast.errorf("%v\n expr:%s=>%s", err, expr, v) } value := buf.String() val := buf.Len() buf.release() isTrue = (val > 0) == (iast.op == "ifdef") if glog.V(1) { glog.Infof("%s lhs=%q value=%q => %t", iast.op, iast.lhs, value, isTrue) } case "ifeq", "ifneq": lexpr := iast.lhs rexpr := iast.rhs buf := newEbuf() params, err := ev.args(buf, lexpr, rexpr) if err != nil { return iast.errorf("%v\n (%s,%s)", err, lexpr, rexpr) } lhs := string(params[0]) rhs := string(params[1]) buf.release() isTrue = (lhs == rhs) == (iast.op == "ifeq") if glog.V(1) { glog.Infof("%s lhs=%q %q rhs=%q %q => %t", iast.op, iast.lhs, lhs, iast.rhs, rhs, isTrue) } default: return iast.errorf("unknown if statement: %q", iast.op) } var stmts []ast if isTrue { stmts = iast.trueStmts } else { stmts = iast.falseStmts } for _, stmt := range stmts { err := ev.eval(stmt) if err != nil { return err } } return nil } func (ev *Evaluator) evalExport(ast *exportAST) error { ev.lastRule = nil ev.srcpos = ast.srcpos v, _, err := parseExpr(ast.expr, nil, 
parseOp{}) if err != nil { return ast.errorf("failed to parse: %q: %v", string(ast.expr), err) } var buf evalBuffer buf.resetSep() err = v.Eval(&buf, ev) if err != nil { return ast.errorf("%v\n expr:%s", err, v) } if ast.hasEqual { ev.exports[string(trimSpaceBytes(buf.Bytes()))] = ast.export } else { for _, n := range splitSpacesBytes(buf.Bytes()) { ev.exports[string(n)] = ast.export } } return nil } func (ev *Evaluator) evalVpath(ast *vpathAST) error { ev.lastRule = nil ev.srcpos = ast.srcpos var ebuf evalBuffer ebuf.resetSep() err := ast.expr.Eval(&ebuf, ev) if err != nil { return ast.errorf("%v\n expr:%s", err, ast.expr) } ws := newWordScanner(ebuf.Bytes()) if !ws.Scan() { ev.vpaths = nil return nil } pat := string(ws.Bytes()) if !ws.Scan() { vpaths := ev.vpaths ev.vpaths = nil for _, v := range vpaths { if v.pattern == pat { continue } ev.vpaths = append(ev.vpaths, v) } return nil } // The search path, DIRECTORIES, is a list of directories to be // searched, separated by colons (semi-colons on MS-DOS and // MS-Windows) or blanks, just like the search path used in the // `VPATH' variable. 
var dirs []string for { for _, dir := range bytes.Split(ws.Bytes(), []byte{':'}) { dirs = append(dirs, string(dir)) } if !ws.Scan() { break } } ev.vpaths = append(ev.vpaths, vpath{ pattern: pat, dirs: dirs, }) return nil } func (ev *Evaluator) eval(stmt ast) error { return stmt.eval(ev) } func eval(mk makefile, vars Vars, useCache bool) (er *evalResult, err error) { ev := NewEvaluator(vars) if useCache { ev.cache = newAccessCache() } makefileList := vars.Lookup("MAKEFILE_LIST") if !makefileList.IsDefined() { makefileList = &simpleVar{value: []string{""}, origin: "file"} } makefileList, err = makefileList.Append(ev, mk.filename) if err != nil { return nil, err } ev.outVars.Assign("MAKEFILE_LIST", makefileList) for _, stmt := range mk.stmts { err = ev.eval(stmt) if err != nil { return nil, err } } vpaths := searchPaths{ vpaths: ev.vpaths, } v, found := ev.outVars["VPATH"] if found { wb := newWbuf() err := v.Eval(wb, ev) if err != nil { return nil, err } // In the 'VPATH' variable, directory names are separated // by colons or blanks. (on windows, semi-colons) for _, word := range wb.words { for _, dir := range bytes.Split(word, []byte{':'}) { vpaths.dirs = append(vpaths.dirs, string(dir)) } } } glog.Infof("vpaths: %#v", vpaths) return &evalResult{ vars: ev.outVars, rules: ev.outRules, ruleVars: ev.outRuleVars, accessedMks: ev.cache.Slice(), exports: ev.exports, vpaths: vpaths, }, nil } eval.h0100644 0000000 0000000 00000011157 13654546140 010722 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// See the License for the specific language governing permissions and // limitations under the License. #ifndef EVAL_H_ #define EVAL_H_ #include #include #include #include #include "loc.h" #include "stmt.h" #include "string_piece.h" #include "symtab.h" using namespace std; class Makefile; class Rule; class Var; class Vars; class Evaluator { public: Evaluator(); ~Evaluator(); void EvalAssign(const AssignStmt* stmt); void EvalRule(const RuleStmt* stmt); void EvalCommand(const CommandStmt* stmt); void EvalIf(const IfStmt* stmt); void EvalInclude(const IncludeStmt* stmt); void EvalExport(const ExportStmt* stmt); Var* LookupVar(Symbol name); // For target specific variables. Var* LookupVarInCurrentScope(Symbol name); // Equivalent to LookupVar, but doesn't mark as used. Var* PeekVar(Symbol name); string EvalVar(Symbol name); const Loc& loc() const { return loc_; } void set_loc(const Loc& loc) { loc_ = loc; } const vector& rules() const { return rules_; } const unordered_map& rule_vars() const { return rule_vars_; } const unordered_map& exports() const { return exports_; } void Error(const string& msg); void set_is_bootstrap(bool b) { is_bootstrap_ = b; } void set_is_commandline(bool c) { is_commandline_ = c; } void set_current_scope(Vars* v) { current_scope_ = v; } bool avoid_io() const { return avoid_io_; } void set_avoid_io(bool a) { avoid_io_ = a; } const vector& delayed_output_commands() const { return delayed_output_commands_; } void add_delayed_output_command(const string& c) { delayed_output_commands_.push_back(c); } void clear_delayed_output_commands() { delayed_output_commands_.clear(); } static const SymbolSet& used_undefined_vars() { return used_undefined_vars_; } int eval_depth() const { return eval_depth_; } void IncrementEvalDepth() { eval_depth_++; } void DecrementEvalDepth() { eval_depth_--; } string GetShell(); string GetShellFlag(); string GetShellAndFlag(); void CheckStack() { void* addr = __builtin_frame_address(0); if (__builtin_expect(addr < 
lowest_stack_ && addr >= stack_addr_, 0)) { lowest_stack_ = addr; lowest_loc_ = loc_; } } void DumpStackStats() const; bool ExportDeprecated() const { return export_message_ && !export_error_; }; bool ExportObsolete() const { return export_error_; }; void SetExportDeprecated(StringPiece msg) { export_message_.reset(new string(msg.as_string())); } void SetExportObsolete(StringPiece msg) { export_message_.reset(new string(msg.as_string())); export_error_ = true; } private: Var* EvalRHS(Symbol lhs, Value* rhs, StringPiece orig_rhs, AssignOp op, bool is_override, bool* needs_assign); void DoInclude(const string& fname); Var* LookupVarGlobal(Symbol name); // Equivalent to LookupVarInCurrentScope, but doesn't mark as used. Var* PeekVarInCurrentScope(Symbol name); void MarkVarsReadonly(Value* var_list); void EvalRuleSpecificAssign(const vector& targets, const RuleStmt* stmt, const StringPiece& lhs_string, size_t separator_pos); unordered_map rule_vars_; vector rules_; unordered_map exports_; Rule* last_rule_; Vars* current_scope_; Loc loc_; bool is_bootstrap_; bool is_commandline_; bool avoid_io_; // This value tracks the nest level of make expressions. For // example, $(YYY) in $(XXX $(YYY)) is evaluated with depth==2. // This will be used to disallow $(shell) in other make constructs. int eval_depth_; // Commands which should run at ninja-time (i.e., info, warning, and // error). vector delayed_output_commands_; Symbol posix_sym_; bool is_posix_; void* stack_addr_; size_t stack_size_; void* lowest_stack_; Loc lowest_loc_; unique_ptr export_message_; bool export_error_; static SymbolSet used_undefined_vars_; }; #endif // EVAL_H_ evalcmd.go0100644 0000000 0000000 00000017456 13654546140 011574 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "fmt" "os/exec" "strings" "sync" "github.com/golang/glog" ) type execContext struct { shell string mu sync.Mutex ev *Evaluator vpaths searchPaths output string inputs []string } func newExecContext(vars Vars, vpaths searchPaths, avoidIO bool) *execContext { ev := NewEvaluator(vars) ev.avoidIO = avoidIO ctx := &execContext{ ev: ev, vpaths: vpaths, } av := autoVar{ctx: ctx} for k, v := range map[string]Var{ "@": autoAtVar{autoVar: av}, "<": autoLessVar{autoVar: av}, "^": autoHatVar{autoVar: av}, "+": autoPlusVar{autoVar: av}, "*": autoStarVar{autoVar: av}, } { ev.vars[k] = v // $D = $(patsubst %/,%,$(dir $)) ev.vars[k+"D"] = suffixDVar(k) // $F = $(notdir $) ev.vars[k+"F"] = suffixFVar(k) } // TODO: We should move this to somewhere around evalCmd so that // we can handle SHELL in target specific variables. 
shell, err := ev.EvaluateVar("SHELL") if err != nil { shell = "/bin/sh" } ctx.shell = shell return ctx } func (ec *execContext) uniqueInputs() []string { var uniqueInputs []string seen := make(map[string]bool) for _, input := range ec.inputs { if !seen[input] { seen[input] = true uniqueInputs = append(uniqueInputs, input) } } return uniqueInputs } type autoVar struct{ ctx *execContext } func (v autoVar) Flavor() string { return "undefined" } func (v autoVar) Origin() string { return "automatic" } func (v autoVar) IsDefined() bool { return true } func (v autoVar) Append(*Evaluator, string) (Var, error) { return nil, fmt.Errorf("cannot append to autovar") } func (v autoVar) AppendVar(*Evaluator, Value) (Var, error) { return nil, fmt.Errorf("cannot append to autovar") } func (v autoVar) serialize() serializableVar { return serializableVar{Type: ""} } func (v autoVar) dump(d *dumpbuf) { d.err = fmt.Errorf("cannot dump auto var: %v", v) } type autoAtVar struct{ autoVar } func (v autoAtVar) Eval(w evalWriter, ev *Evaluator) error { fmt.Fprint(w, v.String()) return nil } func (v autoAtVar) String() string { return v.ctx.output } type autoLessVar struct{ autoVar } func (v autoLessVar) Eval(w evalWriter, ev *Evaluator) error { fmt.Fprint(w, v.String()) return nil } func (v autoLessVar) String() string { if len(v.ctx.inputs) > 0 { return v.ctx.inputs[0] } return "" } type autoHatVar struct{ autoVar } func (v autoHatVar) Eval(w evalWriter, ev *Evaluator) error { fmt.Fprint(w, v.String()) return nil } func (v autoHatVar) String() string { return strings.Join(v.ctx.uniqueInputs(), " ") } type autoPlusVar struct{ autoVar } func (v autoPlusVar) Eval(w evalWriter, ev *Evaluator) error { fmt.Fprint(w, v.String()) return nil } func (v autoPlusVar) String() string { return strings.Join(v.ctx.inputs, " ") } type autoStarVar struct{ autoVar } func (v autoStarVar) Eval(w evalWriter, ev *Evaluator) error { fmt.Fprint(w, v.String()) return nil } // TODO: Use currentStem. 
See auto_stem_var.mk func (v autoStarVar) String() string { return stripExt(v.ctx.output) } func suffixDVar(k string) Var { return &recursiveVar{ expr: expr{ &funcPatsubst{ fclosure: fclosure{ args: []Value{ literal("(patsubst"), literal("%/"), literal("%"), &funcDir{ fclosure: fclosure{ args: []Value{ literal("(dir"), &varref{ varname: literal(k), }, }, }, }, }, }, }, }, origin: "automatic", } } func suffixFVar(k string) Var { return &recursiveVar{ expr: expr{ &funcNotdir{ fclosure: fclosure{ args: []Value{ literal("(notdir"), &varref{varname: literal(k)}, }, }, }, }, origin: "automatic", } } // runner is a single shell command invocation. type runner struct { output string cmd string echo bool ignoreError bool shell string } func (r runner) String() string { cmd := r.cmd if !r.echo { cmd = "@" + cmd } if r.ignoreError { cmd = "-" + cmd } return cmd } func (r runner) forCmd(s string) runner { for { s = trimLeftSpace(s) if s == "" { return runner{} } switch s[0] { case '@': if !DryRunFlag { r.echo = false } s = s[1:] continue case '-': r.ignoreError = true s = s[1:] continue } break } r.cmd = s return r } func (r runner) eval(ev *Evaluator, s string) ([]runner, error) { r = r.forCmd(s) if strings.IndexByte(r.cmd, '$') < 0 { // fast path return []runner{r}, nil } // TODO(ukai): parse once more earlier? 
expr, _, err := parseExpr([]byte(r.cmd), nil, parseOp{}) if err != nil { return nil, ev.errorf("parse cmd %q: %v", r.cmd, err) } buf := newEbuf() err = expr.Eval(buf, ev) if err != nil { return nil, err } cmds := buf.String() buf.release() glog.V(1).Infof("evalcmd: %q => %q", r.cmd, cmds) var runners []runner for _, cmd := range strings.Split(cmds, "\n") { if len(runners) > 0 && strings.HasSuffix(runners[len(runners)-1].cmd, "\\") { runners[len(runners)-1].cmd += "\n" runners[len(runners)-1].cmd += cmd continue } runners = append(runners, r.forCmd(cmd)) } return runners, nil } func (r runner) run(output string) error { if r.echo || DryRunFlag { fmt.Printf("%s\n", r.cmd) } s := cmdline(r.cmd) glog.Infof("sh:%q", s) if DryRunFlag { return nil } args := []string{r.shell, "-c", s} cmd := exec.Cmd{ Path: args[0], Args: args, } out, err := cmd.CombinedOutput() fmt.Printf("%s", out) exit := exitStatus(err) if r.ignoreError && exit != 0 { fmt.Printf("[%s] Error %d (ignored)\n", output, exit) err = nil } return err } func createRunners(ctx *execContext, n *DepNode) ([]runner, bool, error) { var runners []runner if len(n.Cmds) == 0 { return runners, false, nil } ctx.mu.Lock() defer ctx.mu.Unlock() // For automatic variables. 
ctx.output = n.Output ctx.inputs = n.ActualInputs for k, v := range n.TargetSpecificVars { restore := ctx.ev.vars.save(k) defer restore() ctx.ev.vars[k] = v if glog.V(1) { glog.Infof("set tsv: %s=%s", k, v) } } ctx.ev.filename = n.Filename ctx.ev.lineno = n.Lineno glog.Infof("Building: %s cmds:%q", n.Output, n.Cmds) r := runner{ output: n.Output, echo: true, shell: ctx.shell, } for _, cmd := range n.Cmds { rr, err := r.eval(ctx.ev, cmd) if err != nil { return nil, false, err } for _, r := range rr { if len(r.cmd) != 0 { runners = append(runners, r) } } } if len(ctx.ev.delayedOutputs) > 0 { var nrunners []runner r := runner{ output: n.Output, shell: ctx.shell, } for _, o := range ctx.ev.delayedOutputs { nrunners = append(nrunners, r.forCmd(o)) } nrunners = append(nrunners, runners...) runners = nrunners ctx.ev.delayedOutputs = nil } return runners, ctx.ev.hasIO, nil } func evalCommands(nodes []*DepNode, vars Vars) error { ioCnt := 0 ectx := newExecContext(vars, searchPaths{}, true) for i, n := range nodes { runners, hasIO, err := createRunners(ectx, n) if err != nil { return err } if hasIO { ioCnt++ if ioCnt%100 == 0 { logStats("%d/%d rules have IO", ioCnt, i+1) } continue } n.Cmds = []string{} n.TargetSpecificVars = make(Vars) for _, r := range runners { n.Cmds = append(n.Cmds, r.String()) } } logStats("%d/%d rules have IO", ioCnt, len(nodes)) return nil } exec.cc0100644 0000000 0000000 00000007761 13654546140 011063 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "exec.h" #include #include #include #include #include #include #include #include "command.h" #include "dep.h" #include "eval.h" #include "expr.h" #include "fileutil.h" #include "flags.h" #include "log.h" #include "string_piece.h" #include "strutil.h" #include "symtab.h" #include "var.h" namespace { const double kNotExist = -2.0; const double kProcessing = -1.0; class Executor { public: explicit Executor(Evaluator* ev) : ce_(ev), num_commands_(0) { shell_ = ev->GetShell(); shellflag_ = ev->GetShellFlag(); } double ExecNode(DepNode* n, DepNode* needed_by) { auto found = done_.find(n->output); if (found != done_.end()) { if (found->second == kProcessing) { WARN("Circular %s <- %s dependency dropped.", needed_by ? needed_by->output.c_str() : "(null)", n->output.c_str()); } return found->second; } done_[n->output] = kProcessing; double output_ts = GetTimestamp(n->output.c_str()); LOG("ExecNode: %s for %s", n->output.c_str(), needed_by ? 
needed_by->output.c_str() : "(null)"); if (!n->has_rule && output_ts == kNotExist && !n->is_phony) { if (needed_by) { ERROR("*** No rule to make target '%s', needed by '%s'.", n->output.c_str(), needed_by->output.c_str()); } else { ERROR("*** No rule to make target '%s'.", n->output.c_str()); } } double latest = kProcessing; for (auto const& d : n->order_onlys) { if (Exists(d.second->output.str())) { continue; } double ts = ExecNode(d.second, n); if (latest < ts) latest = ts; } for (auto const& d : n->deps) { double ts = ExecNode(d.second, n); if (latest < ts) latest = ts; } if (output_ts >= latest && !n->is_phony) { done_[n->output] = output_ts; return output_ts; } vector commands; ce_.Eval(n, &commands); for (Command* command : commands) { num_commands_ += 1; if (command->echo) { printf("%s\n", command->cmd.c_str()); fflush(stdout); } if (!g_flags.is_dry_run) { string out; int result = RunCommand(shell_, shellflag_, command->cmd.c_str(), RedirectStderr::STDOUT, &out); printf("%s", out.c_str()); if (result != 0) { if (command->ignore_error) { fprintf(stderr, "[%s] Error %d (ignored)\n", command->output.c_str(), WEXITSTATUS(result)); } else { fprintf(stderr, "*** [%s] Error %d\n", command->output.c_str(), WEXITSTATUS(result)); exit(1); } } } delete command; } done_[n->output] = output_ts; return output_ts; } uint64_t Count() { return num_commands_; } private: CommandEvaluator ce_; unordered_map done_; string shell_; string shellflag_; uint64_t num_commands_; }; } // namespace void Exec(const vector& roots, Evaluator* ev) { unique_ptr executor(new Executor(ev)); for (auto const& root : roots) { executor->ExecNode(root.second, NULL); } if (executor->Count() == 0) { for (auto const& root : roots) { printf("kati: Nothing to be done for `%s'.\n", root.first.c_str()); } } } exec.go0100644 0000000 0000000 00000010647 13654546140 011100 0ustar000000000 0000000 // Copyright 2015 Google Inc. 
All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "fmt" "os" "time" "github.com/golang/glog" ) // Executor manages execution of makefile rules. type Executor struct { rules map[string]*rule implicitRules []*rule suffixRules map[string][]*rule firstRule *rule // target -> Job, nil means the target is currently being processed. done map[string]*job wm *workerManager ctx *execContext trace []string buildCnt int alreadyDoneCnt int noRuleCnt int upToDateCnt int runCommandCnt int } func (ex *Executor) makeJobs(n *DepNode, neededBy *job) error { output, _ := ex.ctx.vpaths.exists(n.Output) if neededBy != nil { glog.V(1).Infof("MakeJob: %s for %s", output, neededBy.n.Output) } n.Output = output ex.buildCnt++ if ex.buildCnt%100 == 0 { ex.reportStats() } j, present := ex.done[output] if present { if j == nil { if !n.IsPhony { fmt.Printf("Circular %s <- %s dependency dropped.\n", neededBy.n.Output, n.Output) } if neededBy != nil { neededBy.numDeps-- } } else { glog.Infof("%s already done: %d", j.n.Output, j.outputTs) if neededBy != nil { ex.wm.ReportNewDep(j, neededBy) } } return nil } j = &job{ n: n, ex: ex, numDeps: len(n.Deps) + len(n.OrderOnlys), depsTs: int64(-1), } if neededBy != nil { j.parents = append(j.parents, neededBy) } ex.done[output] = nil // We iterate n.Deps twice. In the first run, we may modify // numDeps. There will be a race if we do so after the first // ex.makeJobs(d, j). 
var deps []*DepNode for _, d := range n.Deps { deps = append(deps, d) } for _, d := range n.OrderOnlys { if _, ok := ex.ctx.vpaths.exists(d.Output); ok { j.numDeps-- continue } deps = append(deps, d) } glog.V(1).Infof("new: %s (%d)", j.n.Output, j.numDeps) for _, d := range deps { ex.trace = append(ex.trace, d.Output) err := ex.makeJobs(d, j) ex.trace = ex.trace[0 : len(ex.trace)-1] if err != nil { return err } } ex.done[output] = j return ex.wm.PostJob(j) } func (ex *Executor) reportStats() { if !PeriodicStatsFlag { return } logStats("build=%d alreadyDone=%d noRule=%d, upToDate=%d runCommand=%d", ex.buildCnt, ex.alreadyDoneCnt, ex.noRuleCnt, ex.upToDateCnt, ex.runCommandCnt) if len(ex.trace) > 1 { logStats("trace=%q", ex.trace) } } // ExecutorOpt is an option for Executor. type ExecutorOpt struct { NumJobs int } // NewExecutor creates new Executor. func NewExecutor(opt *ExecutorOpt) (*Executor, error) { if opt == nil { opt = &ExecutorOpt{NumJobs: 1} } if opt.NumJobs < 1 { opt.NumJobs = 1 } wm, err := newWorkerManager(opt.NumJobs) if err != nil { return nil, err } ex := &Executor{ rules: make(map[string]*rule), suffixRules: make(map[string][]*rule), done: make(map[string]*job), wm: wm, } return ex, nil } // Exec executes to build targets, or first target in DepGraph. func (ex *Executor) Exec(g *DepGraph, targets []string) error { ex.ctx = newExecContext(g.vars, g.vpaths, false) // TODO: Handle target specific variables. 
for name, export := range g.exports { if export { v, err := ex.ctx.ev.EvaluateVar(name) if err != nil { return err } os.Setenv(name, v) } else { os.Unsetenv(name) } } startTime := time.Now() var nodes []*DepNode if len(targets) == 0 { if len(g.nodes) > 0 { nodes = append(nodes, g.nodes[0]) } } else { m := make(map[string]*DepNode) for _, n := range g.nodes { m[n.Output] = n } for _, t := range targets { n := m[t] if n != nil { nodes = append(nodes, n) } } } for _, root := range nodes { err := ex.makeJobs(root, nil) if err != nil { break } } n, err := ex.wm.Wait() logStats("exec time: %q", time.Since(startTime)) if n == 0 { for _, root := range nodes { fmt.Printf("kati: Nothing to be done for `%s'.\n", root.Output) } } return err } exec.h0100644 0000000 0000000 00000001437 13654546140 010717 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef EXEC_H_ #define EXEC_H_ #include using namespace std; #include "dep.h" class Evaluator; void Exec(const vector& roots, Evaluator* ev); #endif // EXEC_H_ expr.cc0100644 0000000 0000000 00000034055 13654546140 011111 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "expr.h" #include #include "eval.h" #include "func.h" #include "log.h" #include "stringprintf.h" #include "strutil.h" #include "var.h" Evaluable::Evaluable() {} Evaluable::~Evaluable() {} string Evaluable::Eval(Evaluator* ev) const { string s; Eval(ev, &s); return s; } Value::Value() {} Value::~Value() {} string Value::DebugString(const Value* v) { return v ? NoLineBreak(v->DebugString_()) : "(null)"; } class Literal : public Value { public: explicit Literal(StringPiece s) : s_(s) {} StringPiece val() const { return s_; } virtual void Eval(Evaluator* ev, string* s) const override { ev->CheckStack(); s->append(s_.begin(), s_.end()); } virtual bool IsLiteral() const override { return true; } virtual StringPiece GetLiteralValueUnsafe() const override { return s_; } virtual string DebugString_() const override { return s_.as_string(); } private: StringPiece s_; }; class ValueList : public Value { public: ValueList() {} ValueList(Value* v1, Value* v2, Value* v3) : ValueList() { vals_.reserve(3); vals_.push_back(v1); vals_.push_back(v2); vals_.push_back(v3); } ValueList(Value* v1, Value* v2) : ValueList() { vals_.reserve(2); vals_.push_back(v1); vals_.push_back(v2); } ValueList(vector* values) : ValueList() { values->shrink_to_fit(); values->swap(vals_); } virtual ~ValueList() { for (Value* v : vals_) { delete v; } } virtual void Eval(Evaluator* ev, string* s) const override { ev->CheckStack(); for (Value* v : vals_) { v->Eval(ev, s); } } virtual string DebugString_() const override { string r; for (Value* v : vals_) { if (r.empty()) { r 
+= "ValueList("; } else { r += ", "; } r += DebugString(v); } if (!r.empty()) r += ")"; return r; } private: vector vals_; }; class SymRef : public Value { public: explicit SymRef(Symbol n) : name_(n) {} virtual ~SymRef() {} virtual void Eval(Evaluator* ev, string* s) const override { ev->CheckStack(); Var* v = ev->LookupVar(name_); v->Used(ev, name_); v->Eval(ev, s); } virtual string DebugString_() const override { return StringPrintf("SymRef(%s)", name_.c_str()); } private: Symbol name_; }; class VarRef : public Value { public: explicit VarRef(Value* n) : name_(n) {} virtual ~VarRef() { delete name_; } virtual void Eval(Evaluator* ev, string* s) const override { ev->CheckStack(); ev->IncrementEvalDepth(); const string&& name = name_->Eval(ev); ev->DecrementEvalDepth(); Symbol sym = Intern(name); Var* v = ev->LookupVar(sym); v->Used(ev, sym); v->Eval(ev, s); } virtual string DebugString_() const override { return StringPrintf("VarRef(%s)", Value::DebugString(name_).c_str()); } private: Value* name_; }; class VarSubst : public Value { public: explicit VarSubst(Value* n, Value* p, Value* s) : name_(n), pat_(p), subst_(s) {} virtual ~VarSubst() { delete name_; delete pat_; delete subst_; } virtual void Eval(Evaluator* ev, string* s) const override { ev->CheckStack(); ev->IncrementEvalDepth(); const string&& name = name_->Eval(ev); Symbol sym = Intern(name); Var* v = ev->LookupVar(sym); const string&& pat_str = pat_->Eval(ev); const string&& subst = subst_->Eval(ev); ev->DecrementEvalDepth(); v->Used(ev, sym); const string&& value = v->Eval(ev); WordWriter ww(s); Pattern pat(pat_str); for (StringPiece tok : WordScanner(value)) { ww.MaybeAddWhitespace(); pat.AppendSubstRef(tok, subst, s); } } virtual string DebugString_() const override { return StringPrintf("VarSubst(%s:%s=%s)", Value::DebugString(name_).c_str(), Value::DebugString(pat_).c_str(), Value::DebugString(subst_).c_str()); } private: Value* name_; Value* pat_; Value* subst_; }; class Func : public Value { 
public: explicit Func(FuncInfo* fi) : fi_(fi) {} ~Func() { for (Value* a : args_) delete a; } virtual void Eval(Evaluator* ev, string* s) const override { ev->CheckStack(); LOG("Invoke func %s(%s)", name(), JoinValues(args_, ",").c_str()); ev->IncrementEvalDepth(); fi_->func(args_, ev, s); ev->DecrementEvalDepth(); } virtual string DebugString_() const override { return StringPrintf("Func(%s %s)", fi_->name, JoinValues(args_, ",").c_str()); } void AddArg(Value* v) { args_.push_back(v); } const char* name() const { return fi_->name; } int arity() const { return fi_->arity; } int min_arity() const { return fi_->min_arity; } bool trim_space() const { return fi_->trim_space; } bool trim_right_space_1st() const { return fi_->trim_right_space_1st; } private: FuncInfo* fi_; vector args_; }; static char CloseParen(char c) { switch (c) { case '(': return ')'; case '{': return '}'; } return 0; } static size_t SkipSpaces(StringPiece s, const char* terms) { for (size_t i = 0; i < s.size(); i++) { char c = s[i]; if (strchr(terms, c)) return i; if (!isspace(c)) { if (c != '\\') return i; char n = s.get(i + 1); if (n != '\r' && n != '\n') return i; } } return s.size(); } Value* Value::NewExpr(Value* v1, Value* v2) { return new ValueList(v1, v2); } Value* Value::NewExpr(Value* v1, Value* v2, Value* v3) { return new ValueList(v1, v2, v3); } Value* Value::NewExpr(vector* values) { if (values->size() == 1) { Value* v = (*values)[0]; values->clear(); return v; } return new ValueList(values); } Value* Value::NewLiteral(StringPiece s) { return new Literal(s); } bool ShouldHandleComments(ParseExprOpt opt) { return opt != ParseExprOpt::DEFINE && opt != ParseExprOpt::COMMAND; } void ParseFunc(const Loc& loc, Func* f, StringPiece s, size_t i, char* terms, size_t* index_out) { terms[1] = ','; terms[2] = '\0'; i += SkipSpaces(s.substr(i), terms); if (i == s.size()) { *index_out = i; return; } int nargs = 1; while (true) { if (f->arity() && nargs >= f->arity()) { terms[1] = '\0'; // Drop ','. 
} if (f->trim_space()) { for (; i < s.size(); i++) { if (isspace(s[i])) continue; if (s[i] == '\\') { char c = s.get(i + 1); if (c == '\r' || c == '\n') continue; } break; } } const bool trim_right_space = (f->trim_space() || (nargs == 1 && f->trim_right_space_1st())); size_t n; Value* v = ParseExprImpl(loc, s.substr(i), terms, ParseExprOpt::FUNC, &n, trim_right_space); // TODO: concatLine??? f->AddArg(v); i += n; if (i == s.size()) { ERROR_LOC(loc, "*** unterminated call to function '%s': " "missing '%c'.", f->name(), terms[0]); } nargs++; if (s[i] == terms[0]) { i++; break; } i++; // Should be ','. if (i == s.size()) break; } if (nargs <= f->min_arity()) { ERROR_LOC(loc, "*** insufficient number of arguments (%d) to function `%s'.", nargs - 1, f->name()); } *index_out = i; return; } Value* ParseDollar(const Loc& loc, StringPiece s, size_t* index_out) { CHECK(s.size() >= 2); CHECK(s[0] == '$'); CHECK(s[1] != '$'); char cp = CloseParen(s[1]); if (cp == 0) { *index_out = 2; return new SymRef(Intern(s.substr(1, 1))); } char terms[] = {cp, ':', ' ', 0}; for (size_t i = 2;;) { size_t n; Value* vname = ParseExprImpl(loc, s.substr(i), terms, ParseExprOpt::NORMAL, &n); i += n; if (s[i] == cp) { *index_out = i + 1; if (vname->IsLiteral()) { Literal* lit = static_cast(vname); Symbol sym = Intern(lit->val()); if (g_flags.enable_kati_warnings) { size_t found = sym.str().find_first_of(" ({"); if (found != string::npos) { KATI_WARN_LOC(loc, "*warning*: variable lookup with '%c': %.*s", sym.str()[found], SPF(s)); } } Value* r = new SymRef(sym); delete lit; return r; } return new VarRef(vname); } if (s[i] == ' ' || s[i] == '\\') { // ${func ...} if (vname->IsLiteral()) { Literal* lit = static_cast(vname); if (FuncInfo* fi = GetFuncInfo(lit->val())) { delete lit; Func* func = new Func(fi); ParseFunc(loc, func, s, i + 1, terms, index_out); return func; } else { KATI_WARN_LOC(loc, "*warning*: unknown make function '%.*s': %.*s", SPF(lit->val()), SPF(s)); } } // Not a function. 
Drop ' ' from |terms| and parse it // again. This is inefficient, but this code path should be // rarely used. delete vname; terms[2] = 0; i = 2; continue; } if (s[i] == ':') { terms[2] = '\0'; terms[1] = '='; size_t n; Value* pat = ParseExprImpl(loc, s.substr(i + 1), terms, ParseExprOpt::NORMAL, &n); i += 1 + n; if (s[i] == cp) { *index_out = i + 1; return new VarRef(Value::NewExpr(vname, new Literal(":"), pat)); } terms[1] = '\0'; Value* subst = ParseExprImpl(loc, s.substr(i + 1), terms, ParseExprOpt::NORMAL, &n); i += 1 + n; *index_out = i + 1; return new VarSubst(vname, pat, subst); } // GNU make accepts expressions like $((). See unmatched_paren*.mk // for detail. size_t found = s.find(cp); if (found != string::npos) { KATI_WARN_LOC(loc, "*warning*: unmatched parentheses: %.*s", SPF(s)); *index_out = s.size(); return new SymRef(Intern(s.substr(2, found - 2))); } ERROR_LOC(loc, "*** unterminated variable reference."); } } Value* ParseExprImpl(const Loc& loc, StringPiece s, const char* terms, ParseExprOpt opt, size_t* index_out, bool trim_right_space) { if (s.get(s.size() - 1) == '\r') s.remove_suffix(1); size_t b = 0; char save_paren = 0; int paren_depth = 0; size_t i; vector list; for (i = 0; i < s.size(); i++) { char c = s[i]; if (terms && strchr(terms, c) && !save_paren) { break; } // Handle a comment. 
if (!terms && c == '#' && ShouldHandleComments(opt)) { if (i > b) list.push_back(new Literal(s.substr(b, i - b))); bool was_backslash = false; for (; i < s.size() && !(s[i] == '\n' && !was_backslash); i++) { was_backslash = !was_backslash && s[i] == '\\'; } *index_out = i; return Value::NewExpr(&list); } if (c == '$') { if (i + 1 >= s.size()) { break; } if (i > b) list.push_back(new Literal(s.substr(b, i - b))); if (s[i + 1] == '$') { list.push_back(new Literal(StringPiece("$"))); i += 1; b = i + 1; continue; } if (terms && strchr(terms, s[i + 1])) { *index_out = i + 1; return Value::NewExpr(&list); } size_t n; list.push_back(ParseDollar(loc, s.substr(i), &n)); i += n; b = i; i--; continue; } if ((c == '(' || c == '{') && opt == ParseExprOpt::FUNC) { char cp = CloseParen(c); if (terms && terms[0] == cp) { paren_depth++; save_paren = cp; terms++; } else if (cp == save_paren) { paren_depth++; } continue; } if (c == save_paren) { paren_depth--; if (paren_depth == 0) { terms--; save_paren = 0; } } if (c == '\\' && i + 1 < s.size() && opt != ParseExprOpt::COMMAND) { char n = s[i + 1]; if (n == '\\') { i++; continue; } if (n == '#' && ShouldHandleComments(opt)) { list.push_back(new Literal(s.substr(b, i - b))); i++; b = i; continue; } if (n == '\r' || n == '\n') { if (terms && strchr(terms, ' ')) { break; } if (i > b) { list.push_back(new Literal(TrimRightSpace(s.substr(b, i - b)))); } list.push_back(new Literal(StringPiece(" "))); // Skip the current escaped newline i += 2; if (n == '\r' && s.get(i) == '\n') i++; // Then continue skipping escaped newlines, spaces, and tabs for (; i < s.size(); i++) { if (s[i] == '\\' && (s.get(i + 1) == '\r' || s.get(i + 1) == '\n')) { i++; continue; } if (s[i] != ' ' && s[i] != '\t') { break; } } b = i; i--; } } } if (i > b) { StringPiece rest = s.substr(b, i - b); if (trim_right_space) rest = TrimRightSpace(rest); if (!rest.empty()) list.push_back(new Literal(rest)); } *index_out = i; return Value::NewExpr(&list); } Value* 
ParseExpr(const Loc& loc, StringPiece s, ParseExprOpt opt) { size_t n; return ParseExprImpl(loc, s, NULL, opt, &n); } string JoinValues(const vector& vals, const char* sep) { vector val_strs; for (Value* v : vals) { val_strs.push_back(Value::DebugString(v)); } return JoinStrings(val_strs, sep); } expr.go0100644 0000000 0000000 00000037557 13654546140 011143 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "bytes" "errors" "fmt" "io" "regexp" "strconv" "strings" "github.com/golang/glog" ) var ( errEndOfInput = errors.New("unexpected end of input") errNotLiteral = errors.New("valueNum: not literal") errUnterminatedVariableReference = errors.New("*** unterminated variable reference.") ) type evalWriter interface { io.Writer writeWord([]byte) writeWordString(string) resetSep() } // Value is an interface for value. type Value interface { String() string Eval(w evalWriter, ev *Evaluator) error serialize() serializableVar dump(d *dumpbuf) } // literal is literal value. type literal string func (s literal) String() string { return string(s) } func (s literal) Eval(w evalWriter, ev *Evaluator) error { io.WriteString(w, string(s)) return nil } func (s literal) serialize() serializableVar { return serializableVar{Type: "literal", V: string(s)} } func (s literal) dump(d *dumpbuf) { d.Byte(valueTypeLiteral) d.Bytes([]byte(s)) } // tmpval is temporary value. 
type tmpval []byte func (t tmpval) String() string { return string(t) } func (t tmpval) Eval(w evalWriter, ev *Evaluator) error { w.Write(t) return nil } func (t tmpval) Value() []byte { return []byte(t) } func (t tmpval) serialize() serializableVar { return serializableVar{Type: "tmpval", V: string(t)} } func (t tmpval) dump(d *dumpbuf) { d.Byte(valueTypeTmpval) d.Bytes(t) } // expr is a list of values. type expr []Value func (e expr) String() string { var s []string for _, v := range e { s = append(s, v.String()) } return strings.Join(s, "") } func (e expr) Eval(w evalWriter, ev *Evaluator) error { for _, v := range e { w.resetSep() err := v.Eval(w, ev) if err != nil { return err } } return nil } func (e expr) serialize() serializableVar { r := serializableVar{Type: "expr"} for _, v := range e { r.Children = append(r.Children, v.serialize()) } return r } func (e expr) dump(d *dumpbuf) { d.Byte(valueTypeExpr) d.Int(len(e)) for _, v := range e { v.dump(d) } } func compactExpr(e expr) Value { if len(e) == 1 { return e[0] } // TODO(ukai): concat literal return e } func toExpr(v Value) expr { if v == nil { return nil } if e, ok := v.(expr); ok { return e } return expr{v} } // varref is variable reference. e.g. ${foo}. 
type varref struct { varname Value paren byte } func (v *varref) String() string { varname := v.varname.String() if len(varname) == 1 && v.paren == 0 { return fmt.Sprintf("$%s", varname) } paren := v.paren if paren == 0 { paren = '{' } return fmt.Sprintf("$%c%s%c", paren, varname, closeParen(paren)) } func (v *varref) Eval(w evalWriter, ev *Evaluator) error { te := traceEvent.begin("var", v, traceEventMain) buf := newEbuf() err := v.varname.Eval(buf, ev) if err != nil { return err } vv := ev.LookupVar(buf.String()) buf.release() err = vv.Eval(w, ev) if err != nil { return err } traceEvent.end(te) return nil } func (v *varref) serialize() serializableVar { return serializableVar{ Type: "varref", V: string(v.paren), Children: []serializableVar{v.varname.serialize()}, } } func (v *varref) dump(d *dumpbuf) { d.Byte(valueTypeVarref) d.Byte(v.paren) v.varname.dump(d) } // paramref is parameter reference e.g. $1. type paramref int func (p paramref) String() string { return fmt.Sprintf("$%d", int(p)) } func (p paramref) Eval(w evalWriter, ev *Evaluator) error { te := traceEvent.begin("param", p, traceEventMain) n := int(p) if n < len(ev.paramVars) { err := ev.paramVars[n].Eval(w, ev) if err != nil { return err } } else { vv := ev.LookupVar(fmt.Sprintf("%d", n)) err := vv.Eval(w, ev) if err != nil { return err } } traceEvent.end(te) return nil } func (p paramref) serialize() serializableVar { return serializableVar{Type: "paramref", V: strconv.Itoa(int(p))} } func (p paramref) dump(d *dumpbuf) { d.Byte(valueTypeParamref) d.Int(int(p)) } // varsubst is variable substitutaion. e.g. ${var:pat=subst}. 
type varsubst struct { varname Value pat Value subst Value paren byte } func (v varsubst) String() string { paren := v.paren if paren == 0 { paren = '{' } return fmt.Sprintf("$%c%s:%s=%s%c", paren, v.varname, v.pat, v.subst, closeParen(paren)) } func (v varsubst) Eval(w evalWriter, ev *Evaluator) error { te := traceEvent.begin("varsubst", v, traceEventMain) buf := newEbuf() params, err := ev.args(buf, v.varname, v.pat, v.subst) if err != nil { return err } vname := string(params[0]) pat := string(params[1]) subst := string(params[2]) buf.Reset() vv := ev.LookupVar(vname) err = vv.Eval(buf, ev) if err != nil { return err } vals := splitSpaces(buf.String()) buf.release() space := false for _, val := range vals { if space { io.WriteString(w, " ") } io.WriteString(w, substRef(pat, subst, val)) space = true } traceEvent.end(te) return nil } func (v varsubst) serialize() serializableVar { return serializableVar{ Type: "varsubst", V: string(v.paren), Children: []serializableVar{ v.varname.serialize(), v.pat.serialize(), v.subst.serialize(), }, } } func (v varsubst) dump(d *dumpbuf) { d.Byte(valueTypeVarsubst) d.Byte(v.paren) v.varname.dump(d) v.pat.dump(d) v.subst.dump(d) } func str(buf []byte, alloc bool) Value { if alloc { return literal(string(buf)) } return tmpval(buf) } func appendStr(exp expr, buf []byte, alloc bool) expr { if len(buf) == 0 { return exp } if len(exp) == 0 { return append(exp, str(buf, alloc)) } switch v := exp[len(exp)-1].(type) { case literal: v += literal(string(buf)) exp[len(exp)-1] = v return exp case tmpval: v = append(v, buf...) exp[len(exp)-1] = v return exp } return append(exp, str(buf, alloc)) } func valueNum(v Value) (int, error) { switch v := v.(type) { case literal, tmpval: n, err := strconv.ParseInt(v.String(), 10, 64) return int(n), err } return 0, errNotLiteral } type parseOp struct { // alloc indicates text will be allocated as literal (string) alloc bool // matchParen matches parenthesis. 
// note: required for func arg matchParen bool } // parseExpr parses expression in `in` until it finds any byte in term. // if term is nil, it will parse to end of input. // if term is not nil, and it reaches to end of input, return error. // it returns parsed value, and parsed length `n`, so in[n-1] is any byte of // term, and in[n:] is next input. func parseExpr(in, term []byte, op parseOp) (Value, int, error) { var exp expr b := 0 i := 0 var saveParen byte parenDepth := 0 Loop: for i < len(in) { ch := in[i] if term != nil && bytes.IndexByte(term, ch) >= 0 { break Loop } switch ch { case '$': if i+1 >= len(in) { break Loop } if in[i+1] == '$' { exp = appendStr(exp, in[b:i+1], op.alloc) i += 2 b = i continue } if bytes.IndexByte(term, in[i+1]) >= 0 { exp = appendStr(exp, in[b:i], op.alloc) exp = append(exp, &varref{varname: literal("")}) i++ b = i break Loop } exp = appendStr(exp, in[b:i], op.alloc) v, n, err := parseDollar(in[i:], op.alloc) if err != nil { return nil, 0, err } i += n b = i exp = append(exp, v) continue case '(', '{': if !op.matchParen { break } cp := closeParen(ch) if i := bytes.IndexByte(term, cp); i >= 0 { parenDepth++ saveParen = cp term[i] = 0 } else if cp == saveParen { parenDepth++ } case saveParen: if !op.matchParen { break } parenDepth-- if parenDepth == 0 { i := bytes.IndexByte(term, 0) term[i] = saveParen saveParen = 0 } } i++ } exp = appendStr(exp, in[b:i], op.alloc) if i == len(in) && term != nil { glog.Warningf("parse: unexpected end of input: %q %d [%q]", in, i, term) return exp, i, errEndOfInput } return compactExpr(exp), i, nil } func closeParen(ch byte) byte { switch ch { case '(': return ')' case '{': return '}' } return 0 } // parseDollar parses // $(func expr[, expr...]) # func = literal SP // $(expr:expr=expr) // $(expr) // $x // it returns parsed value and parsed length. 
func parseDollar(in []byte, alloc bool) (Value, int, error) { if len(in) <= 1 { return nil, 0, errors.New("empty expr") } if in[0] != '$' { return nil, 0, errors.New("should starts with $") } if in[1] == '$' { return nil, 0, errors.New("should handle $$ as literal $") } oparen := in[1] paren := closeParen(oparen) if paren == 0 { // $x case. if in[1] >= '0' && in[1] <= '9' { return paramref(in[1] - '0'), 2, nil } return &varref{varname: str(in[1:2], alloc)}, 2, nil } term := []byte{paren, ':', ' '} var varname expr i := 2 op := parseOp{alloc: alloc} Again: for { e, n, err := parseExpr(in[i:], term, op) if err != nil { if err == errEndOfInput { // unmatched_paren2.mk varname = append(varname, toExpr(e)...) if len(varname) > 0 { for i, vn := range varname { if vr, ok := vn.(*varref); ok { if vr.paren == oparen { varname = varname[:i+1] varname[i] = expr{literal(fmt.Sprintf("$%c", oparen)), vr.varname} return &varref{varname: varname, paren: oparen}, i + 1 + n + 1, nil } } } } return nil, 0, errUnterminatedVariableReference } return nil, 0, err } varname = append(varname, toExpr(e)...) i += n switch in[i] { case paren: // ${expr} vname := compactExpr(varname) n, err := valueNum(vname) if err == nil { // ${n} return paramref(n), i + 1, nil } return &varref{varname: vname, paren: oparen}, i + 1, nil case ' ': // ${e ...} switch token := e.(type) { case literal, tmpval: funcName := intern(token.String()) if f, ok := funcMap[funcName]; ok { return parseFunc(f(), in, i+1, term[:1], funcName, op.alloc) } } term = term[:2] // drop ' ' continue Again case ':': // ${varname:...} colon := in[i : i+1] var vterm []byte vterm = append(vterm, term[:2]...) vterm[1] = '=' // term={paren, '='}. 
e, n, err := parseExpr(in[i+1:], vterm, op) if err != nil { return nil, 0, err } i += 1 + n if in[i] == paren { varname = appendStr(varname, colon, op.alloc) return &varref{varname: varname, paren: oparen}, i + 1, nil } // ${varname:xx=...} pat := e subst, n, err := parseExpr(in[i+1:], term[:1], op) if err != nil { return nil, 0, err } i += 1 + n // ${first:pat=e} return varsubst{ varname: compactExpr(varname), pat: pat, subst: subst, paren: oparen, }, i + 1, nil default: return nil, 0, fmt.Errorf("unexpected char %c at %d in %q", in[i], i, string(in)) } } } // skipSpaces skips spaces at front of `in` before any bytes in term. // in[n] will be the first non white space in in. func skipSpaces(in, term []byte) int { for i := 0; i < len(in); i++ { if bytes.IndexByte(term, in[i]) >= 0 { return i } switch in[i] { case ' ', '\t': default: return i } } return len(in) } // trimLiteralSpace trims literal space around v. func trimLiteralSpace(v Value) Value { switch v := v.(type) { case literal: return literal(strings.TrimSpace(string(v))) case tmpval: b := bytes.TrimSpace([]byte(v)) if len(b) == 0 { return literal("") } return tmpval(b) case expr: if len(v) == 0 { return v } switch s := v[0].(type) { case literal, tmpval: t := trimLiteralSpace(s) if t == literal("") { v = v[1:] } else { v[0] = t } } switch s := v[len(v)-1].(type) { case literal, tmpval: t := trimLiteralSpace(s) if t == literal("") { v = v[:len(v)-1] } else { v[len(v)-1] = t } } return compactExpr(v) } return v } // concatLine concatinates line with "\\\n" in function expression. // TODO(ukai): less alloc? 
func concatLine(v Value) Value { switch v := v.(type) { case literal: for { s := string(v) i := strings.Index(s, "\\\n") if i < 0 { return v } v = literal(s[:i] + strings.TrimLeft(s[i+2:], " \t")) } case tmpval: for { b := []byte(v) i := bytes.Index(b, []byte{'\\', '\n'}) if i < 0 { return v } var buf bytes.Buffer buf.Write(b[:i]) buf.Write(bytes.TrimLeft(b[i+2:], " \t")) v = tmpval(buf.Bytes()) } case expr: for i := range v { switch vv := v[i].(type) { case literal, tmpval: v[i] = concatLine(vv) } } return v } return v } // parseFunc parses function arguments from in[s:] for f. // in[0] is '$' and in[s] is space just after func name. // in[:n] will be "${func args...}" func parseFunc(f mkFunc, in []byte, s int, term []byte, funcName string, alloc bool) (Value, int, error) { f.AddArg(str(in[1:s-1], alloc)) arity := f.Arity() term = append(term, ',') i := skipSpaces(in[s:], term) i = s + i if i == len(in) { return f, i, nil } narg := 1 op := parseOp{alloc: alloc, matchParen: true} for { if arity != 0 && narg >= arity { // final arguments. term = term[:1] // drop ',' } v, n, err := parseExpr(in[i:], term, op) if err != nil { if err == errEndOfInput { return nil, 0, fmt.Errorf("*** unterminated call to function `%s': missing `)'.", funcName) } return nil, 0, err } v = concatLine(v) // TODO(ukai): do this in funcIf, funcAnd, or funcOr's compactor? 
if (narg == 1 && funcName == "if") || funcName == "and" || funcName == "or" { v = trimLiteralSpace(v) } f.AddArg(v) i += n narg++ if in[i] == term[0] { i++ break } i++ // should be ',' if i == len(in) { break } } var fv Value fv = f if compactor, ok := f.(compactor); ok { fv = compactor.Compact() } if EvalStatsFlag || traceEvent.enabled() { fv = funcstats{ Value: fv, str: fv.String(), } } return fv, i, nil } type compactor interface { Compact() Value } type funcstats struct { Value str string } func (f funcstats) Eval(w evalWriter, ev *Evaluator) error { te := traceEvent.begin("func", literal(f.str), traceEventMain) err := f.Value.Eval(w, ev) if err != nil { return err } // TODO(ukai): per functype? traceEvent.end(te) return nil } type matcherValue struct{} func (m matcherValue) Eval(w evalWriter, ev *Evaluator) error { return fmt.Errorf("couldn't eval matcher") } func (m matcherValue) serialize() serializableVar { return serializableVar{Type: ""} } func (m matcherValue) dump(d *dumpbuf) { d.err = fmt.Errorf("couldn't dump matcher") } type matchVarref struct{ matcherValue } func (m matchVarref) String() string { return "$(match-any)" } type literalRE struct { matcherValue *regexp.Regexp } func mustLiteralRE(s string) literalRE { return literalRE{ Regexp: regexp.MustCompile(s), } } func (r literalRE) String() string { return r.Regexp.String() } func matchValue(exp, pat Value) bool { switch pat := pat.(type) { case literal: return literal(exp.String()) == pat } // TODO: other type match? 
return false } func matchExpr(exp, pat expr) ([]Value, bool) { if len(exp) != len(pat) { return nil, false } var mv matchVarref var matches []Value for i := range exp { if pat[i] == mv { switch exp[i].(type) { case paramref, *varref: matches = append(matches, exp[i]) continue } return nil, false } if patre, ok := pat[i].(literalRE); ok { re := patre.Regexp m := re.FindStringSubmatch(exp[i].String()) if m == nil { return nil, false } for _, sm := range m[1:] { matches = append(matches, literal(sm)) } continue } if !matchValue(exp[i], pat[i]) { return nil, false } } return matches, true } expr.h0100644 0000000 0000000 00000004016 13654546140 010745 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef EXPR_H_ #define EXPR_H_ #include #include #include "string_piece.h" using namespace std; class Evaluator; struct Loc; class Evaluable { public: virtual void Eval(Evaluator* ev, string* s) const = 0; string Eval(Evaluator*) const; protected: Evaluable(); virtual ~Evaluable(); }; class Value : public Evaluable { public: // All NewExpr calls take ownership of the Value instances. static Value* NewExpr(Value* v1, Value* v2); static Value* NewExpr(Value* v1, Value* v2, Value* v3); static Value* NewExpr(vector* values); static Value* NewLiteral(StringPiece s); virtual ~Value(); virtual bool IsLiteral() const { return false; } // Only safe after IsLiteral() returns true. 
virtual StringPiece GetLiteralValueUnsafe() const { return ""; } static string DebugString(const Value*); protected: Value(); virtual string DebugString_() const = 0; }; enum struct ParseExprOpt { NORMAL = 0, DEFINE, COMMAND, FUNC, }; Value* ParseExprImpl(const Loc& loc, StringPiece s, const char* terms, ParseExprOpt opt, size_t* index_out, bool trim_right_space = false); Value* ParseExpr(const Loc& loc, StringPiece s, ParseExprOpt opt = ParseExprOpt::NORMAL); string JoinValues(const vector& vals, const char* sep); #endif // EXPR_H_ expr_test.go0100644 0000000 0000000 00000012634 13654546140 012167 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package kati import ( "reflect" "testing" ) func TestParseExpr(t *testing.T) { for _, tc := range []struct { in string val Value isErr bool }{ { in: "foo", val: literal("foo"), }, { in: "(foo)", val: literal("(foo)"), }, { in: "{foo}", val: literal("{foo}"), }, { in: "$$", val: literal("$"), }, { in: "foo$$bar", val: literal("foo$bar"), }, { in: "$foo", val: expr{&varref{varname: literal("f")}, literal("oo")}, }, { in: "$(foo)", val: &varref{varname: literal("foo"), paren: '('}, }, { in: "$(foo:.c=.o)", val: varsubst{ varname: literal("foo"), pat: literal(".c"), subst: literal(".o"), paren: '(', }, }, { in: "$(subst $(space),$(,),$(foo))/bar", val: expr{ &funcSubst{ fclosure: fclosure{ args: []Value{ literal("(subst"), &varref{ varname: literal("space"), paren: '(', }, &varref{ varname: literal(","), paren: '(', }, &varref{ varname: literal("foo"), paren: '(', }, }, }, }, literal("/bar"), }, }, { in: "$(subst $(space),$,,$(foo))", val: &funcSubst{ fclosure: fclosure{ args: []Value{ literal("(subst"), &varref{ varname: literal("space"), paren: '(', }, &varref{ varname: literal(""), }, expr{ literal(","), &varref{ varname: literal("foo"), paren: '(', }, }, }, }, }, }, { in: `$(shell echo '()')`, val: &funcShell{ fclosure: fclosure{ args: []Value{ literal("(shell"), literal("echo '()'"), }, }, }, }, { in: `${shell echo '()'}`, val: &funcShell{ fclosure: fclosure{ args: []Value{ literal("{shell"), literal("echo '()'"), }, }, }, }, { in: `$(shell echo ')')`, val: expr{ &funcShell{ fclosure: fclosure{ args: []Value{ literal("(shell"), literal("echo '"), }, }, }, literal("')"), }, }, { in: `${shell echo ')'}`, val: &funcShell{ fclosure: fclosure{ args: []Value{ literal("{shell"), literal("echo ')'"), }, }, }, }, { in: `${shell echo '}'}`, val: expr{ &funcShell{ fclosure: fclosure{ args: []Value{ literal("{shell"), literal("echo '"), }, }, }, literal("'}"), }, }, { in: `$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]')`, val: &funcShell{ fclosure: fclosure{ 
args: []Value{ literal("(shell"), literal(`make --version | ruby -n0e 'puts $_[/Make (\d)/,1]'`), }, }, }, }, { in: `$(and ${TRUE}, $(X) )`, val: &funcAnd{ fclosure: fclosure{ args: []Value{ literal("(and"), &varref{ varname: literal("TRUE"), paren: '{', }, &varref{ varname: literal("X"), paren: '(', }, }, }, }, }, { in: `$(call func, \ foo)`, val: &funcCall{ fclosure: fclosure{ args: []Value{ literal("(call"), literal("func"), literal(" foo"), }, }, }, }, { in: `$(call func, \)`, val: &funcCall{ fclosure: fclosure{ args: []Value{ literal("(call"), literal("func"), literal(` \`), }, }, }, }, { in: `$(eval ## comment)`, val: &funcNop{ expr: `$(eval ## comment)`, }, }, { in: `$(eval foo = bar)`, val: &funcEvalAssign{ lhs: "foo", op: "=", rhs: literal("bar"), }, }, { in: `$(eval foo :=)`, val: &funcEvalAssign{ lhs: "foo", op: ":=", rhs: literal(""), }, }, { in: `$(eval foo := $(bar))`, val: &funcEvalAssign{ lhs: "foo", op: ":=", rhs: &varref{ varname: literal("bar"), paren: '(', }, }, }, { in: `$(eval foo := $$(bar))`, val: &funcEvalAssign{ lhs: "foo", op: ":=", rhs: literal("$(bar)"), }, }, { in: `$(strip $1)`, val: &funcStrip{ fclosure: fclosure{ args: []Value{ literal("(strip"), paramref(1), }, }, }, }, { in: `$(strip $(1))`, val: &funcStrip{ fclosure: fclosure{ args: []Value{ literal("(strip"), paramref(1), }, }, }, }, } { val, _, err := parseExpr([]byte(tc.in), nil, parseOp{alloc: true}) if tc.isErr { if err == nil { t.Errorf(`parseExpr(%q)=_, _, nil; want error`, tc.in) } continue } if err != nil { t.Errorf(`parseExpr(%q)=_, _, %v; want nil error`, tc.in, err) continue } if got, want := val, tc.val; !reflect.DeepEqual(got, want) { t.Errorf("parseExpr(%[1]q)=%[2]q %#[2]v, _, _;\n want %[3]q %#[3]v, _, _", tc.in, got, want) } } } file.cc0100644 0000000 0000000 00000003052 13654546140 011043 0ustar000000000 0000000 // Copyright 2015 Google Inc. 
All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "file.h" #include #include #include #include #include "fileutil.h" #include "log.h" #include "parser.h" #include "stmt.h" Makefile::Makefile(const string& filename) : mtime_(0), filename_(filename), exists_(false) { int fd = open(filename.c_str(), O_RDONLY); if (fd < 0) { return; } struct stat st; if (fstat(fd, &st) < 0) { PERROR("fstat failed for %s", filename.c_str()); } size_t len = st.st_size; mtime_ = st.st_mtime; buf_.resize(len); exists_ = true; ssize_t r = HANDLE_EINTR(read(fd, &buf_[0], len)); if (r != static_cast(len)) { if (r < 0) PERROR("read failed for %s", filename.c_str()); ERROR("Unexpected read length=%zd expected=%zu", r, len); } if (close(fd) < 0) { PERROR("close failed for %s", filename.c_str()); } Parse(this); } Makefile::~Makefile() { for (Stmt* stmt : stmts_) delete stmt; } file.h0100644 0000000 0000000 00000002265 13654546140 010712 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// See the License for the specific language governing permissions and // limitations under the License. #ifndef FILE_H_ #define FILE_H_ #include #include #include using namespace std; struct Stmt; class Makefile { public: explicit Makefile(const string& filename); ~Makefile(); const string& buf() const { return buf_; } const string& filename() const { return filename_; } const vector& stmts() const { return stmts_; } vector* mutable_stmts() { return &stmts_; } bool Exists() const { return exists_; } private: string buf_; uint64_t mtime_; string filename_; vector stmts_; bool exists_; }; #endif // FILE_H_ file_cache.cc0100644 0000000 0000000 00000003274 13654546140 012174 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "file_cache.h" #include #include "file.h" static MakefileCacheManager* g_instance; MakefileCacheManager::MakefileCacheManager() {} MakefileCacheManager::~MakefileCacheManager() {} MakefileCacheManager* MakefileCacheManager::Get() { return g_instance; } class MakefileCacheManagerImpl : public MakefileCacheManager { public: MakefileCacheManagerImpl() { g_instance = this; } virtual ~MakefileCacheManagerImpl() { for (auto p : cache_) { delete p.second; } } virtual Makefile* ReadMakefile(const string& filename) override { Makefile* result = NULL; auto p = cache_.emplace(filename, result); if (p.second) { p.first->second = result = new Makefile(filename); } else { result = p.first->second; } return result; } virtual void GetAllFilenames(unordered_set* out) override { for (const auto& p : cache_) out->insert(p.first); } private: unordered_map cache_; }; MakefileCacheManager* NewMakefileCacheManager() { return new MakefileCacheManagerImpl(); } file_cache.h0100644 0000000 0000000 00000002110 13654546140 012022 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
#ifndef FILE_CACHE_H_ #define FILE_CACHE_H_ #include #include using namespace std; class Makefile; class MakefileCacheManager { public: virtual ~MakefileCacheManager(); virtual Makefile* ReadMakefile(const string& filename) = 0; virtual void GetAllFilenames(unordered_set* out) = 0; static MakefileCacheManager* Get(); protected: MakefileCacheManager(); }; MakefileCacheManager* NewMakefileCacheManager(); #endif // FILE_CACHE_H_ fileutil.cc0100644 0000000 0000000 00000011664 13654546140 011751 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "fileutil.h" #include #include #include #include #include #include #include #include #include #if defined(__APPLE__) #include #endif #include #include "log.h" #include "strutil.h" bool Exists(StringPiece filename) { CHECK(filename.size() < PATH_MAX); struct stat st; if (stat(filename.as_string().c_str(), &st) < 0) { return false; } return true; } double GetTimestampFromStat(const struct stat& st) { #if defined(__linux__) return st.st_mtime + st.st_mtim.tv_nsec * 0.001 * 0.001 * 0.001; #else return st.st_mtime; #endif } double GetTimestamp(StringPiece filename) { CHECK(filename.size() < PATH_MAX); struct stat st; if (stat(filename.as_string().c_str(), &st) < 0) { return -2.0; } return GetTimestampFromStat(st); } int RunCommand(const string& shell, const string& shellflag, const string& cmd, RedirectStderr redirect_stderr, string* s) { const char* argv[] = {NULL, NULL, NULL, NULL}; string cmd_with_shell; if (shell[0] != '/' || shell.find_first_of(" $") != string::npos) { string cmd_escaped = cmd; EscapeShell(&cmd_escaped); cmd_with_shell = shell + " " + shellflag + " \"" + cmd_escaped + "\""; argv[0] = "/bin/sh"; argv[1] = "-c"; argv[2] = cmd_with_shell.c_str(); } else { // If the shell isn't complicated, we don't need to wrap in /bin/sh argv[0] = shell.c_str(); argv[1] = shellflag.c_str(); argv[2] = cmd.c_str(); } int pipefd[2]; if (pipe(pipefd) != 0) PERROR("pipe failed"); int pid; if ((pid = vfork())) { int status; close(pipefd[1]); while (true) { int result = waitpid(pid, &status, WNOHANG); if (result < 0) PERROR("waitpid failed"); while (true) { char buf[4096]; ssize_t r = HANDLE_EINTR(read(pipefd[0], buf, 4096)); if (r < 0) PERROR("read failed"); if (r == 0) break; s->append(buf, buf + r); } if (result != 0) { break; } } close(pipefd[0]); return status; } else { close(pipefd[0]); if (redirect_stderr == RedirectStderr::STDOUT) { if (dup2(pipefd[1], 2) < 0) PERROR("dup2 failed"); } else if (redirect_stderr == RedirectStderr::DEV_NULL) { 
int fd = open("/dev/null", O_WRONLY); if (dup2(fd, 2) < 0) PERROR("dup2 failed"); close(fd); } if (dup2(pipefd[1], 1) < 0) PERROR("dup2 failed"); close(pipefd[1]); execvp(argv[0], const_cast(argv)); PLOG("execvp for %s failed", argv[0]); kill(getppid(), SIGTERM); _exit(1); } } void GetExecutablePath(string* path) { #if defined(__linux__) char mypath[PATH_MAX + 1]; ssize_t l = readlink("/proc/self/exe", mypath, PATH_MAX); if (l < 0) { PERROR("readlink for /proc/self/exe"); } mypath[l] = '\0'; *path = mypath; #elif defined(__APPLE__) char mypath[PATH_MAX + 1]; uint32_t size = PATH_MAX; if (_NSGetExecutablePath(mypath, &size) != 0) { ERROR("_NSGetExecutablePath failed"); } mypath[size] = 0; *path = mypath; #else #error "Unsupported OS" #endif } namespace { class GlobCache { public: ~GlobCache() { Clear(); } void Get(const char* pat, vector** files) { auto p = cache_.emplace(pat, nullptr); if (p.second) { vector* files = p.first->second = new vector; if (strcspn(pat, "?*[\\") != strlen(pat)) { glob_t gl; glob(pat, 0, NULL, &gl); for (size_t i = 0; i < gl.gl_pathc; i++) { files->push_back(gl.gl_pathv[i]); } globfree(&gl); } else { if (Exists(pat)) files->push_back(pat); } } *files = p.first->second; } const unordered_map*>& GetAll() const { return cache_; } void Clear() { for (auto& p : cache_) { delete p.second; } cache_.clear(); } private: unordered_map*> cache_; }; static GlobCache g_gc; } // namespace void Glob(const char* pat, vector** files) { g_gc.Get(pat, files); } const unordered_map*>& GetAllGlobCache() { return g_gc.GetAll(); } void ClearGlobCache() { g_gc.Clear(); } fileutil.go0100644 0000000 0000000 00000002622 13654546140 011763 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "os" "path/filepath" ) func exists(filename string) bool { _, err := os.Stat(filename) if os.IsNotExist(err) { return false } return true } type vpath struct { pattern string dirs []string } type searchPaths struct { vpaths []vpath // vpath directives dirs []string // VPATH variable } func (s searchPaths) exists(target string) (string, bool) { if exists(target) { return target, true } for _, vpath := range s.vpaths { if !matchPattern(vpath.pattern, target) { continue } for _, dir := range vpath.dirs { vtarget := filepath.Join(dir, target) if exists(vtarget) { return vtarget, true } } } for _, dir := range s.dirs { vtarget := filepath.Join(dir, target) if exists(vtarget) { return vtarget, true } } return target, false } fileutil.h0100644 0000000 0000000 00000003217 13654546140 011606 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
#ifndef FILEUTIL_H_ #define FILEUTIL_H_ #include #include #include #include #include #include "string_piece.h" using namespace std; bool Exists(StringPiece f); double GetTimestampFromStat(const struct stat& st); double GetTimestamp(StringPiece f); enum struct RedirectStderr { NONE, STDOUT, DEV_NULL, }; int RunCommand(const string& shell, const string& shellflag, const string& cmd, RedirectStderr redirect_stderr, string* out); void GetExecutablePath(string* path); void Glob(const char* pat, vector** files); const unordered_map*>& GetAllGlobCache(); void ClearGlobCache(); #define HANDLE_EINTR(x) \ ({ \ int r; \ do { \ r = (x); \ } while (r == -1 && errno == EINTR); \ r; \ }) #endif // FILEUTIL_H_ fileutil_bench.cc0100644 0000000 0000000 00000002543 13654546140 013104 0ustar000000000 0000000 // Copyright 2016 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include #include #include "fileutil.h" static void BM_RunCommand(benchmark::State& state) { std::string shell = "/bin/bash"; std::string shellflag = "-c"; std::string cmd = "echo $((1+3))"; while (state.KeepRunning()) { std::string result; RunCommand(shell, shellflag, cmd, RedirectStderr::NONE, &result); } } BENCHMARK(BM_RunCommand); static void BM_RunCommand_ComplexShell(benchmark::State& state) { std::string shell = "/bin/bash "; std::string shellflag = "-c"; std::string cmd = "echo $((1+3))"; while (state.KeepRunning()) { std::string result; RunCommand(shell, shellflag, cmd, RedirectStderr::NONE, &result); } } BENCHMARK(BM_RunCommand_ComplexShell); BENCHMARK_MAIN(); find.cc0100644 0000000 0000000 00000071551 13654546140 011055 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "find.h" #include #include #include #include #include #include #include #include #include //#undef NOLOG #include "fileutil.h" #include "log.h" #include "string_piece.h" #include "strutil.h" #include "timeutil.h" #define FIND_WARN_LOC(...) 
\ do { \ if (g_flags.werror_find_emulator) { \ ERROR_LOC(__VA_ARGS__); \ } else { \ WARN_LOC(__VA_ARGS__); \ } \ } while (0) class FindCond { public: virtual ~FindCond() = default; virtual bool IsTrue(const string& path, unsigned char type) const = 0; virtual bool Countable() const = 0; virtual unsigned Count() const = 0; protected: FindCond() = default; }; namespace { class NameCond : public FindCond { public: explicit NameCond(const string& n) : name_(n) { has_wildcard_ = (n.find_first_of("?*[") != string::npos); } virtual bool IsTrue(const string& path, unsigned char) const override { return fnmatch(name_.c_str(), Basename(path).data(), 0) == 0; } virtual bool Countable() const override { return !has_wildcard_; } virtual unsigned Count() const override { return 1; } private: string name_; bool has_wildcard_; }; class TypeCond : public FindCond { public: explicit TypeCond(unsigned char t) : type_(t) {} virtual bool IsTrue(const string&, unsigned char type) const override { return type == type_; } virtual bool Countable() const override { return false; } virtual unsigned Count() const override { return 0; } private: unsigned char type_; }; class NotCond : public FindCond { public: NotCond(FindCond* c) : c_(c) {} virtual bool IsTrue(const string& path, unsigned char type) const override { return !c_->IsTrue(path, type); } virtual bool Countable() const override { return false; } virtual unsigned Count() const override { return 0; } private: unique_ptr c_; }; class AndCond : public FindCond { public: AndCond(FindCond* c1, FindCond* c2) : c1_(c1), c2_(c2) {} virtual bool IsTrue(const string& path, unsigned char type) const override { if (c1_->IsTrue(path, type)) return c2_->IsTrue(path, type); return false; } virtual bool Countable() const override { return false; } virtual unsigned Count() const override { return 0; } private: unique_ptr c1_, c2_; }; class OrCond : public FindCond { public: OrCond(FindCond* c1, FindCond* c2) : c1_(c1), c2_(c2) {} virtual bool 
IsTrue(const string& path, unsigned char type) const override { if (!c1_->IsTrue(path, type)) return c2_->IsTrue(path, type); return true; } virtual bool Countable() const override { return c1_->Countable() && c2_->Countable(); ; } virtual unsigned Count() const override { return c1_->Count() + c2_->Count(); } private: unique_ptr c1_, c2_; }; class DirentNode { public: virtual ~DirentNode() = default; virtual const DirentNode* FindDir(StringPiece) const { return NULL; } virtual bool RunFind(const FindCommand& fc, const Loc& loc, int d, string* path, unordered_map* cur_read_dirs, vector& out) const = 0; virtual bool IsDirectory() const = 0; const string& base() const { return base_; } protected: explicit DirentNode(const string& name) { base_ = Basename(name).as_string(); } void PrintIfNecessary(const FindCommand& fc, const string& path, unsigned char type, int d, vector& out) const { if (fc.print_cond && !fc.print_cond->IsTrue(path, type)) return; if (d < fc.mindepth) return; out.push_back(path); } string base_; }; class DirentFileNode : public DirentNode { public: DirentFileNode(const string& name, unsigned char type) : DirentNode(name), type_(type) {} virtual bool RunFind(const FindCommand& fc, const Loc&, int d, string* path, unordered_map*, vector& out) const override { PrintIfNecessary(fc, *path, type_, d, out); return true; } virtual bool IsDirectory() const override { return false; } private: unsigned char type_; }; struct ScopedReadDirTracker { public: ScopedReadDirTracker(const DirentNode* n, const string& path, unordered_map* cur_read_dirs) : n_(NULL), cur_read_dirs_(cur_read_dirs) { const auto& p = cur_read_dirs->emplace(n, path); if (p.second) { n_ = n; } else { conflicted_ = p.first->second; } } ~ScopedReadDirTracker() { if (n_) cur_read_dirs_->erase(n_); } bool ok() const { return conflicted_.empty(); } const string& conflicted() const { return conflicted_; } private: string conflicted_; const DirentNode* n_; unordered_map* cur_read_dirs_; }; class 
DirentDirNode : public DirentNode { public: explicit DirentDirNode(const string& name) : DirentNode(name) {} ~DirentDirNode() { for (auto& p : children_) { delete p.second; } } virtual const DirentNode* FindDir(StringPiece d) const override { if (d.empty() || d == ".") return this; size_t index = d.find('/'); const string& p = d.substr(0, index).as_string(); if (p.empty() || p == ".") return FindDir(d.substr(index + 1)); ; for (auto& child : children_) { if (p == child.first) { if (index == string::npos) return child.second; StringPiece nd = d.substr(index + 1); return child.second->FindDir(nd); } } return NULL; } virtual bool RunFind(const FindCommand& fc, const Loc& loc, int d, string* path, unordered_map* cur_read_dirs, vector& out) const override { ScopedReadDirTracker srdt(this, *path, cur_read_dirs); if (!srdt.ok()) { FIND_WARN_LOC(loc, "FindEmulator: find: File system loop detected; `%s' " "is part of the same file system loop as `%s'.", path->c_str(), srdt.conflicted().c_str()); return true; } fc.read_dirs->insert(*path); if (fc.prune_cond && fc.prune_cond->IsTrue(*path, DT_DIR)) { if (fc.type != FindCommandType::FINDLEAVES) { out.push_back(*path); } return true; } PrintIfNecessary(fc, *path, DT_DIR, d, out); if (d >= fc.depth) return true; size_t orig_path_size = path->size(); if (fc.type == FindCommandType::FINDLEAVES) { size_t orig_out_size = out.size(); for (const auto& p : children_) { DirentNode* c = p.second; // We will handle directories later. if (c->IsDirectory()) continue; if ((*path)[path->size() - 1] != '/') *path += '/'; *path += c->base(); if (!c->RunFind(fc, loc, d + 1, path, cur_read_dirs, out)) return false; path->resize(orig_path_size); } // Found a leaf, stop the search. if (orig_out_size != out.size()) { // If we've found all possible files in this directory, we don't need // to add a regen dependency on the directory, we just need to ensure // that the files are not removed. 
if (fc.print_cond->Countable() && fc.print_cond->Count() == out.size() - orig_out_size) { fc.read_dirs->erase(*path); for (unsigned i = orig_out_size; i < out.size(); i++) { fc.found_files->push_back(out[i]); } } return true; } for (const auto& p : children_) { DirentNode* c = p.second; if (!c->IsDirectory()) continue; if ((*path)[path->size() - 1] != '/') *path += '/'; *path += c->base(); if (!c->RunFind(fc, loc, d + 1, path, cur_read_dirs, out)) return false; path->resize(orig_path_size); } } else { for (const auto& p : children_) { DirentNode* c = p.second; if ((*path)[path->size() - 1] != '/') *path += '/'; *path += c->base(); if (!c->RunFind(fc, loc, d + 1, path, cur_read_dirs, out)) return false; path->resize(orig_path_size); } } return true; } virtual bool IsDirectory() const override { return true; } void Add(const string& name, DirentNode* c) { children_.emplace(children_.end(), name, c); } private: vector> children_; }; class DirentSymlinkNode : public DirentNode { public: explicit DirentSymlinkNode(const string& name) : DirentNode(name), to_(NULL), errno_(0) {} virtual const DirentNode* FindDir(StringPiece d) const override { if (errno_ == 0 && to_) return to_->FindDir(d); return NULL; } virtual bool RunFind(const FindCommand& fc, const Loc& loc, int d, string* path, unordered_map* cur_read_dirs, vector& out) const override { unsigned char type = DT_LNK; if (fc.follows_symlinks && errno_ != ENOENT) { if (errno_) { if (fc.type != FindCommandType::FINDLEAVES) { FIND_WARN_LOC(loc, "FindEmulator: find: `%s': %s", path->c_str(), strerror(errno_)); } return true; } if (!to_) { LOG("FindEmulator does not support %s", path->c_str()); return false; } return to_->RunFind(fc, loc, d, path, cur_read_dirs, out); } PrintIfNecessary(fc, *path, type, d, out); return true; } virtual bool IsDirectory() const override { return errno_ == 0 && to_ && to_->IsDirectory(); } void set_to(const DirentNode* to) { to_ = to; } void set_errno(int e) { errno_ = e; } private: const 
DirentNode* to_; int errno_; }; class FindCommandParser { public: FindCommandParser(StringPiece cmd, FindCommand* fc) : cmd_(cmd), fc_(fc), has_if_(false) {} bool Parse() { cur_ = cmd_; if (!ParseImpl()) { LOG("FindEmulator: Unsupported find command: %.*s", SPF(cmd_)); return false; } CHECK(TrimLeftSpace(cur_).empty()); return true; } private: bool GetNextToken(StringPiece* tok) { if (!unget_tok_.empty()) { *tok = unget_tok_; unget_tok_.clear(); return true; } cur_ = TrimLeftSpace(cur_); if (cur_[0] == ';') { *tok = cur_.substr(0, 1); cur_ = cur_.substr(1); return true; } if (cur_[0] == '&') { if (cur_.get(1) != '&') { return false; } *tok = cur_.substr(0, 2); cur_ = cur_.substr(2); return true; } size_t i = 0; while (i < cur_.size() && !isspace(cur_[i]) && cur_[i] != ';' && cur_[i] != '&') { i++; } *tok = cur_.substr(0, i); cur_ = cur_.substr(i); const char c = tok->get(0); if (c == '\'' || c == '"') { if (tok->size() < 2 || (*tok)[tok->size() - 1] != c) return false; *tok = tok->substr(1, tok->size() - 2); return true; } else { // Support stripping off a leading backslash if (c == '\\') { *tok = tok->substr(1); } // But if there are any others, we can't support it, as unescaping would // require allocation if (tok->find("\\") != string::npos) { return false; } } return true; } void UngetToken(StringPiece tok) { CHECK(unget_tok_.empty()); if (!tok.empty()) unget_tok_ = tok; } bool ParseTest() { if (has_if_ || !fc_->testdir.empty()) return false; StringPiece tok; if (!GetNextToken(&tok) || tok != "-d") return false; if (!GetNextToken(&tok) || tok.empty()) return false; fc_->testdir = tok.as_string(); return true; } FindCond* ParseFact(StringPiece tok) { if (tok == "-not" || tok == "!") { if (!GetNextToken(&tok) || tok.empty()) return NULL; unique_ptr c(ParseFact(tok)); if (!c.get()) return NULL; return new NotCond(c.release()); } else if (tok == "(") { if (!GetNextToken(&tok) || tok.empty()) return NULL; unique_ptr c(ParseExpr(tok)); if (!GetNextToken(&tok) || tok 
!= ")") { return NULL; } return c.release(); } else if (tok == "-name") { if (!GetNextToken(&tok) || tok.empty()) return NULL; return new NameCond(tok.as_string()); } else if (tok == "-type") { if (!GetNextToken(&tok) || tok.empty()) return NULL; char type; if (tok == "b") type = DT_BLK; else if (tok == "c") type = DT_CHR; else if (tok == "d") type = DT_DIR; else if (tok == "p") type = DT_FIFO; else if (tok == "l") type = DT_LNK; else if (tok == "f") type = DT_REG; else if (tok == "s") type = DT_SOCK; else return NULL; return new TypeCond(type); } else { UngetToken(tok); return NULL; } } FindCond* ParseTerm(StringPiece tok) { unique_ptr c(ParseFact(tok)); if (!c.get()) return NULL; while (true) { if (!GetNextToken(&tok)) return NULL; if (tok == "-and" || tok == "-a") { if (!GetNextToken(&tok) || tok.empty()) return NULL; } else { if (tok != "-not" && tok != "!" && tok != "(" && tok != "-name" && tok != "-type") { UngetToken(tok); return c.release(); } } unique_ptr r(ParseFact(tok)); if (!r.get()) { return NULL; } c.reset(new AndCond(c.release(), r.release())); } } FindCond* ParseExpr(StringPiece tok) { unique_ptr c(ParseTerm(tok)); if (!c.get()) return NULL; while (true) { if (!GetNextToken(&tok)) return NULL; if (tok != "-or" && tok != "-o") { UngetToken(tok); return c.release(); } if (!GetNextToken(&tok) || tok.empty()) return NULL; unique_ptr r(ParseTerm(tok)); if (!r.get()) { return NULL; } c.reset(new OrCond(c.release(), r.release())); } } // ::= { } // ::= {[] } // ::= | '(' ')' | // ::= '-not' | '!' 
// ::= '-and' | '-a' // ::= '-or' | '-o' // ::= | | // ::= '-name' NAME // ::= '-type' TYPE // ::= '-maxdepth' MAXDEPTH FindCond* ParseFindCond(StringPiece tok) { return ParseExpr(tok); } bool ParseFind() { fc_->type = FindCommandType::FIND; StringPiece tok; while (true) { if (!GetNextToken(&tok)) return false; if (tok.empty() || tok == ";") return true; if (tok == "-L") { fc_->follows_symlinks = true; } else if (tok == "-prune") { if (!fc_->print_cond || fc_->prune_cond) return false; if (!GetNextToken(&tok) || tok != "-o") return false; fc_->prune_cond.reset(fc_->print_cond.release()); } else if (tok == "-print") { if (!GetNextToken(&tok) || !tok.empty()) return false; return true; } else if (tok == "-maxdepth") { if (!GetNextToken(&tok) || tok.empty()) return false; const string& depth_str = tok.as_string(); char* endptr; long d = strtol(depth_str.c_str(), &endptr, 10); if (endptr != depth_str.data() + depth_str.size() || d < 0 || d > INT_MAX) { return false; } fc_->depth = d; } else if (tok[0] == '-' || tok == "(" || tok == "!") { if (fc_->print_cond.get()) return false; FindCond* c = ParseFindCond(tok); if (!c) return false; fc_->print_cond.reset(c); } else if (tok == "2>") { if (!GetNextToken(&tok) || tok != "/dev/null") { return false; } fc_->redirect_to_devnull = true; } else if (tok.find_first_of("|;&><*'\"") != string::npos) { return false; } else { fc_->finddirs.push_back(tok.as_string()); } } } bool ParseFindLeaves() { fc_->type = FindCommandType::FINDLEAVES; fc_->follows_symlinks = true; StringPiece tok; vector findfiles; while (true) { if (!GetNextToken(&tok)) return false; if (tok.empty()) { if (fc_->finddirs.size() == 0) { // backwards compatibility if (findfiles.size() < 2) return false; fc_->finddirs.swap(findfiles); fc_->print_cond.reset(new NameCond(fc_->finddirs.back())); fc_->finddirs.pop_back(); } else { if (findfiles.size() < 1) return false; for (auto& file : findfiles) { FindCond* cond = new NameCond(file); if (fc_->print_cond.get()) { 
cond = new OrCond(fc_->print_cond.release(), cond); } CHECK(!fc_->print_cond.get()); fc_->print_cond.reset(cond); } } return true; } if (HasPrefix(tok, "--prune=")) { FindCond* cond = new NameCond(tok.substr(strlen("--prune=")).as_string()); if (fc_->prune_cond.get()) { cond = new OrCond(fc_->prune_cond.release(), cond); } CHECK(!fc_->prune_cond.get()); fc_->prune_cond.reset(cond); } else if (HasPrefix(tok, "--mindepth=")) { string mindepth_str = tok.substr(strlen("--mindepth=")).as_string(); char* endptr; long d = strtol(mindepth_str.c_str(), &endptr, 10); if (endptr != mindepth_str.data() + mindepth_str.size() || d < INT_MIN || d > INT_MAX) { return false; } fc_->mindepth = d; } else if (HasPrefix(tok, "--dir=")) { StringPiece dir = tok.substr(strlen("--dir=")); fc_->finddirs.push_back(dir.as_string()); } else if (HasPrefix(tok, "--")) { if (g_flags.werror_find_emulator) { ERROR("Unknown flag in findleaves.py: %.*s", SPF(tok)); } else { WARN("Unknown flag in findleaves.py: %.*s", SPF(tok)); } return false; } else { findfiles.push_back(tok.as_string()); } } } bool ParseImpl() { while (true) { StringPiece tok; if (!GetNextToken(&tok)) return false; if (tok.empty()) return true; if (tok == "cd") { if (!GetNextToken(&tok) || tok.empty() || !fc_->chdir.empty()) return false; fc_->chdir = tok.as_string(); if (!GetNextToken(&tok) || (tok != ";" && tok != "&&")) return false; } else if (tok == "if") { if (!GetNextToken(&tok) || tok != "[") return false; if (!ParseTest()) return false; if (!GetNextToken(&tok) || tok != "]") return false; if (!GetNextToken(&tok) || tok != ";") return false; if (!GetNextToken(&tok) || tok != "then") return false; has_if_ = true; } else if (tok == "test") { if (!fc_->chdir.empty()) return false; if (!ParseTest()) return false; if (!GetNextToken(&tok) || tok != "&&") return false; } else if (tok == "find") { if (!ParseFind()) return false; if (has_if_) { if (!GetNextToken(&tok) || tok != "fi") return false; } if (!GetNextToken(&tok) || 
!tok.empty()) return false; return true; } else if (tok == "build/tools/findleaves.py" || tok == "build/make/tools/findleaves.py") { if (!ParseFindLeaves()) return false; return true; } else { return false; } } } StringPiece cmd_; StringPiece cur_; FindCommand* fc_; bool has_if_; StringPiece unget_tok_; }; static FindEmulator* g_instance; class FindEmulatorImpl : public FindEmulator { public: FindEmulatorImpl() : node_cnt_(0), is_initialized_(false) { g_instance = this; } virtual ~FindEmulatorImpl() = default; bool CanHandle(StringPiece s) const { return (!HasPrefix(s, "../") && !HasPrefix(s, "/") && !HasPrefix(s, ".repo") && !HasPrefix(s, ".git")); } const DirentNode* FindDir(StringPiece d, bool* should_fallback) { const DirentNode* r = root_->FindDir(d); if (!r) { *should_fallback = Exists(d); } return r; } virtual bool HandleFind(const string& cmd UNUSED, const FindCommand& fc, const Loc& loc, string* out) override { if (!CanHandle(fc.chdir)) { LOG("FindEmulator: Cannot handle chdir (%.*s): %s", SPF(fc.chdir), cmd.c_str()); return false; } if (!is_initialized_) { ScopedTimeReporter tr("init find emulator time"); root_.reset(ConstructDirectoryTree("")); if (!root_) { ERROR("FindEmulator: Cannot open root directory"); } ResolveSymlinks(); LOG_STAT("%d find nodes", node_cnt_); is_initialized_ = true; } if (!fc.testdir.empty()) { if (!CanHandle(fc.testdir)) { LOG("FindEmulator: Cannot handle test dir (%.*s): %s", SPF(fc.testdir), cmd.c_str()); return false; } bool should_fallback = false; if (!FindDir(fc.testdir, &should_fallback)) { LOG("FindEmulator: Test dir (%.*s) not found: %s", SPF(fc.testdir), cmd.c_str()); return !should_fallback; } } const DirentNode* root = root_.get(); if (!fc.chdir.empty()) { if (!CanHandle(fc.chdir)) { LOG("FindEmulator: Cannot handle chdir (%.*s): %s", SPF(fc.chdir), cmd.c_str()); return false; } root = root->FindDir(fc.chdir); if (!root) { if (Exists(fc.chdir)) return false; if (!fc.redirect_to_devnull) { FIND_WARN_LOC(loc, 
"FindEmulator: cd: %.*s: No such file or directory", SPF(fc.chdir)); } return true; } } vector results; for (const string& finddir : fc.finddirs) { if (!CanHandle(finddir)) { LOG("FindEmulator: Cannot handle find dir (%s): %s", finddir.c_str(), cmd.c_str()); return false; } const DirentNode* base; base = root->FindDir(finddir); if (!base) { if (Exists(finddir)) { return false; } if (!fc.redirect_to_devnull) { FIND_WARN_LOC(loc, "FindEmulator: find: `%s': No such file or directory", ConcatDir(fc.chdir, finddir).c_str()); } continue; } string path = finddir; unordered_map cur_read_dirs; if (!base->RunFind(fc, loc, 0, &path, &cur_read_dirs, results)) { LOG("FindEmulator: RunFind failed: %s", cmd.c_str()); return false; } } if (results.size() > 0) { // Calculate and reserve necessary space in out size_t new_length = 0; for (const string& result : results) { new_length += result.size() + 1; } out->reserve(out->size() + new_length - 1); if (fc.type == FindCommandType::FINDLEAVES) { sort(results.begin(), results.end()); } WordWriter writer(out); for (const string& result : results) { writer.Write(result); } } LOG("FindEmulator: OK"); return true; } private: static unsigned char GetDtTypeFromStat(const struct stat& st) { if (S_ISREG(st.st_mode)) { return DT_REG; } else if (S_ISDIR(st.st_mode)) { return DT_DIR; } else if (S_ISCHR(st.st_mode)) { return DT_CHR; } else if (S_ISBLK(st.st_mode)) { return DT_BLK; } else if (S_ISFIFO(st.st_mode)) { return DT_FIFO; } else if (S_ISLNK(st.st_mode)) { return DT_LNK; } else if (S_ISSOCK(st.st_mode)) { return DT_SOCK; } else { return DT_UNKNOWN; } } static unsigned char GetDtType(const string& path) { struct stat st; if (lstat(path.c_str(), &st)) { PERROR("stat for %s", path.c_str()); } return GetDtTypeFromStat(st); } DirentNode* ConstructDirectoryTree(const string& path) { DIR* dir = opendir(path.empty() ? "." 
: path.c_str()); if (!dir) { if (errno == ENOENT || errno == EACCES) { LOG("opendir failed: %s", path.c_str()); return NULL; } else { PERROR("opendir failed: %s", path.c_str()); } } DirentDirNode* n = new DirentDirNode(path); struct dirent* ent; while ((ent = readdir(dir)) != NULL) { if (!strcmp(ent->d_name, ".") || !strcmp(ent->d_name, "..") || !strcmp(ent->d_name, ".repo") || !strcmp(ent->d_name, ".git")) continue; string npath = path; if (!path.empty()) npath += '/'; npath += ent->d_name; DirentNode* c = NULL; auto d_type = ent->d_type; if (d_type == DT_UNKNOWN) { d_type = GetDtType(npath); CHECK(d_type != DT_UNKNOWN); } if (d_type == DT_DIR) { c = ConstructDirectoryTree(npath); if (c == NULL) { continue; } } else if (d_type == DT_LNK) { auto s = new DirentSymlinkNode(npath); symlinks_.push_back(make_pair(npath, s)); c = s; } else { c = new DirentFileNode(npath, d_type); } node_cnt_++; n->Add(ent->d_name, c); } closedir(dir); return n; } void ResolveSymlinks() { vector> symlinks; symlinks.swap(symlinks_); for (const auto& p : symlinks) { const string& path = p.first; DirentSymlinkNode* s = p.second; char buf[PATH_MAX + 1]; buf[PATH_MAX] = 0; ssize_t len = readlink(path.c_str(), buf, PATH_MAX); if (len < 0) { WARN("readlink failed: %s", path.c_str()); continue; } buf[len] = 0; struct stat st; unsigned char type = DT_UNKNOWN; if (stat(path.c_str(), &st) == 0) { type = GetDtTypeFromStat(st); } else { s->set_errno(errno); LOG("stat failed: %s: %s", path.c_str(), strerror(errno)); } if (*buf != '/') { const string npath = ConcatDir(Dirname(path), buf); bool should_fallback = false; const DirentNode* to = FindDir(npath, &should_fallback); if (to) { s->set_to(to); continue; } } if (type == DT_DIR) { if (path.find('/') == string::npos) { DirentNode* dir = ConstructDirectoryTree(path); if (dir != NULL) { s->set_to(dir); } else { s->set_errno(errno); } } } else if (type != DT_LNK && type != DT_UNKNOWN) { s->set_to(new DirentFileNode(path, type)); } } if 
(!symlinks_.empty()) ResolveSymlinks(); } unique_ptr root_; vector> symlinks_; int node_cnt_; bool is_initialized_; }; } // namespace FindCommand::FindCommand() : follows_symlinks(false), depth(INT_MAX), mindepth(INT_MIN), redirect_to_devnull(false), found_files(new vector()), read_dirs(new unordered_set()) {} FindCommand::~FindCommand() {} bool FindCommand::Parse(const string& cmd) { FindCommandParser fcp(cmd, this); if (!HasWord(cmd, "find") && !HasWord(cmd, "build/tools/findleaves.py") && !HasWord(cmd, "build/make/tools/findleaves.py")) return false; if (!fcp.Parse()) return false; NormalizePath(&chdir); NormalizePath(&testdir); if (finddirs.empty()) finddirs.push_back("."); return true; } FindEmulator* FindEmulator::Get() { return g_instance; } void InitFindEmulator() { new FindEmulatorImpl(); } find.h0100644 0000000 0000000 00000003332 13654546140 010707 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
#ifndef FIND_H_ #define FIND_H_ #include #include #include #include #include "loc.h" #include "string_piece.h" using namespace std; class FindCond; enum struct FindCommandType { FIND, FINDLEAVES, LS, }; struct FindCommand { FindCommand(); ~FindCommand(); bool Parse(const string& cmd); FindCommandType type; string chdir; string testdir; vector finddirs; bool follows_symlinks; unique_ptr print_cond; unique_ptr prune_cond; int depth; int mindepth; bool redirect_to_devnull; unique_ptr> found_files; unique_ptr> read_dirs; private: FindCommand(const FindCommand&) = delete; void operator=(FindCommand) = delete; }; class FindEmulator { public: virtual ~FindEmulator() = default; virtual bool HandleFind(const string& cmd, const FindCommand& fc, const Loc& loc, string* out) = 0; static FindEmulator* Get(); protected: FindEmulator() = default; }; void InitFindEmulator(); #endif // FIND_H_ find_test.cc0100644 0000000 0000000 00000010515 13654546140 012105 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "find.h" #include #include #include #include "fileutil.h" #include "strutil.h" int FindUnitTests(); int main(int argc, char* argv[]) { if (argc == 1) { return FindUnitTests(); } InitFindEmulator(); string cmd; for (int i = 1; i < argc; i++) { if (i > 1) cmd += ' '; cmd += argv[i]; } FindCommand fc; if (!fc.Parse(cmd)) { fprintf(stderr, "Find emulator does not support this command\n"); return 1; } string out; if (!FindEmulator::Get()->HandleFind(cmd, fc, Loc(), &out)) { fprintf(stderr, "Find emulator does not support this command\n"); return 1; } for (StringPiece tok : WordScanner(out)) { printf("%.*s\n", SPF(tok)); } } string Run(const string& cmd) { string s; int ret = RunCommand("/bin/sh", "-c", cmd, RedirectStderr::NONE, &s); if (ret != 0) { fprintf(stderr, "Failed to run `%s`\n", cmd.c_str()); exit(ret); } return s; } static bool unit_test_failed = false; void CompareFind(const string& cmd) { string native = Run(cmd); FindCommand fc; if (!fc.Parse(cmd)) { fprintf(stderr, "Find emulator cannot parse `%s`\n", cmd.c_str()); exit(1); } string emulated; if (!FindEmulator::Get()->HandleFind(cmd, fc, Loc(), &emulated)) { fprintf(stderr, "Find emulator cannot parse `%s`\n", cmd.c_str()); exit(1); } vector nativeWords; vector emulatedWords; WordScanner(native).Split(&nativeWords); WordScanner(emulated).Split(&emulatedWords); if (nativeWords != emulatedWords) { fprintf(stderr, "Failed to match `%s`:\n", cmd.c_str()); auto nativeIter = nativeWords.begin(); auto emulatedIter = emulatedWords.begin(); fprintf(stderr, "%-20s %-20s\n", "Native:", "Emulated:"); while (nativeIter != nativeWords.end() || emulatedIter != emulatedWords.end()) { fprintf(stderr, " %-20s %-20s\n", (nativeIter == nativeWords.end()) ? "" : (*nativeIter++).as_string().c_str(), (emulatedIter == emulatedWords.end()) ? 
"" : (*emulatedIter++).as_string().c_str()); } fprintf(stderr, "------------------------------------------\n"); unit_test_failed = true; } } void ExpectParseFailure(const string& cmd) { Run(cmd); FindCommand fc; if (fc.Parse(cmd)) { fprintf(stderr, "Expected parse failure for `%s`\n", cmd.c_str()); fprintf(stderr, "------------------------------------------\n"); unit_test_failed = true; } } int FindUnitTests() { Run("rm -rf out/find"); Run("mkdir -p out/find"); if (chdir("out/find")) { perror("Failed to chdir(out/find)"); return 1; } // Set up files under out/find: // drwxr-x--- top // lrwxrwxrwx top/E -> missing // lrwxrwxrwx top/C -> A // -rw-r----- top/a // drwxr-x--- top/A // lrwxrwxrwx top/A/D -> B // -rw-r----- top/A/b // drwxr-x--- top/A/B // -rw-r----- top/A/B/z Run("mkdir -p top/A/B"); Run("cd top && ln -s A C"); Run("cd top/A && ln -s B D"); Run("cd top && ln -s missing E"); Run("touch top/a top/A/b top/A/B/z"); InitFindEmulator(); CompareFind("find ."); CompareFind("find -L ."); CompareFind("find top/C"); CompareFind("find top/C/."); CompareFind("find -L top/C"); CompareFind("find -L top/C/."); CompareFind("cd top && find C"); CompareFind("cd top && find -L C"); CompareFind("cd top/C && find ."); CompareFind("cd top/C && find D/./z"); CompareFind("find .//top"); CompareFind("find top -type f -name 'a*' -o -name \\*b"); CompareFind("find top \\! -name 'a*'"); CompareFind("find top \\( -name 'a*' \\)"); ExpectParseFailure("find top -name a\\*"); return unit_test_failed ? 1 : 0; } flags.cc0100644 0000000 0000000 00000014700 13654546140 011222 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "flags.h" #include #include #include "log.h" #include "strutil.h" Flags g_flags; static bool ParseCommandLineOptionWithArg(StringPiece option, char* argv[], int* index, const char** out_arg) { const char* arg = argv[*index]; if (!HasPrefix(arg, option)) return false; if (arg[option.size()] == '\0') { ++*index; *out_arg = argv[*index]; return true; } if (arg[option.size()] == '=') { *out_arg = arg + option.size() + 1; return true; } // E.g, -j999 if (option.size() == 2) { *out_arg = arg + option.size(); return true; } return false; } void Flags::Parse(int argc, char** argv) { subkati_args.push_back(argv[0]); num_jobs = num_cpus = sysconf(_SC_NPROCESSORS_ONLN); const char* num_jobs_str; const char* writable_str; if (const char* makeflags = getenv("MAKEFLAGS")) { for (StringPiece tok : WordScanner(makeflags)) { if (!HasPrefix(tok, "-") && tok.find('=') != string::npos) cl_vars.push_back(tok); } } for (int i = 1; i < argc; i++) { const char* arg = argv[i]; bool should_propagate = true; int pi = i; if (!strcmp(arg, "-f")) { makefile = argv[++i]; should_propagate = false; } else if (!strcmp(arg, "-c")) { is_syntax_check_only = true; } else if (!strcmp(arg, "-i")) { is_dry_run = true; } else if (!strcmp(arg, "-s")) { is_silent_mode = true; } else if (!strcmp(arg, "-d")) { enable_debug = true; } else if (!strcmp(arg, "--kati_stats")) { enable_stat_logs = true; } else if (!strcmp(arg, "--warn")) { enable_kati_warnings = true; } else if (!strcmp(arg, "--ninja")) { generate_ninja = true; } else if (!strcmp(arg, "--empty_ninja_file")) { 
generate_empty_ninja = true; } else if (!strcmp(arg, "--gen_all_targets")) { gen_all_targets = true; } else if (!strcmp(arg, "--regen")) { // TODO: Make this default. regen = true; } else if (!strcmp(arg, "--regen_debug")) { regen_debug = true; } else if (!strcmp(arg, "--regen_ignoring_kati_binary")) { regen_ignoring_kati_binary = true; } else if (!strcmp(arg, "--dump_kati_stamp")) { dump_kati_stamp = true; regen_debug = true; } else if (!strcmp(arg, "--detect_android_echo")) { detect_android_echo = true; } else if (!strcmp(arg, "--detect_depfiles")) { detect_depfiles = true; } else if (!strcmp(arg, "--color_warnings")) { color_warnings = true; } else if (!strcmp(arg, "--no_builtin_rules")) { no_builtin_rules = true; } else if (!strcmp(arg, "--no_ninja_prelude")) { no_ninja_prelude = true; } else if (!strcmp(arg, "--werror_find_emulator")) { werror_find_emulator = true; } else if (!strcmp(arg, "--werror_overriding_commands")) { werror_overriding_commands = true; } else if (!strcmp(arg, "--warn_implicit_rules")) { warn_implicit_rules = true; } else if (!strcmp(arg, "--werror_implicit_rules")) { werror_implicit_rules = true; } else if (!strcmp(arg, "--warn_suffix_rules")) { warn_suffix_rules = true; } else if (!strcmp(arg, "--werror_suffix_rules")) { werror_suffix_rules = true; } else if (!strcmp(arg, "--top_level_phony")) { top_level_phony = true; } else if (!strcmp(arg, "--warn_real_to_phony")) { warn_real_to_phony = true; } else if (!strcmp(arg, "--werror_real_to_phony")) { warn_real_to_phony = true; werror_real_to_phony = true; } else if (!strcmp(arg, "--warn_phony_looks_real")) { warn_phony_looks_real = true; } else if (!strcmp(arg, "--werror_phony_looks_real")) { warn_phony_looks_real = true; werror_phony_looks_real = true; } else if (!strcmp(arg, "--werror_writable")) { werror_writable = true; } else if (ParseCommandLineOptionWithArg("-j", argv, &i, &num_jobs_str)) { num_jobs = strtol(num_jobs_str, NULL, 10); if (num_jobs <= 0) { ERROR("Invalid -j flag: %s", 
num_jobs_str); } } else if (ParseCommandLineOptionWithArg("--remote_num_jobs", argv, &i, &num_jobs_str)) { remote_num_jobs = strtol(num_jobs_str, NULL, 10); if (remote_num_jobs <= 0) { ERROR("Invalid -j flag: %s", num_jobs_str); } } else if (ParseCommandLineOptionWithArg("--ninja_suffix", argv, &i, &ninja_suffix)) { } else if (ParseCommandLineOptionWithArg("--ninja_dir", argv, &i, &ninja_dir)) { } else if (!strcmp(arg, "--use_find_emulator")) { use_find_emulator = true; } else if (ParseCommandLineOptionWithArg("--goma_dir", argv, &i, &goma_dir)) { } else if (ParseCommandLineOptionWithArg( "--ignore_optional_include", argv, &i, &ignore_optional_include_pattern)) { } else if (ParseCommandLineOptionWithArg("--ignore_dirty", argv, &i, &ignore_dirty_pattern)) { } else if (ParseCommandLineOptionWithArg("--no_ignore_dirty", argv, &i, &no_ignore_dirty_pattern)) { } else if (ParseCommandLineOptionWithArg("--writable", argv, &i, &writable_str)) { writable.push_back(writable_str); } else if (arg[0] == '-') { ERROR("Unknown flag: %s", arg); } else { if (strchr(arg, '=')) { cl_vars.push_back(arg); } else { should_propagate = false; targets.push_back(Intern(arg)); } } if (should_propagate) { for (; pi <= i; pi++) { subkati_args.push_back(argv[pi]); } } } } flags.go0100644 0000000 0000000 00000001472 13654546140 011244 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati // Flags to control kati. 
var ( StatsFlag bool PeriodicStatsFlag bool EvalStatsFlag bool DryRunFlag bool UseFindEmulator bool UseShellBuiltins bool IgnoreOptionalInclude string ) flags.h0100644 0000000 0000000 00000004011 13654546140 011056 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef FLAGS_H_ #define FLAGS_H_ #include #include #include "string_piece.h" #include "symtab.h" using namespace std; struct Flags { bool detect_android_echo; bool detect_depfiles; bool dump_kati_stamp; bool enable_debug; bool enable_kati_warnings; bool enable_stat_logs; bool gen_all_targets; bool generate_ninja; bool generate_empty_ninja; bool is_dry_run; bool is_silent_mode; bool is_syntax_check_only; bool regen; bool regen_debug; bool regen_ignoring_kati_binary; bool use_find_emulator; bool color_warnings; bool no_builtin_rules; bool no_ninja_prelude; bool werror_find_emulator; bool werror_overriding_commands; bool warn_implicit_rules; bool werror_implicit_rules; bool warn_suffix_rules; bool werror_suffix_rules; bool top_level_phony; bool warn_real_to_phony; bool werror_real_to_phony; bool warn_phony_looks_real; bool werror_phony_looks_real; bool werror_writable; const char* goma_dir; const char* ignore_dirty_pattern; const char* no_ignore_dirty_pattern; const char* ignore_optional_include_pattern; const char* makefile; const char* ninja_dir; const char* ninja_suffix; int num_cpus; int num_jobs; int remote_num_jobs; vector 
subkati_args; vector targets; vector cl_vars; vector writable; void Parse(int argc, char** argv); }; extern Flags g_flags; #endif // FLAGS_H_ func.cc0100644 0000000 0000000 00000065537 13654546140 011077 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "func.h" #include #include #include #include #include #include #include #include #include #include #include #include "eval.h" #include "fileutil.h" #include "find.h" #include "log.h" #include "parser.h" #include "stats.h" #include "stmt.h" #include "strutil.h" #include "symtab.h" #include "var.h" namespace { // TODO: This code is very similar to // NinjaGenerator::TranslateCommand. Factor them out. void StripShellComment(string* cmd) { if (cmd->find('#') == string::npos) return; string res; bool prev_backslash = false; // Set space as an initial value so the leading comment will be // stripped out. 
char prev_char = ' '; char quote = 0; bool done = false; const char* in = cmd->c_str(); for (; *in && !done; in++) { switch (*in) { case '#': if (quote == 0 && isspace(prev_char)) { while (in[1] && *in != '\n') in++; break; } #if defined(__has_cpp_attribute) && __has_cpp_attribute(clang::fallthrough) [[clang::fallthrough]]; #endif case '\'': case '"': case '`': if (quote) { if (quote == *in) quote = 0; } else if (!prev_backslash) { quote = *in; } res += *in; break; case '\\': res += '\\'; break; default: res += *in; } if (*in == '\\') { prev_backslash = !prev_backslash; } else { prev_backslash = false; } prev_char = *in; } cmd->swap(res); } void PatsubstFunc(const vector& args, Evaluator* ev, string* s) { const string&& pat_str = args[0]->Eval(ev); const string&& repl = args[1]->Eval(ev); const string&& str = args[2]->Eval(ev); WordWriter ww(s); Pattern pat(pat_str); for (StringPiece tok : WordScanner(str)) { ww.MaybeAddWhitespace(); pat.AppendSubst(tok, repl, s); } } void StripFunc(const vector& args, Evaluator* ev, string* s) { const string&& str = args[0]->Eval(ev); WordWriter ww(s); for (StringPiece tok : WordScanner(str)) { ww.Write(tok); } } void SubstFunc(const vector& args, Evaluator* ev, string* s) { const string&& pat = args[0]->Eval(ev); const string&& repl = args[1]->Eval(ev); const string&& str = args[2]->Eval(ev); if (pat.empty()) { *s += str; *s += repl; return; } size_t index = 0; while (index < str.size()) { size_t found = str.find(pat, index); if (found == string::npos) break; AppendString(StringPiece(str).substr(index, found - index), s); AppendString(repl, s); index = found + pat.size(); } AppendString(StringPiece(str).substr(index), s); } void FindstringFunc(const vector& args, Evaluator* ev, string* s) { const string&& find = args[0]->Eval(ev); const string&& in = args[1]->Eval(ev); if (in.find(find) != string::npos) AppendString(find, s); } void FilterFunc(const vector& args, Evaluator* ev, string* s) { const string&& pat_buf = 
args[0]->Eval(ev); const string&& text = args[1]->Eval(ev); vector pats; for (StringPiece pat : WordScanner(pat_buf)) { pats.push_back(Pattern(pat)); } WordWriter ww(s); for (StringPiece tok : WordScanner(text)) { for (const Pattern& pat : pats) { if (pat.Match(tok)) { ww.Write(tok); break; } } } } void FilterOutFunc(const vector& args, Evaluator* ev, string* s) { const string&& pat_buf = args[0]->Eval(ev); const string&& text = args[1]->Eval(ev); vector pats; for (StringPiece pat : WordScanner(pat_buf)) { pats.push_back(Pattern(pat)); } WordWriter ww(s); for (StringPiece tok : WordScanner(text)) { bool matched = false; for (const Pattern& pat : pats) { if (pat.Match(tok)) { matched = true; break; } } if (!matched) ww.Write(tok); } } void SortFunc(const vector& args, Evaluator* ev, string* s) { string list; args[0]->Eval(ev, &list); COLLECT_STATS("func sort time"); // TODO(hamaji): Probably we could use a faster string-specific sort // algorithm. vector toks; WordScanner(list).Split(&toks); stable_sort(toks.begin(), toks.end()); WordWriter ww(s); StringPiece prev; for (StringPiece tok : toks) { if (prev != tok) { ww.Write(tok); prev = tok; } } } static int GetNumericValueForFunc(const string& buf) { StringPiece s = TrimLeftSpace(buf); char* end; long n = strtol(s.data(), &end, 10); if (n < 0 || n == LONG_MAX || s.data() + s.size() != end) { return -1; } return n; } void WordFunc(const vector& args, Evaluator* ev, string* s) { const string&& n_str = args[0]->Eval(ev); int n = GetNumericValueForFunc(n_str); if (n < 0) { ev->Error( StringPrintf("*** non-numeric first argument to `word' function: '%s'.", n_str.c_str())); } if (n == 0) { ev->Error("*** first argument to `word' function must be greater than 0."); } const string&& text = args[1]->Eval(ev); for (StringPiece tok : WordScanner(text)) { n--; if (n == 0) { AppendString(tok, s); break; } } } void WordlistFunc(const vector& args, Evaluator* ev, string* s) { const string&& s_str = args[0]->Eval(ev); int si = 
GetNumericValueForFunc(s_str); if (si < 0) { ev->Error(StringPrintf( "*** non-numeric first argument to `wordlist' function: '%s'.", s_str.c_str())); } if (si == 0) { ev->Error( StringPrintf("*** invalid first argument to `wordlist' function: %s`", s_str.c_str())); } const string&& e_str = args[1]->Eval(ev); int ei = GetNumericValueForFunc(e_str); if (ei < 0) { ev->Error(StringPrintf( "*** non-numeric second argument to `wordlist' function: '%s'.", e_str.c_str())); } const string&& text = args[2]->Eval(ev); int i = 0; WordWriter ww(s); for (StringPiece tok : WordScanner(text)) { i++; if (si <= i && i <= ei) { ww.Write(tok); } } } void WordsFunc(const vector& args, Evaluator* ev, string* s) { const string&& text = args[0]->Eval(ev); WordScanner ws(text); int n = 0; for (auto iter = ws.begin(); iter != ws.end(); ++iter) n++; char buf[32]; sprintf(buf, "%d", n); *s += buf; } void FirstwordFunc(const vector& args, Evaluator* ev, string* s) { const string&& text = args[0]->Eval(ev); for (StringPiece tok : WordScanner(text)) { AppendString(tok, s); return; } } void LastwordFunc(const vector& args, Evaluator* ev, string* s) { const string&& text = args[0]->Eval(ev); StringPiece last; for (StringPiece tok : WordScanner(text)) { last = tok; } AppendString(last, s); } void JoinFunc(const vector& args, Evaluator* ev, string* s) { const string&& list1 = args[0]->Eval(ev); const string&& list2 = args[1]->Eval(ev); WordScanner ws1(list1); WordScanner ws2(list2); WordWriter ww(s); WordScanner::Iterator iter1, iter2; for (iter1 = ws1.begin(), iter2 = ws2.begin(); iter1 != ws1.end() && iter2 != ws2.end(); ++iter1, ++iter2) { ww.Write(*iter1); // Use |AppendString| not to append extra ' '. 
AppendString(*iter2, s); } for (; iter1 != ws1.end(); ++iter1) ww.Write(*iter1); for (; iter2 != ws2.end(); ++iter2) ww.Write(*iter2); } void WildcardFunc(const vector& args, Evaluator* ev, string* s) { const string&& pat = args[0]->Eval(ev); COLLECT_STATS("func wildcard time"); // Note GNU make does not delay the execution of $(wildcard) so we // do not need to check avoid_io here. WordWriter ww(s); vector* files; for (StringPiece tok : WordScanner(pat)) { ScopedTerminator st(tok); Glob(tok.data(), &files); for (const string& file : *files) { ww.Write(file); } } } void DirFunc(const vector& args, Evaluator* ev, string* s) { const string&& text = args[0]->Eval(ev); WordWriter ww(s); for (StringPiece tok : WordScanner(text)) { ww.Write(Dirname(tok)); s->push_back('/'); } } void NotdirFunc(const vector& args, Evaluator* ev, string* s) { const string&& text = args[0]->Eval(ev); WordWriter ww(s); for (StringPiece tok : WordScanner(text)) { if (tok == "/") { ww.Write(StringPiece("")); } else { ww.Write(Basename(tok)); } } } void SuffixFunc(const vector& args, Evaluator* ev, string* s) { const string&& text = args[0]->Eval(ev); WordWriter ww(s); for (StringPiece tok : WordScanner(text)) { StringPiece suf = GetExt(tok); if (!suf.empty()) ww.Write(suf); } } void BasenameFunc(const vector& args, Evaluator* ev, string* s) { const string&& text = args[0]->Eval(ev); WordWriter ww(s); for (StringPiece tok : WordScanner(text)) { ww.Write(StripExt(tok)); } } void AddsuffixFunc(const vector& args, Evaluator* ev, string* s) { const string&& suf = args[0]->Eval(ev); const string&& text = args[1]->Eval(ev); WordWriter ww(s); for (StringPiece tok : WordScanner(text)) { ww.Write(tok); *s += suf; } } void AddprefixFunc(const vector& args, Evaluator* ev, string* s) { const string&& pre = args[0]->Eval(ev); const string&& text = args[1]->Eval(ev); WordWriter ww(s); for (StringPiece tok : WordScanner(text)) { ww.Write(pre); AppendString(tok, s); } } void RealpathFunc(const vector& args, 
Evaluator* ev, string* s) { const string&& text = args[0]->Eval(ev); if (ev->avoid_io()) { *s += "$("; string kati_binary; GetExecutablePath(&kati_binary); *s += kati_binary; *s += " --realpath "; *s += text; *s += " 2> /dev/null)"; return; } WordWriter ww(s); for (StringPiece tok : WordScanner(text)) { ScopedTerminator st(tok); char buf[PATH_MAX]; if (realpath(tok.data(), buf)) ww.Write(buf); } } void AbspathFunc(const vector& args, Evaluator* ev, string* s) { const string&& text = args[0]->Eval(ev); WordWriter ww(s); string buf; for (StringPiece tok : WordScanner(text)) { AbsPath(tok, &buf); ww.Write(buf); } } void IfFunc(const vector& args, Evaluator* ev, string* s) { const string&& cond = args[0]->Eval(ev); if (cond.empty()) { if (args.size() > 2) args[2]->Eval(ev, s); } else { args[1]->Eval(ev, s); } } void AndFunc(const vector& args, Evaluator* ev, string* s) { string cond; for (Value* a : args) { cond = a->Eval(ev); if (cond.empty()) return; } if (!cond.empty()) { *s += cond; } } void OrFunc(const vector& args, Evaluator* ev, string* s) { for (Value* a : args) { const string&& cond = a->Eval(ev); if (!cond.empty()) { *s += cond; return; } } } void ValueFunc(const vector& args, Evaluator* ev, string* s) { const string&& var_name = args[0]->Eval(ev); Var* var = ev->LookupVar(Intern(var_name)); AppendString(var->String().as_string(), s); } void EvalFunc(const vector& args, Evaluator* ev, string*) { // TODO: eval leaks everything... for now. // const string text = args[0]->Eval(ev); ev->CheckStack(); string* text = new string; args[0]->Eval(ev, text); if (ev->avoid_io()) { KATI_WARN_LOC(ev->loc(), "*warning*: $(eval) in a recipe is not recommended: %s", text->c_str()); } vector stmts; Parse(*text, ev->loc(), &stmts); for (Stmt* stmt : stmts) { LOG("%s", stmt->DebugString().c_str()); stmt->Eval(ev); // delete stmt; } } //#define TEST_FIND_EMULATOR // A hack for Android build. 
We need to evaluate things like $((3+4)) // when we emit ninja file, because the result of such expressions // will be passed to other make functions. // TODO: Maybe we should introduce a helper binary which evaluate // make expressions at ninja-time. static bool HasNoIoInShellScript(const string& cmd) { if (cmd.empty()) return true; if (HasPrefix(cmd, "echo $((") && cmd[cmd.size() - 1] == ')') return true; return false; } static void ShellFuncImpl(const string& shell, const string& shellflag, const string& cmd, const Loc& loc, string* s, FindCommand** fc) { LOG("ShellFunc: %s", cmd.c_str()); #ifdef TEST_FIND_EMULATOR bool need_check = false; string out2; #endif if (FindEmulator::Get()) { *fc = new FindCommand(); if ((*fc)->Parse(cmd)) { #ifdef TEST_FIND_EMULATOR if (FindEmulator::Get()->HandleFind(cmd, **fc, loc, &out2)) { need_check = true; } #else if (FindEmulator::Get()->HandleFind(cmd, **fc, loc, s)) { return; } #endif } delete *fc; *fc = NULL; } COLLECT_STATS_WITH_SLOW_REPORT("func shell time", cmd.c_str()); RunCommand(shell, shellflag, cmd, RedirectStderr::NONE, s); FormatForCommandSubstitution(s); #ifdef TEST_FIND_EMULATOR if (need_check) { if (*s != out2) { ERROR("FindEmulator is broken: %s\n%s\nvs\n%s", cmd.c_str(), s->c_str(), out2.c_str()); } } #endif } static vector g_command_results; bool ShouldStoreCommandResult(StringPiece cmd) { // We really just want to ignore this one, or remove BUILD_DATETIME from // Android completely if (cmd == "date +%s") return false; Pattern pat(g_flags.ignore_dirty_pattern); Pattern nopat(g_flags.no_ignore_dirty_pattern); for (StringPiece tok : WordScanner(cmd)) { if (pat.Match(tok) && !nopat.Match(tok)) { return false; } } return true; } void ShellFunc(const vector& args, Evaluator* ev, string* s) { string cmd = args[0]->Eval(ev); if (ev->avoid_io() && !HasNoIoInShellScript(cmd)) { if (ev->eval_depth() > 1) { ERROR_LOC(ev->loc(), "kati doesn't support passing results of $(shell) " "to other make constructs: %s", 
cmd.c_str()); } StripShellComment(&cmd); *s += "$("; *s += cmd; *s += ")"; return; } const string&& shell = ev->GetShell(); const string&& shellflag = ev->GetShellFlag(); string out; FindCommand* fc = NULL; ShellFuncImpl(shell, shellflag, cmd, ev->loc(), &out, &fc); if (ShouldStoreCommandResult(cmd)) { CommandResult* cr = new CommandResult(); cr->op = (fc == NULL) ? CommandOp::SHELL : CommandOp::FIND, cr->shell = shell; cr->shellflag = shellflag; cr->cmd = cmd; cr->find.reset(fc); cr->result = out; g_command_results.push_back(cr); } *s += out; } void CallFunc(const vector& args, Evaluator* ev, string* s) { static const Symbol tmpvar_names[] = { Intern("0"), Intern("1"), Intern("2"), Intern("3"), Intern("4"), Intern("5"), Intern("6"), Intern("7"), Intern("8"), Intern("9")}; ev->CheckStack(); const string&& func_name_buf = args[0]->Eval(ev); Symbol func_sym = Intern(TrimSpace(func_name_buf)); Var* func = ev->LookupVar(func_sym); func->Used(ev, func_sym); if (!func->IsDefined()) { KATI_WARN_LOC(ev->loc(), "*warning*: undefined user function: %s", func_sym.c_str()); } vector> av; for (size_t i = 1; i < args.size(); i++) { unique_ptr s( new SimpleVar(args[i]->Eval(ev), VarOrigin::AUTOMATIC)); av.push_back(move(s)); } vector> sv; for (size_t i = 1;; i++) { string s; Symbol tmpvar_name_sym; if (i < sizeof(tmpvar_names) / sizeof(tmpvar_names[0])) { tmpvar_name_sym = tmpvar_names[i]; } else { s = StringPrintf("%d", i); tmpvar_name_sym = Intern(s); } if (i < args.size()) { sv.emplace_back(new ScopedGlobalVar(tmpvar_name_sym, av[i - 1].get())); } else { // We need to blank further automatic vars Var* v = ev->LookupVar(tmpvar_name_sym); if (!v->IsDefined()) break; if (v->Origin() != VarOrigin::AUTOMATIC) break; av.emplace_back(new SimpleVar("", VarOrigin::AUTOMATIC)); sv.emplace_back(new ScopedGlobalVar(tmpvar_name_sym, av[i - 1].get())); } } ev->DecrementEvalDepth(); func->Eval(ev, s); ev->IncrementEvalDepth(); } void ForeachFunc(const vector& args, Evaluator* ev, string* s) 
{ const string&& varname = args[0]->Eval(ev); const string&& list = args[1]->Eval(ev); ev->DecrementEvalDepth(); WordWriter ww(s); for (StringPiece tok : WordScanner(list)) { unique_ptr v( new SimpleVar(tok.as_string(), VarOrigin::AUTOMATIC)); ScopedGlobalVar sv(Intern(varname), v.get()); ww.MaybeAddWhitespace(); args[2]->Eval(ev, s); } ev->IncrementEvalDepth(); } void OriginFunc(const vector& args, Evaluator* ev, string* s) { const string&& var_name = args[0]->Eval(ev); Var* var = ev->LookupVar(Intern(var_name)); *s += GetOriginStr(var->Origin()); } void FlavorFunc(const vector& args, Evaluator* ev, string* s) { const string&& var_name = args[0]->Eval(ev); Var* var = ev->LookupVar(Intern(var_name)); *s += var->Flavor(); } void InfoFunc(const vector& args, Evaluator* ev, string*) { const string&& a = args[0]->Eval(ev); if (ev->avoid_io()) { ev->add_delayed_output_command( StringPrintf("echo -e \"%s\"", EchoEscape(a).c_str())); return; } printf("%s\n", a.c_str()); fflush(stdout); } void WarningFunc(const vector& args, Evaluator* ev, string*) { const string&& a = args[0]->Eval(ev); if (ev->avoid_io()) { ev->add_delayed_output_command(StringPrintf( "echo -e \"%s:%d: %s\" 2>&1", LOCF(ev->loc()), EchoEscape(a).c_str())); return; } WARN_LOC(ev->loc(), "%s", a.c_str()); } void ErrorFunc(const vector& args, Evaluator* ev, string*) { const string&& a = args[0]->Eval(ev); if (ev->avoid_io()) { ev->add_delayed_output_command( StringPrintf("echo -e \"%s:%d: *** %s.\" 2>&1 && false", LOCF(ev->loc()), EchoEscape(a).c_str())); return; } ev->Error(StringPrintf("*** %s.", a.c_str())); } static void FileReadFunc(Evaluator* ev, const string& filename, string* s) { int fd = open(filename.c_str(), O_RDONLY); if (fd < 0) { if (errno == ENOENT) { if (ShouldStoreCommandResult(filename)) { CommandResult* cr = new CommandResult(); cr->op = CommandOp::READ_MISSING; cr->cmd = filename; g_command_results.push_back(cr); } return; } else { ev->Error("*** open failed."); } } struct stat st; if 
(fstat(fd, &st) < 0) { ev->Error("*** fstat failed."); } size_t len = st.st_size; string out; out.resize(len); ssize_t r = HANDLE_EINTR(read(fd, &out[0], len)); if (r != static_cast(len)) { ev->Error("*** read failed."); } if (close(fd) < 0) { ev->Error("*** close failed."); } if (out.back() == '\n') { out.pop_back(); } if (ShouldStoreCommandResult(filename)) { CommandResult* cr = new CommandResult(); cr->op = CommandOp::READ; cr->cmd = filename; g_command_results.push_back(cr); } *s += out; } static void FileWriteFunc(Evaluator* ev, const string& filename, bool append, string text) { FILE* f = fopen(filename.c_str(), append ? "ab" : "wb"); if (f == NULL) { ev->Error("*** fopen failed."); } if (fwrite(&text[0], text.size(), 1, f) != 1) { ev->Error("*** fwrite failed."); } if (fclose(f) != 0) { ev->Error("*** fclose failed."); } if (ShouldStoreCommandResult(filename)) { CommandResult* cr = new CommandResult(); cr->op = CommandOp::WRITE; cr->cmd = filename; cr->result = text; g_command_results.push_back(cr); } } void FileFunc(const vector& args, Evaluator* ev, string* s) { if (ev->avoid_io()) { ev->Error("*** $(file ...) 
is not supported in rules."); } string arg = args[0]->Eval(ev); StringPiece filename = TrimSpace(arg); if (filename.size() <= 1) { ev->Error("*** Missing filename"); } if (filename[0] == '<') { filename = TrimLeftSpace(filename.substr(1)); if (!filename.size()) { ev->Error("*** Missing filename"); } if (args.size() > 1) { ev->Error("*** invalid argument"); } FileReadFunc(ev, filename.as_string(), s); } else if (filename[0] == '>') { bool append = false; if (filename[1] == '>') { append = true; filename = filename.substr(2); } else { filename = filename.substr(1); } filename = TrimLeftSpace(filename); if (!filename.size()) { ev->Error("*** Missing filename"); } string text; if (args.size() > 1) { text = args[1]->Eval(ev); if (text.size() == 0 || text.back() != '\n') { text.push_back('\n'); } } FileWriteFunc(ev, filename.as_string(), append, text); } else { ev->Error(StringPrintf("*** Invalid file operation: %s. Stop.", filename.as_string().c_str())); } } void DeprecatedVarFunc(const vector& args, Evaluator* ev, string*) { string vars_str = args[0]->Eval(ev); string msg; if (args.size() == 2) { msg = ". " + args[1]->Eval(ev); } if (ev->avoid_io()) { ev->Error("*** $(KATI_deprecated_var ...) is not supported in rules."); } for (StringPiece var : WordScanner(vars_str)) { Symbol sym = Intern(var); Var* v = ev->PeekVar(sym); if (!v->IsDefined()) { v = new SimpleVar(VarOrigin::FILE); sym.SetGlobalVar(v, false, nullptr); } if (v->Deprecated()) { ev->Error( StringPrintf("*** Cannot call KATI_deprecated_var on already " "deprecated variable: %s.", sym.c_str())); } else if (v->Obsolete()) { ev->Error( StringPrintf("*** Cannot call KATI_deprecated_var on already " "obsolete variable: %s.", sym.c_str())); } v->SetDeprecated(msg); } } void ObsoleteVarFunc(const vector& args, Evaluator* ev, string*) { string vars_str = args[0]->Eval(ev); string msg; if (args.size() == 2) { msg = ". " + args[1]->Eval(ev); } if (ev->avoid_io()) { ev->Error("*** $(KATI_obsolete_var ...) 
is not supported in rules."); } for (StringPiece var : WordScanner(vars_str)) { Symbol sym = Intern(var); Var* v = ev->PeekVar(sym); if (!v->IsDefined()) { v = new SimpleVar(VarOrigin::FILE); sym.SetGlobalVar(v, false, nullptr); } if (v->Deprecated()) { ev->Error( StringPrintf("*** Cannot call KATI_obsolete_var on already " "deprecated variable: %s.", sym.c_str())); } else if (v->Obsolete()) { ev->Error(StringPrintf( "*** Cannot call KATI_obsolete_var on already obsolete variable: %s.", sym.c_str())); } v->SetObsolete(msg); } } void DeprecateExportFunc(const vector& args, Evaluator* ev, string*) { string msg = ". " + args[0]->Eval(ev); if (ev->avoid_io()) { ev->Error("*** $(KATI_deprecate_export) is not supported in rules."); } if (ev->ExportObsolete()) { ev->Error("*** Export is already obsolete."); } else if (ev->ExportDeprecated()) { ev->Error("*** Export is already deprecated."); } ev->SetExportDeprecated(msg); } void ObsoleteExportFunc(const vector& args, Evaluator* ev, string*) { string msg = ". 
" + args[0]->Eval(ev); if (ev->avoid_io()) { ev->Error("*** $(KATI_obsolete_export) is not supported in rules."); } if (ev->ExportObsolete()) { ev->Error("*** Export is already obsolete."); } ev->SetExportObsolete(msg); } FuncInfo g_func_infos[] = { {"patsubst", &PatsubstFunc, 3, 3, false, false}, {"strip", &StripFunc, 1, 1, false, false}, {"subst", &SubstFunc, 3, 3, false, false}, {"findstring", &FindstringFunc, 2, 2, false, false}, {"filter", &FilterFunc, 2, 2, false, false}, {"filter-out", &FilterOutFunc, 2, 2, false, false}, {"sort", &SortFunc, 1, 1, false, false}, {"word", &WordFunc, 2, 2, false, false}, {"wordlist", &WordlistFunc, 3, 3, false, false}, {"words", &WordsFunc, 1, 1, false, false}, {"firstword", &FirstwordFunc, 1, 1, false, false}, {"lastword", &LastwordFunc, 1, 1, false, false}, {"join", &JoinFunc, 2, 2, false, false}, {"wildcard", &WildcardFunc, 1, 1, false, false}, {"dir", &DirFunc, 1, 1, false, false}, {"notdir", &NotdirFunc, 1, 1, false, false}, {"suffix", &SuffixFunc, 1, 1, false, false}, {"basename", &BasenameFunc, 1, 1, false, false}, {"addsuffix", &AddsuffixFunc, 2, 2, false, false}, {"addprefix", &AddprefixFunc, 2, 2, false, false}, {"realpath", &RealpathFunc, 1, 1, false, false}, {"abspath", &AbspathFunc, 1, 1, false, false}, {"if", &IfFunc, 3, 2, false, true}, {"and", &AndFunc, 0, 0, true, false}, {"or", &OrFunc, 0, 0, true, false}, {"value", &ValueFunc, 1, 1, false, false}, {"eval", &EvalFunc, 1, 1, false, false}, {"shell", &ShellFunc, 1, 1, false, false}, {"call", &CallFunc, 0, 0, false, false}, {"foreach", &ForeachFunc, 3, 3, false, false}, {"origin", &OriginFunc, 1, 1, false, false}, {"flavor", &FlavorFunc, 1, 1, false, false}, {"info", &InfoFunc, 1, 1, false, false}, {"warning", &WarningFunc, 1, 1, false, false}, {"error", &ErrorFunc, 1, 1, false, false}, {"file", &FileFunc, 2, 1, false, false}, /* Kati custom extension functions */ {"KATI_deprecated_var", &DeprecatedVarFunc, 2, 1, false, false}, {"KATI_obsolete_var", 
&ObsoleteVarFunc, 2, 1, false, false}, {"KATI_deprecate_export", &DeprecateExportFunc, 1, 1, false, false}, {"KATI_obsolete_export", &ObsoleteExportFunc, 1, 1, false, false}, }; unordered_map* g_func_info_map; } // namespace void InitFuncTable() { g_func_info_map = new unordered_map; for (size_t i = 0; i < sizeof(g_func_infos) / sizeof(g_func_infos[0]); i++) { FuncInfo* fi = &g_func_infos[i]; bool ok = g_func_info_map->emplace(fi->name, fi).second; CHECK(ok); } } void QuitFuncTable() { delete g_func_info_map; } FuncInfo* GetFuncInfo(StringPiece name) { auto found = g_func_info_map->find(name); if (found == g_func_info_map->end()) return NULL; return found->second; } const vector& GetShellCommandResults() { return g_command_results; } func.go0100644 0000000 0000000 00000101022 13654546140 011073 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "bytes" "fmt" "io" "os" "os/exec" "path/filepath" "sort" "strconv" "strings" "time" "github.com/golang/glog" ) // mkFunc is a make function. // http://www.gnu.org/software/make/manual/make.html#Functions // mkFunc is make builtin function. type mkFunc interface { // Arity is max function's arity. // ',' will not be handled as argument separator more than arity. // 0 means varargs. Arity() int // AddArg adds value as an argument. // the first argument will be "(funcname", or "{funcname". 
AddArg(Value) Value } var ( funcMap = map[string]func() mkFunc{ "patsubst": func() mkFunc { return &funcPatsubst{} }, "strip": func() mkFunc { return &funcStrip{} }, "subst": func() mkFunc { return &funcSubst{} }, "findstring": func() mkFunc { return &funcFindstring{} }, "filter": func() mkFunc { return &funcFilter{} }, "filter-out": func() mkFunc { return &funcFilterOut{} }, "sort": func() mkFunc { return &funcSort{} }, "word": func() mkFunc { return &funcWord{} }, "wordlist": func() mkFunc { return &funcWordlist{} }, "words": func() mkFunc { return &funcWords{} }, "firstword": func() mkFunc { return &funcFirstword{} }, "lastword": func() mkFunc { return &funcLastword{} }, "join": func() mkFunc { return &funcJoin{} }, "wildcard": func() mkFunc { return &funcWildcard{} }, "dir": func() mkFunc { return &funcDir{} }, "notdir": func() mkFunc { return &funcNotdir{} }, "suffix": func() mkFunc { return &funcSuffix{} }, "basename": func() mkFunc { return &funcBasename{} }, "addsuffix": func() mkFunc { return &funcAddsuffix{} }, "addprefix": func() mkFunc { return &funcAddprefix{} }, "realpath": func() mkFunc { return &funcRealpath{} }, "abspath": func() mkFunc { return &funcAbspath{} }, "if": func() mkFunc { return &funcIf{} }, "and": func() mkFunc { return &funcAnd{} }, "or": func() mkFunc { return &funcOr{} }, "value": func() mkFunc { return &funcValue{} }, "eval": func() mkFunc { return &funcEval{} }, "shell": func() mkFunc { return &funcShell{} }, "call": func() mkFunc { return &funcCall{} }, "foreach": func() mkFunc { return &funcForeach{} }, "origin": func() mkFunc { return &funcOrigin{} }, "flavor": func() mkFunc { return &funcFlavor{} }, "info": func() mkFunc { return &funcInfo{} }, "warning": func() mkFunc { return &funcWarning{} }, "error": func() mkFunc { return &funcError{} }, } ) type arityError struct { narg int name string } func (e arityError) Error() string { return fmt.Sprintf("*** insufficient number of arguments (%d) to function `%s'.", e.narg, e.name) 
} func assertArity(name string, req, n int) error { if n-1 < req { return arityError{narg: n - 1, name: name} } return nil } func numericValueForFunc(v string) (int, bool) { n, err := strconv.Atoi(v) if err != nil || n < 0 { return n, false } return n, true } func formatCommandOutput(out []byte) []byte { out = bytes.TrimRight(out, "\n") out = bytes.Replace(out, []byte{'\n'}, []byte{' '}, -1) return out } type fclosure struct { // args[0] is "(funcname", or "{funcname". args []Value } func (c *fclosure) AddArg(v Value) { c.args = append(c.args, v) } func (c *fclosure) String() string { if len(c.args) == 0 { return "$(func)" } arg0 := c.args[0].String() if arg0 == "" { return "$(func )" } cp := closeParen(arg0[0]) if cp == 0 { return "${func }" } var args []string for _, arg := range c.args[1:] { args = append(args, arg.String()) } return fmt.Sprintf("$%s %s%c", arg0, strings.Join(args, ","), cp) } func (c *fclosure) serialize() serializableVar { r := serializableVar{Type: "func"} for _, a := range c.args { r.Children = append(r.Children, a.serialize()) } return r } func (c *fclosure) dump(d *dumpbuf) { d.Byte(valueTypeFunc) for _, a := range c.args { a.dump(d) } } // http://www.gnu.org/software/make/manual/make.html#Text-Functions type funcSubst struct{ fclosure } func (f *funcSubst) Arity() int { return 3 } func (f *funcSubst) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("subst", 3, len(f.args)) if err != nil { return err } abuf := newEbuf() fargs, err := ev.args(abuf, f.args[1:]...) 
if err != nil { return err } t := time.Now() from := fargs[0] to := fargs[1] text := fargs[2] glog.V(1).Infof("subst from:%q to:%q text:%q", from, to, text) if len(from) == 0 { w.Write(text) w.Write(to) } else { w.Write(bytes.Replace(text, from, to, -1)) } abuf.release() stats.add("funcbody", "subst", t) return nil } type funcPatsubst struct{ fclosure } func (f *funcPatsubst) Arity() int { return 3 } func (f *funcPatsubst) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("patsubst", 3, len(f.args)) if err != nil { return err } abuf := newEbuf() fargs, err := ev.args(abuf, f.args[1], f.args[2]) if err != nil { return err } wb := newWbuf() err = f.args[3].Eval(wb, ev) if err != nil { return err } t := time.Now() pat := fargs[0] repl := fargs[1] for _, word := range wb.words { pre, subst, post := substPatternBytes(pat, repl, word) var sword []byte sword = append(sword, pre...) if subst != nil { sword = append(sword, subst...) sword = append(sword, post...) } w.writeWord(sword) } abuf.release() wb.release() stats.add("funcbody", "patsubst", t) return nil } type funcStrip struct{ fclosure } func (f *funcStrip) Arity() int { return 1 } func (f *funcStrip) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("strip", 1, len(f.args)) if err != nil { return err } wb := newWbuf() err = f.args[1].Eval(wb, ev) if err != nil { return err } t := time.Now() for _, word := range wb.words { w.writeWord(word) } wb.release() stats.add("funcbody", "strip", t) return nil } type funcFindstring struct{ fclosure } func (f *funcFindstring) Arity() int { return 2 } func (f *funcFindstring) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("findstring", 2, len(f.args)) if err != nil { return err } abuf := newEbuf() fargs, err := ev.args(abuf, f.args[1:]...) 
if err != nil { return err } t := time.Now() find := fargs[0] text := fargs[1] if bytes.Index(text, find) >= 0 { w.Write(find) } abuf.release() stats.add("funcbody", "findstring", t) return nil } type funcFilter struct{ fclosure } func (f *funcFilter) Arity() int { return 2 } func (f *funcFilter) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("filter", 2, len(f.args)) if err != nil { return err } patternsBuffer := newWbuf() err = f.args[1].Eval(patternsBuffer, ev) if err != nil { return err } textBuffer := newWbuf() err = f.args[2].Eval(textBuffer, ev) if err != nil { return err } t := time.Now() for _, text := range textBuffer.words { for _, pat := range patternsBuffer.words { if matchPatternBytes(pat, text) { w.writeWord(text) } } } patternsBuffer.release() textBuffer.release() stats.add("funcbody", "filter", t) return nil } type funcFilterOut struct{ fclosure } func (f *funcFilterOut) Arity() int { return 2 } func (f *funcFilterOut) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("filter-out", 2, len(f.args)) if err != nil { return err } patternsBuffer := newWbuf() err = f.args[1].Eval(patternsBuffer, ev) if err != nil { return err } textBuffer := newWbuf() err = f.args[2].Eval(textBuffer, ev) if err != nil { return err } t := time.Now() Loop: for _, text := range textBuffer.words { for _, pat := range patternsBuffer.words { if matchPatternBytes(pat, text) { continue Loop } } w.writeWord(text) } patternsBuffer.release() textBuffer.release() stats.add("funcbody", "filter-out", t) return err } type funcSort struct{ fclosure } func (f *funcSort) Arity() int { return 1 } func (f *funcSort) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("sort", 1, len(f.args)) if err != nil { return err } wb := newWbuf() err = f.args[1].Eval(wb, ev) if err != nil { return err } t := time.Now() var toks []string for _, tok := range wb.words { toks = append(toks, string(tok)) } wb.release() sort.Strings(toks) // Remove duplicate words. 
var prev string for _, tok := range toks { if prev == tok { continue } w.writeWordString(tok) prev = tok } stats.add("funcbody", "sort", t) return nil } type funcWord struct{ fclosure } func (f *funcWord) Arity() int { return 2 } func (f *funcWord) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("word", 2, len(f.args)) if err != nil { return err } abuf := newEbuf() err = f.args[1].Eval(abuf, ev) if err != nil { return err } v := string(trimSpaceBytes(abuf.Bytes())) abuf.release() index, ok := numericValueForFunc(v) if !ok { return ev.errorf(`*** non-numeric first argument to "word" function: %q.`, v) } if index == 0 { return ev.errorf(`*** first argument to "word" function must be greater than 0.`) } wb := newWbuf() err = f.args[2].Eval(wb, ev) if err != nil { return err } t := time.Now() if index-1 < len(wb.words) { w.writeWord(wb.words[index-1]) } wb.release() stats.add("funcbody", "word", t) return err } type funcWordlist struct{ fclosure } func (f *funcWordlist) Arity() int { return 3 } func (f *funcWordlist) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("wordlist", 3, len(f.args)) if err != nil { return err } abuf := newEbuf() fargs, err := ev.args(abuf, f.args[1], f.args[2]) if err != nil { return err } t := time.Now() v := string(trimSpaceBytes(fargs[0])) si, ok := numericValueForFunc(v) if !ok { return ev.errorf(`*** non-numeric first argument to "wordlist" function: %q.`, v) } if si == 0 { return ev.errorf(`*** invalid first argument to "wordlist" function: %s`, f.args[1]) } v = string(trimSpaceBytes(fargs[1])) ei, ok := numericValueForFunc(v) if !ok { return ev.errorf(`*** non-numeric second argument to "wordlist" function: %q.`, v) } abuf.release() wb := newWbuf() err = f.args[3].Eval(wb, ev) if err != nil { return err } for i, word := range wb.words { if si <= i+1 && i+1 <= ei { w.writeWord(word) } } wb.release() stats.add("funcbody", "wordlist", t) return nil } type funcWords struct{ fclosure } func (f *funcWords) Arity() 
int { return 1 } func (f *funcWords) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("words", 1, len(f.args)) if err != nil { return err } wb := newWbuf() err = f.args[1].Eval(wb, ev) if err != nil { return err } t := time.Now() n := len(wb.words) wb.release() w.writeWordString(strconv.Itoa(n)) stats.add("funcbody", "words", t) return nil } type funcFirstword struct{ fclosure } func (f *funcFirstword) Arity() int { return 1 } func (f *funcFirstword) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("firstword", 1, len(f.args)) if err != nil { return err } wb := newWbuf() err = f.args[1].Eval(wb, ev) if err != nil { return err } t := time.Now() if len(wb.words) > 0 { w.writeWord(wb.words[0]) } wb.release() stats.add("funcbody", "firstword", t) return nil } type funcLastword struct{ fclosure } func (f *funcLastword) Arity() int { return 1 } func (f *funcLastword) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("lastword", 1, len(f.args)) if err != nil { return err } wb := newWbuf() err = f.args[1].Eval(wb, ev) if err != nil { return err } t := time.Now() if len(wb.words) > 0 { w.writeWord(wb.words[len(wb.words)-1]) } wb.release() stats.add("funcbody", "lastword", t) return err } // https://www.gnu.org/software/make/manual/html_node/File-Name-Functions.html#File-Name-Functions type funcJoin struct{ fclosure } func (f *funcJoin) Arity() int { return 2 } func (f *funcJoin) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("join", 2, len(f.args)) if err != nil { return err } wb1 := newWbuf() err = f.args[1].Eval(wb1, ev) if err != nil { return err } wb2 := newWbuf() err = f.args[2].Eval(wb2, ev) if err != nil { return err } t := time.Now() for i := 0; i < len(wb1.words) || i < len(wb2.words); i++ { var word []byte if i < len(wb1.words) { word = append(word, wb1.words[i]...) } if i < len(wb2.words) { word = append(word, wb2.words[i]...) 
} w.writeWord(word) } wb1.release() wb2.release() stats.add("funcbody", "join", t) return nil } type funcWildcard struct{ fclosure } func (f *funcWildcard) Arity() int { return 1 } func (f *funcWildcard) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("wildcard", 1, len(f.args)) if err != nil { return err } wb := newWbuf() err = f.args[1].Eval(wb, ev) if err != nil { return err } te := traceEvent.begin("wildcard", tmpval(wb.Bytes()), traceEventMain) // Note GNU make does not delay the execution of $(wildcard) so we // do not need to check avoid_io here. t := time.Now() for _, word := range wb.words { pat := string(word) err = wildcard(w, pat) if err != nil { return err } } wb.release() traceEvent.end(te) stats.add("funcbody", "wildcard", t) return nil } type funcDir struct{ fclosure } func (f *funcDir) Arity() int { return 1 } func (f *funcDir) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("dir", 1, len(f.args)) if err != nil { return err } wb := newWbuf() err = f.args[1].Eval(wb, ev) if err != nil { return err } t := time.Now() for _, word := range wb.words { name := filepath.Dir(string(word)) if name == "/" { w.writeWordString(name) continue } w.writeWordString(name + string(filepath.Separator)) } wb.release() stats.add("funcbody", "dir", t) return nil } type funcNotdir struct{ fclosure } func (f *funcNotdir) Arity() int { return 1 } func (f *funcNotdir) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("notdir", 1, len(f.args)) if err != nil { return err } wb := newWbuf() err = f.args[1].Eval(wb, ev) if err != nil { return err } t := time.Now() for _, word := range wb.words { name := string(word) if name == string(filepath.Separator) { w.writeWord([]byte{}) // separator continue } w.writeWordString(filepath.Base(name)) } wb.release() stats.add("funcbody", "notdir", t) return nil } type funcSuffix struct{ fclosure } func (f *funcSuffix) Arity() int { return 1 } func (f *funcSuffix) Eval(w evalWriter, ev *Evaluator) error { 
err := assertArity("suffix", 1, len(f.args)) if err != nil { return err } wb := newWbuf() err = f.args[1].Eval(wb, ev) if err != nil { return err } t := time.Now() for _, word := range wb.words { tok := string(word) e := filepath.Ext(tok) if len(e) > 0 { w.writeWordString(e) } } wb.release() stats.add("funcbody", "suffix", t) return err } type funcBasename struct{ fclosure } func (f *funcBasename) Arity() int { return 1 } func (f *funcBasename) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("basename", 1, len(f.args)) if err != nil { return err } wb := newWbuf() err = f.args[1].Eval(wb, ev) if err != nil { return err } t := time.Now() for _, word := range wb.words { tok := string(word) e := stripExt(tok) w.writeWordString(e) } wb.release() stats.add("funcbody", "basename", t) return nil } type funcAddsuffix struct{ fclosure } func (f *funcAddsuffix) Arity() int { return 2 } func (f *funcAddsuffix) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("addsuffix", 2, len(f.args)) if err != nil { return err } abuf := newEbuf() err = f.args[1].Eval(abuf, ev) if err != nil { return err } wb := newWbuf() err = f.args[2].Eval(wb, ev) if err != nil { return err } t := time.Now() suf := abuf.Bytes() for _, word := range wb.words { var name []byte name = append(name, word...) name = append(name, suf...) w.writeWord(name) } wb.release() abuf.release() stats.add("funcbody", "addsuffix", t) return err } type funcAddprefix struct{ fclosure } func (f *funcAddprefix) Arity() int { return 2 } func (f *funcAddprefix) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("addprefix", 2, len(f.args)) if err != nil { return err } abuf := newEbuf() err = f.args[1].Eval(abuf, ev) if err != nil { return err } pre := abuf.Bytes() wb := newWbuf() err = f.args[2].Eval(wb, ev) if err != nil { return err } t := time.Now() for _, word := range wb.words { var name []byte name = append(name, pre...) name = append(name, word...) 
w.writeWord(name) } wb.release() abuf.release() stats.add("funcbody", "addprefix", t) return err } type funcRealpath struct{ fclosure } func (f *funcRealpath) Arity() int { return 1 } func (f *funcRealpath) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("realpath", 1, len(f.args)) if err != nil { return err } wb := newWbuf() err = f.args[1].Eval(wb, ev) if err != nil { return err } if ev.avoidIO { fmt.Fprintf(w, "$(realpath %s 2>/dev/null)", string(wb.Bytes())) ev.hasIO = true wb.release() return nil } t := time.Now() for _, word := range wb.words { name := string(word) name, err := filepath.Abs(name) if err != nil { glog.Warningf("abs %q: %v", name, err) continue } name, err = filepath.EvalSymlinks(name) if err != nil { glog.Warningf("realpath %q: %v", name, err) continue } w.writeWordString(name) } wb.release() stats.add("funcbody", "realpath", t) return err } type funcAbspath struct{ fclosure } func (f *funcAbspath) Arity() int { return 1 } func (f *funcAbspath) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("abspath", 1, len(f.args)) if err != nil { return err } wb := newWbuf() err = f.args[1].Eval(wb, ev) if err != nil { return err } t := time.Now() for _, word := range wb.words { name := string(word) name, err := filepath.Abs(name) if err != nil { glog.Warningf("abs %q: %v", name, err) continue } w.writeWordString(name) } wb.release() stats.add("funcbody", "abspath", t) return nil } // http://www.gnu.org/software/make/manual/make.html#Conditional-Functions type funcIf struct{ fclosure } func (f *funcIf) Arity() int { return 3 } func (f *funcIf) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("if", 2, len(f.args)) if err != nil { return err } abuf := newEbuf() err = f.args[1].Eval(abuf, ev) if err != nil { return err } if len(abuf.Bytes()) != 0 { abuf.release() return f.args[2].Eval(w, ev) } abuf.release() if len(f.args) > 3 { return f.args[3].Eval(w, ev) } return nil } type funcAnd struct{ fclosure } func (f *funcAnd) 
Arity() int { return 0 } func (f *funcAnd) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("and", 0, len(f.args)) if err != nil { return nil } abuf := newEbuf() var cond []byte for _, arg := range f.args[1:] { abuf.Reset() err = arg.Eval(abuf, ev) if err != nil { return err } cond = abuf.Bytes() if len(cond) == 0 { abuf.release() return nil } } w.Write(cond) abuf.release() return nil } type funcOr struct{ fclosure } func (f *funcOr) Arity() int { return 0 } func (f *funcOr) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("or", 0, len(f.args)) if err != nil { return err } abuf := newEbuf() for _, arg := range f.args[1:] { abuf.Reset() err = arg.Eval(abuf, ev) if err != nil { return err } cond := abuf.Bytes() if len(cond) != 0 { w.Write(cond) abuf.release() return nil } } abuf.release() return nil } // http://www.gnu.org/software/make/manual/make.html#Shell-Function type funcShell struct{ fclosure } func (f *funcShell) Arity() int { return 1 } // A hack for Android build. We need to evaluate things like $((3+4)) // when we emit ninja file, because the result of such expressions // will be passed to other make functions. // TODO: Maybe we should modify Android's Makefile and remove this // workaround. It would be also nice if we can detect things like // this. 
func hasNoIoInShellScript(s []byte) bool { if len(s) == 0 { return true } if !bytes.HasPrefix(s, []byte("echo $((")) || s[len(s)-1] != ')' { return false } glog.Infof("has no IO - evaluate now: %s", s) return true } func (f *funcShell) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("shell", 1, len(f.args)) if err != nil { return err } abuf := newEbuf() err = f.args[1].Eval(abuf, ev) if err != nil { return err } if ev.avoidIO && !hasNoIoInShellScript(abuf.Bytes()) { te := traceEvent.begin("shell", tmpval(abuf.Bytes()), traceEventMain) ev.hasIO = true io.WriteString(w, "$(") w.Write(abuf.Bytes()) writeByte(w, ')') traceEvent.end(te) abuf.release() return nil } arg := abuf.String() abuf.release() if bc, err := parseBuiltinCommand(arg); err != nil { glog.V(1).Infof("sh builtin: %v", err) } else { glog.Info("use sh builtin:", arg) glog.V(2).Infof("builtin command: %#v", bc) te := traceEvent.begin("sh-builtin", literal(arg), traceEventMain) bc.run(w) traceEvent.end(te) return nil } shellVar, err := ev.EvaluateVar("SHELL") if err != nil { return err } cmdline := []string{shellVar, "-c", arg} if glog.V(1) { glog.Infof("shell %q", cmdline) } cmd := exec.Cmd{ Path: cmdline[0], Args: cmdline, Stderr: os.Stderr, } te := traceEvent.begin("shell", literal(arg), traceEventMain) out, err := cmd.Output() shellStats.add(time.Since(te.t)) if err != nil { glog.Warningf("$(shell %q) failed: %q", arg, err) } w.Write(formatCommandOutput(out)) traceEvent.end(te) return nil } func (f *funcShell) Compact() Value { if len(f.args)-1 < 1 { return f } if !UseShellBuiltins { return f } var exp expr switch v := f.args[1].(type) { case expr: exp = v default: exp = expr{v} } if UseShellBuiltins { // hack for android for _, sb := range shBuiltins { if v, ok := matchExpr(exp, sb.pattern); ok { glog.Infof("shell compact apply %s for %s", sb.name, exp) return sb.compact(f, v) } } glog.V(1).Infof("shell compact no match: %s", exp) } return f } // 
https://www.gnu.org/software/make/manual/html_node/Call-Function.html#Call-Function type funcCall struct{ fclosure } func (f *funcCall) Arity() int { return 0 } func (f *funcCall) Eval(w evalWriter, ev *Evaluator) error { abuf := newEbuf() fargs, err := ev.args(abuf, f.args[1:]...) if err != nil { return err } varname := fargs[0] variable := string(varname) te := traceEvent.begin("call", literal(variable), traceEventMain) if glog.V(1) { glog.Infof("call %q variable %q", f.args[1], variable) } v := ev.LookupVar(variable) // Evalualte all arguments first before we modify the table. // An omitted argument should be blank, even if it's nested inside // another call statement that did have that argument passed. // see testcases/nested_call.mk arglen := len(ev.paramVars) if arglen == 0 { arglen++ } if arglen < len(fargs[1:])+1 { arglen = len(fargs[1:]) + 1 } args := make([]tmpval, arglen) // $0 is variable. args[0] = tmpval(varname) // TODO(ukai): If variable is the name of a built-in function, // the built-in function is always invoked (even if a make variable // by that name also exists). for i, arg := range fargs[1:] { // f.args[2]=>args[1] will be $1. 
args[i+1] = tmpval(arg) if glog.V(1) { glog.Infof("call $%d: %q=>%q", i+1, arg, fargs[i+1]) } } oldParams := ev.paramVars ev.paramVars = args var buf bytes.Buffer if glog.V(1) { w = &ssvWriter{Writer: io.MultiWriter(w, &buf)} } err = v.Eval(w, ev) if err != nil { return err } ev.paramVars = oldParams traceEvent.end(te) if glog.V(1) { glog.Infof("call %q variable %q return %q", f.args[1], variable, buf.Bytes()) } abuf.release() return nil } // http://www.gnu.org/software/make/manual/make.html#Value-Function type funcValue struct{ fclosure } func (f *funcValue) Arity() int { return 1 } func (f *funcValue) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("value", 1, len(f.args)) if err != nil { return err } abuf := newEbuf() err = f.args[1].Eval(abuf, ev) if err != nil { return err } v := ev.LookupVar(abuf.String()) abuf.release() io.WriteString(w, v.String()) return nil } // http://www.gnu.org/software/make/manual/make.html#Eval-Function type funcEval struct{ fclosure } func (f *funcEval) Arity() int { return 1 } func (f *funcEval) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("eval", 1, len(f.args)) if err != nil { return err } abuf := newEbuf() err = f.args[1].Eval(abuf, ev) if err != nil { return err } s := abuf.Bytes() glog.V(1).Infof("eval %v=>%q at %s", f.args[1], s, ev.srcpos) mk, err := parseMakefileBytes(trimSpaceBytes(s), ev.srcpos) if err != nil { return ev.errorf("%v", err) } for _, stmt := range mk.stmts { err = ev.eval(stmt) if err != nil { return err } } abuf.release() return nil } func (f *funcEval) Compact() Value { if len(f.args)-1 < 1 { return f } switch arg := f.args[1].(type) { case literal, tmpval: case expr: if len(arg) == 1 { return f } switch prefix := arg[0].(type) { case literal, tmpval: lhs, op, rhsprefix, ok := parseAssignLiteral(prefix.String()) if ok { // $(eval foo = $(bar)) var rhs expr if rhsprefix != literal("") { rhs = append(rhs, rhsprefix) } rhs = append(rhs, arg[1:]...) 
glog.V(1).Infof("eval assign %#v => lhs:%q op:%q rhs:%#v", f, lhs, op, rhs) return &funcEvalAssign{ lhs: lhs, op: op, rhs: compactExpr(rhs), } } } // TODO(ukai): eval -> varassign. e.g $(eval $(foo) := $(x)). return f default: return f } arg := f.args[1].String() arg = stripComment(arg) if arg == "" || strings.TrimSpace(arg) == "" { return &funcNop{expr: f.String()} } f.args[1] = literal(arg) lhs, op, rhs, ok := parseAssignLiteral(f.args[1].String()) if ok { return &funcEvalAssign{ lhs: lhs, op: op, rhs: rhs, } } return f } func stripComment(arg string) string { for { i := strings.Index(arg, "#") if i < 0 { return arg } eol := strings.Index(arg[i:], "\n") if eol < 0 { return arg[:i] } arg = arg[:i] + arg[eol+1:] } } type funcNop struct{ expr string } func (f *funcNop) String() string { return f.expr } func (f *funcNop) Eval(evalWriter, *Evaluator) error { return nil } func (f *funcNop) serialize() serializableVar { return serializableVar{ Type: "funcNop", V: f.expr, } } func (f *funcNop) dump(d *dumpbuf) { d.Byte(valueTypeNop) } func parseAssignLiteral(s string) (lhs, op string, rhs Value, ok bool) { eq := strings.Index(s, "=") if eq < 0 { return "", "", nil, false } // TODO(ukai): factor out parse assign? lhs = s[:eq] op = s[eq : eq+1] if eq >= 1 && (s[eq-1] == ':' || s[eq-1] == '+' || s[eq-1] == '?') { lhs = s[:eq-1] op = s[eq-1 : eq+1] } lhs = strings.TrimSpace(lhs) if strings.IndexAny(lhs, ":$") >= 0 { // target specific var, or need eval. 
return "", "", nil, false } r := strings.TrimLeft(s[eq+1:], " \t") rhs = literal(r) return lhs, op, rhs, true } type funcEvalAssign struct { lhs string op string rhs Value } func (f *funcEvalAssign) String() string { return fmt.Sprintf("$(eval %s %s %s)", f.lhs, f.op, f.rhs) } func (f *funcEvalAssign) Eval(w evalWriter, ev *Evaluator) error { var abuf evalBuffer abuf.resetSep() err := f.rhs.Eval(&abuf, ev) if err != nil { return err } rhs := trimLeftSpaceBytes(abuf.Bytes()) glog.V(1).Infof("evalAssign: lhs=%q rhs=%s %q", f.lhs, f.rhs, rhs) var rvalue Var switch f.op { case ":=": // TODO(ukai): compute parsed expr in Compact when f.rhs is // literal? e.g. literal("$(foo)") => varref{literal("foo")}. exp, _, err := parseExpr(rhs, nil, parseOp{}) if err != nil { return ev.errorf("eval assign error: %q: %v", f.String(), err) } vbuf := newEbuf() err = exp.Eval(vbuf, ev) if err != nil { return err } rvalue = &simpleVar{value: []string{vbuf.String()}, origin: "file"} vbuf.release() case "=": rvalue = &recursiveVar{expr: tmpval(rhs), origin: "file"} case "+=": prev := ev.LookupVar(f.lhs) if prev.IsDefined() { rvalue, err = prev.Append(ev, string(rhs)) if err != nil { return err } } else { rvalue = &recursiveVar{expr: tmpval(rhs), origin: "file"} } case "?=": prev := ev.LookupVar(f.lhs) if prev.IsDefined() { return nil } rvalue = &recursiveVar{expr: tmpval(rhs), origin: "file"} } if glog.V(1) { glog.Infof("Eval ASSIGN: %s=%q (flavor:%q)", f.lhs, rvalue, rvalue.Flavor()) } ev.outVars.Assign(f.lhs, rvalue) return nil } func (f *funcEvalAssign) serialize() serializableVar { return serializableVar{ Type: "funcEvalAssign", Children: []serializableVar{ serializableVar{V: f.lhs}, serializableVar{V: f.op}, f.rhs.serialize(), }, } } func (f *funcEvalAssign) dump(d *dumpbuf) { d.Byte(valueTypeAssign) d.Str(f.lhs) d.Str(f.op) f.rhs.dump(d) } // http://www.gnu.org/software/make/manual/make.html#Origin-Function type funcOrigin struct{ fclosure } func (f *funcOrigin) Arity() int { return 
1 } func (f *funcOrigin) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("origin", 1, len(f.args)) if err != nil { return err } abuf := newEbuf() err = f.args[1].Eval(abuf, ev) if err != nil { return err } v := ev.LookupVar(abuf.String()) abuf.release() io.WriteString(w, v.Origin()) return nil } // https://www.gnu.org/software/make/manual/html_node/Flavor-Function.html#Flavor-Function type funcFlavor struct{ fclosure } func (f *funcFlavor) Arity() int { return 1 } func (f *funcFlavor) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("flavor", 1, len(f.args)) if err != nil { return err } abuf := newEbuf() err = f.args[1].Eval(abuf, ev) if err != nil { return err } v := ev.LookupVar(abuf.String()) abuf.release() io.WriteString(w, v.Flavor()) return nil } // http://www.gnu.org/software/make/manual/make.html#Make-Control-Functions type funcInfo struct{ fclosure } func (f *funcInfo) Arity() int { return 1 } func (f *funcInfo) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("info", 1, len(f.args)) if err != nil { return err } abuf := newEbuf() err = f.args[1].Eval(abuf, ev) if err != nil { return err } if ev.avoidIO { ev.delayedOutputs = append(ev.delayedOutputs, fmt.Sprintf("echo %q", abuf.String())) ev.hasIO = true abuf.release() return nil } fmt.Printf("%s\n", abuf.String()) abuf.release() return nil } type funcWarning struct{ fclosure } func (f *funcWarning) Arity() int { return 1 } func (f *funcWarning) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("warning", 1, len(f.args)) if err != nil { return err } abuf := newEbuf() err = f.args[1].Eval(abuf, ev) if err != nil { return err } if ev.avoidIO { ev.delayedOutputs = append(ev.delayedOutputs, fmt.Sprintf("echo '%s: %s' 1>&2", ev.srcpos, abuf.String())) ev.hasIO = true abuf.release() return nil } fmt.Printf("%s: %s\n", ev.srcpos, abuf.String()) abuf.release() return nil } type funcError struct{ fclosure } func (f *funcError) Arity() int { return 1 } func (f 
*funcError) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("error", 1, len(f.args)) if err != nil { return err } var abuf evalBuffer abuf.resetSep() err = f.args[1].Eval(&abuf, ev) if err != nil { return err } if ev.avoidIO { ev.delayedOutputs = append(ev.delayedOutputs, fmt.Sprintf("echo '%s: *** %s.' 1>&2 && false", ev.srcpos, abuf.String())) ev.hasIO = true abuf.release() return nil } return ev.errorf("*** %s.", abuf.String()) } // http://www.gnu.org/software/make/manual/make.html#Foreach-Function type funcForeach struct{ fclosure } func (f *funcForeach) Arity() int { return 3 } func (f *funcForeach) Eval(w evalWriter, ev *Evaluator) error { err := assertArity("foreach", 3, len(f.args)) if err != nil { return err } abuf := newEbuf() err = f.args[1].Eval(abuf, ev) if err != nil { return err } varname := string(abuf.Bytes()) abuf.release() wb := newWbuf() err = f.args[2].Eval(wb, ev) if err != nil { return err } text := f.args[3] ov := ev.LookupVar(varname) space := false for _, word := range wb.words { ev.outVars.Assign(varname, &automaticVar{value: word}) if space { writeByte(w, ' ') } err = text.Eval(w, ev) if err != nil { return err } space = true } wb.release() av := ev.LookupVar(varname) if _, ok := av.(*automaticVar); ok { ev.outVars.Assign(varname, ov) } return nil } func.h0100644 0000000 0000000 00000002573 13654546140 010730 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
#ifndef FUNC_H_ #define FUNC_H_ #include #include #include #include "expr.h" using namespace std; struct FuncInfo { const char* name; void (*func)(const vector& args, Evaluator* ev, string* s); int arity; int min_arity; // For all parameters. bool trim_space; // Only for the first parameter. bool trim_right_space_1st; }; void InitFuncTable(); void QuitFuncTable(); FuncInfo* GetFuncInfo(StringPiece name); struct FindCommand; enum struct CommandOp { SHELL, FIND, READ, READ_MISSING, WRITE, APPEND, }; struct CommandResult { CommandOp op; string shell; string shellflag; string cmd; unique_ptr find; string result; }; const vector& GetShellCommandResults(); #endif // FUNC_H_ func_test.go0100644 0000000 0000000 00000003254 13654546140 012142 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package kati import "testing" func BenchmarkFuncStrip(b *testing.B) { strip := &funcStrip{ fclosure: fclosure{ args: []Value{ literal("(strip"), literal("a b c "), }, }, } ev := NewEvaluator(make(map[string]Var)) var buf evalBuffer b.ReportAllocs() b.ResetTimer() for i := 0; i < b.N; i++ { buf.Reset() strip.Eval(&buf, ev) } } func BenchmarkFuncSort(b *testing.B) { sort := &funcSort{ fclosure: fclosure{ args: []Value{ literal("(sort"), literal("foo bar lose"), }, }, } ev := NewEvaluator(make(map[string]Var)) var buf evalBuffer b.ReportAllocs() b.ResetTimer() for i := 0; i < b.N; i++ { buf.Reset() sort.Eval(&buf, ev) } } func BenchmarkFuncPatsubst(b *testing.B) { patsubst := &funcPatsubst{ fclosure: fclosure{ args: []Value{ literal("(patsubst"), literal("%.java"), literal("%.class"), literal("foo.jar bar.java baz.h"), }, }, } ev := NewEvaluator(make(map[string]Var)) var buf evalBuffer b.ReportAllocs() b.ResetTimer() for i := 0; i < b.N; i++ { buf.Reset() patsubst.Eval(&buf, ev) } } io.cc0100644 0000000 0000000 00000002330 13654546140 010531 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "io.h" #include "log.h" void DumpInt(FILE* fp, int v) { size_t r = fwrite(&v, sizeof(v), 1, fp); CHECK(r == 1); } void DumpString(FILE* fp, StringPiece s) { DumpInt(fp, s.size()); size_t r = fwrite(s.data(), 1, s.size(), fp); CHECK(r == s.size()); } int LoadInt(FILE* fp) { int v; size_t r = fread(&v, sizeof(v), 1, fp); if (r != 1) return -1; return v; } bool LoadString(FILE* fp, string* s) { int len = LoadInt(fp); if (len < 0) return false; s->resize(len); size_t r = fread(&(*s)[0], 1, s->size(), fp); if (r != s->size()) return false; return true; } io.h0100644 0000000 0000000 00000002015 13654546140 010373 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef IO_H_ #define IO_H_ #include #include #include "string_piece.h" using namespace std; void DumpInt(FILE* fp, int v); void DumpString(FILE* fp, StringPiece s); int LoadInt(FILE* fp); bool LoadString(FILE* fp, string* s); struct ScopedFile { public: explicit ScopedFile(FILE* fp) : fp_(fp) {} ~ScopedFile() { if (fp_) fclose(fp_); } private: FILE* fp_; }; #endif // IO_H_ loc.h0100644 0000000 0000000 00000001600 13654546140 010540 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef LOC_H_ #define LOC_H_ #include #include "stringprintf.h" struct Loc { Loc() : filename(0), lineno(-1) {} Loc(const char* f, int l) : filename(f), lineno(l) {} const char* filename; int lineno; }; #define LOCF(x) (x).filename, (x).lineno #endif // LOC_H_ log.cc0100644 0000000 0000000 00000003270 13654546140 010707 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "log.h" #include "flags.h" #include "strutil.h" #define BOLD "\033[1m" #define RESET "\033[0m" #define MAGENTA "\033[35m" #define RED "\033[31m" void ColorErrorLog(const char* file, int line, const char* msg) { if (file == nullptr) { ERROR("%s", msg); return; } if (g_flags.color_warnings) { StringPiece filtered = TrimPrefix(msg, "*** "); ERROR(BOLD "%s:%d: " RED "error: " RESET BOLD "%s" RESET, file, line, filtered.as_string().c_str()); } else { ERROR("%s:%d: %s", file, line, msg); } } void ColorWarnLog(const char* file, int line, const char* msg) { if (file == nullptr) { fprintf(stderr, "%s\n", msg); return; } if (g_flags.color_warnings) { StringPiece filtered = TrimPrefix(msg, "*warning*: "); filtered = TrimPrefix(filtered, "warning: "); fprintf(stderr, BOLD "%s:%d: " MAGENTA "warning: " RESET BOLD "%s" RESET "\n", file, line, filtered.as_string().c_str()); } else { fprintf(stderr, "%s:%d: %s\n", file, line, msg); } } bool g_log_no_exit; string* g_last_error; log.go0100644 0000000 0000000 00000002012 13654546140 010720 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "fmt" "github.com/golang/glog" ) func logStats(f string, a ...interface{}) { // TODO(ukai): vmodule? if !StatsFlag { return } glog.Infof(f, a...) } func warn(loc srcpos, f string, a ...interface{}) { f = fmt.Sprintf("%s: warning: %s\n", loc, f) fmt.Printf(f, a...) 
} func warnNoPrefix(loc srcpos, f string, a ...interface{}) { f = fmt.Sprintf("%s: %s\n", loc, f) fmt.Printf(f, a...) } log.h0100644 0000000 0000000 00000007602 13654546140 010554 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef LOG_H_ #define LOG_H_ #include #include #include #include #include "flags.h" #include "log.h" #include "stringprintf.h" using namespace std; extern bool g_log_no_exit; extern string* g_last_error; // Useful for logging-only arguments. #define UNUSED __attribute__((unused)) #ifdef NOLOG #define LOG(args...) #else #define LOG(args...) \ do { \ fprintf(stderr, "*kati*: %s\n", StringPrintf(args).c_str()); \ } while (0) #endif #define LOG_STAT(args...) \ do { \ if (g_flags.enable_stat_logs) \ fprintf(stderr, "*kati*: %s\n", StringPrintf(args).c_str()); \ } while (0) #define PLOG(...) \ do { \ fprintf(stderr, "%s: %s\n", StringPrintf(__VA_ARGS__).c_str(), \ strerror(errno)); \ } while (0) #define PERROR(...) \ do { \ PLOG(__VA_ARGS__); \ exit(1); \ } while (0) #define WARN(...) \ do { \ fprintf(stderr, "%s\n", StringPrintf(__VA_ARGS__).c_str()); \ } while (0) #define KATI_WARN(...) \ do { \ if (g_flags.enable_kati_warnings) \ fprintf(stderr, "%s\n", StringPrintf(__VA_ARGS__).c_str()); \ } while (0) #define ERROR(...) 
\ do { \ if (!g_log_no_exit) { \ fprintf(stderr, "%s\n", StringPrintf(__VA_ARGS__).c_str()); \ exit(1); \ } \ g_last_error = new string(StringPrintf(__VA_ARGS__)); \ } while (0) #define CHECK(c) \ if (!(c)) \ ERROR("%s:%d: %s", __FILE__, __LINE__, #c) // Set of logging functions that will automatically colorize lines that have // location information when --color_warnings is set. void ColorWarnLog(const char* file, int line, const char* msg); void ColorErrorLog(const char* file, int line, const char* msg); #define WARN_LOC(loc, ...) \ do { \ ColorWarnLog(LOCF(loc), StringPrintf(__VA_ARGS__).c_str()); \ } while (0) #define KATI_WARN_LOC(loc, ...) \ do { \ if (g_flags.enable_kati_warnings) \ ColorWarnLog(LOCF(loc), StringPrintf(__VA_ARGS__).c_str()); \ } while (0) #define ERROR_LOC(loc, ...) \ do { \ ColorErrorLog(LOCF(loc), StringPrintf(__VA_ARGS__).c_str()); \ } while (0) #endif // LOG_H_ m2n0100755 0000000 0000000 00000005606 13654546140 010246 0ustar000000000 0000000 #!/bin/bash # # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # A wrapper for kati which generates build.ninja mainly for Android. 
# set -e kati_dir=$(cd $(dirname $0) && pwd) extra_flags= goma_flag= goma_dir=${GOMA_DIR:-$HOME/goma} while [ x"$1" != x"" ]; do case "$1" in --help) cat - < #include #include #include #include #include #include #include "affinity.h" #include "dep.h" #include "eval.h" #include "exec.h" #include "file.h" #include "file_cache.h" #include "fileutil.h" #include "find.h" #include "flags.h" #include "func.h" #include "log.h" #include "ninja.h" #include "parser.h" #include "regen.h" #include "stats.h" #include "stmt.h" #include "string_piece.h" #include "stringprintf.h" #include "strutil.h" #include "symtab.h" #include "timeutil.h" #include "var.h" // We know that there are leaks in Kati. Turn off LeakSanitizer by default. extern "C" const char* __asan_default_options() { return "detect_leaks=0:allow_user_segv_handler=1"; } static void Init() { InitSymtab(); InitFuncTable(); InitDepNodePool(); InitParser(); } static void Quit() { ReportAllStats(); QuitParser(); QuitDepNodePool(); QuitFuncTable(); QuitSymtab(); } static void ReadBootstrapMakefile(const vector& targets, vector* stmts) { string bootstrap = ("CC?=cc\n" #if defined(__APPLE__) "CXX?=c++\n" #else "CXX?=g++\n" #endif "AR?=ar\n" // Pretend to be GNU make 3.81, for compatibility. "MAKE_VERSION?=3.81\n" "KATI?=ckati\n" // Overwrite $SHELL environment variable. "SHELL=/bin/sh\n" // TODO: Add more builtin vars. ); if (!g_flags.no_builtin_rules) { bootstrap += ( // http://www.gnu.org/software/make/manual/make.html#Catalogue-of-Rules // The document above is actually not correct. See default.c: // http://git.savannah.gnu.org/cgit/make.git/tree/default.c?id=4.1 ".c.o:\n" "\t$(CC) $(CFLAGS) $(CPPFLAGS) $(TARGET_ARCH) -c -o $@ $<\n" ".cc.o:\n" "\t$(CXX) $(CXXFLAGS) $(CPPFLAGS) $(TARGET_ARCH) -c -o $@ $<\n" // TODO: Add more builtin rules. ); } if (g_flags.generate_ninja) { bootstrap += StringPrintf("MAKE?=make -j%d\n", g_flags.num_jobs <= 1 ? 
1 : g_flags.num_jobs / 2); } else { bootstrap += StringPrintf("MAKE?=%s\n", JoinStrings(g_flags.subkati_args, " ").c_str()); } bootstrap += StringPrintf("MAKECMDGOALS?=%s\n", JoinSymbols(targets, " ").c_str()); char cwd[PATH_MAX]; if (!getcwd(cwd, PATH_MAX)) { fprintf(stderr, "getcwd failed\n"); CHECK(false); } bootstrap += StringPrintf("CURDIR:=%s\n", cwd); Parse(Intern(bootstrap).str(), Loc("*bootstrap*", 0), stmts); } static void SetVar(StringPiece l, VarOrigin origin) { size_t found = l.find('='); CHECK(found != string::npos); Symbol lhs = Intern(l.substr(0, found)); StringPiece rhs = l.substr(found + 1); lhs.SetGlobalVar( new RecursiveVar(Value::NewLiteral(rhs.data()), origin, rhs.data())); } extern "C" char** environ; class SegfaultHandler { public: explicit SegfaultHandler(Evaluator* ev); ~SegfaultHandler(); void handle(int, siginfo_t*, void*); private: static SegfaultHandler* global_handler; void dumpstr(const char* s) const { (void)write(STDERR_FILENO, s, strlen(s)); } void dumpint(int i) const { char buf[11]; char* ptr = buf + sizeof(buf) - 1; if (i < 0) { i = -i; dumpstr("-"); } else if (i == 0) { dumpstr("0"); return; } *ptr = '\0'; while (ptr > buf && i > 0) { *--ptr = '0' + (i % 10); i = i / 10; } dumpstr(ptr); } Evaluator* ev_; struct sigaction orig_action_; struct sigaction new_action_; }; SegfaultHandler* SegfaultHandler::global_handler = nullptr; SegfaultHandler::SegfaultHandler(Evaluator* ev) : ev_(ev) { CHECK(global_handler == nullptr); global_handler = this; // Construct an alternate stack, so that we can handle stack overflows. stack_t ss; ss.ss_sp = malloc(SIGSTKSZ * 2); CHECK(ss.ss_sp != nullptr); ss.ss_size = SIGSTKSZ * 2; ss.ss_flags = 0; if (sigaltstack(&ss, nullptr) == -1) { PERROR("sigaltstack"); } // Register our segfault handler using the alternate stack, falling // back to the default handler. 
sigemptyset(&new_action_.sa_mask); new_action_.sa_flags = SA_ONSTACK | SA_SIGINFO | SA_RESETHAND; new_action_.sa_sigaction = [](int sig, siginfo_t* info, void* context) { if (global_handler != nullptr) { global_handler->handle(sig, info, context); } raise(SIGSEGV); }; sigaction(SIGSEGV, &new_action_, &orig_action_); } void SegfaultHandler::handle(int sig, siginfo_t* info, void* context) { // Avoid fprintf in case it allocates or tries to do anything else that may // hang. dumpstr("*kati*: Segmentation fault, last evaluated line was "); dumpstr(ev_->loc().filename); dumpstr(":"); dumpint(ev_->loc().lineno); dumpstr("\n"); // Run the original handler, in case we've been preloaded with libSegFault // or similar. if (orig_action_.sa_sigaction != nullptr) { orig_action_.sa_sigaction(sig, info, context); } } SegfaultHandler::~SegfaultHandler() { sigaction(SIGSEGV, &orig_action_, nullptr); global_handler = nullptr; } static int Run(const vector& targets, const vector& cl_vars, const string& orig_args) { double start_time = GetTime(); if (g_flags.generate_ninja && (g_flags.regen || g_flags.dump_kati_stamp)) { ScopedTimeReporter tr("regen check time"); if (!NeedsRegen(start_time, orig_args)) { fprintf(stderr, "No need to regenerate ninja file\n"); return 0; } if (g_flags.dump_kati_stamp) { printf("Need to regenerate ninja file\n"); return 0; } ClearGlobCache(); } SetAffinityForSingleThread(); MakefileCacheManager* cache_mgr = NewMakefileCacheManager(); Intern("MAKEFILE_LIST") .SetGlobalVar(new SimpleVar(StringPrintf(" %s", g_flags.makefile), VarOrigin::FILE)); for (char** p = environ; *p; p++) { SetVar(*p, VarOrigin::ENVIRONMENT); } unique_ptr ev(new Evaluator()); SegfaultHandler segfault(ev.get()); vector bootstrap_asts; ReadBootstrapMakefile(targets, &bootstrap_asts); ev->set_is_bootstrap(true); for (Stmt* stmt : bootstrap_asts) { LOG("%s", stmt->DebugString().c_str()); stmt->Eval(ev.get()); } ev->set_is_bootstrap(false); ev->set_is_commandline(true); for (StringPiece l : 
cl_vars) { vector asts; Parse(Intern(l).str(), Loc("*bootstrap*", 0), &asts); CHECK(asts.size() == 1); asts[0]->Eval(ev.get()); } ev->set_is_commandline(false); { ScopedTimeReporter tr("eval time"); Makefile* mk = cache_mgr->ReadMakefile(g_flags.makefile); for (Stmt* stmt : mk->stmts()) { LOG("%s", stmt->DebugString().c_str()); stmt->Eval(ev.get()); } } for (ParseErrorStmt* err : GetParseErrors()) { WARN_LOC(err->loc(), "warning for parse error in an unevaluated line: %s", err->msg.c_str()); } vector nodes; { ScopedTimeReporter tr("make dep time"); MakeDep(ev.get(), ev->rules(), ev->rule_vars(), targets, &nodes); } if (g_flags.is_syntax_check_only) return 0; if (g_flags.generate_ninja) { ScopedTimeReporter tr("generate ninja time"); GenerateNinja(nodes, ev.get(), orig_args, start_time); ev->DumpStackStats(); return 0; } for (const auto& p : ev->exports()) { const Symbol name = p.first; if (p.second) { Var* v = ev->LookupVar(name); const string&& value = v->Eval(ev.get()); LOG("setenv(%s, %s)", name.c_str(), value.c_str()); setenv(name.c_str(), value.c_str(), 1); } else { LOG("unsetenv(%s)", name.c_str()); unsetenv(name.c_str()); } } { ScopedTimeReporter tr("exec time"); Exec(nodes, ev.get()); } ev->DumpStackStats(); for (Stmt* stmt : bootstrap_asts) delete stmt; delete cache_mgr; return 0; } static void FindFirstMakefie() { if (g_flags.makefile != NULL) return; if (Exists("GNUmakefile")) { g_flags.makefile = "GNUmakefile"; #if !defined(__APPLE__) } else if (Exists("makefile")) { g_flags.makefile = "makefile"; #endif } else if (Exists("Makefile")) { g_flags.makefile = "Makefile"; } } static void HandleRealpath(int argc, char** argv) { char buf[PATH_MAX]; for (int i = 0; i < argc; i++) { if (realpath(argv[i], buf)) printf("%s\n", buf); } } int main(int argc, char* argv[]) { if (argc >= 2 && !strcmp(argv[1], "--realpath")) { HandleRealpath(argc - 2, argv + 2); return 0; } Init(); string orig_args; for (int i = 0; i < argc; i++) { if (i) orig_args += ' '; orig_args += 
argv[i]; } g_flags.Parse(argc, argv); FindFirstMakefie(); if (g_flags.makefile == NULL) ERROR("*** No targets specified and no makefile found."); // This depends on command line flags. if (g_flags.use_find_emulator) InitFindEmulator(); int r = Run(g_flags.targets, g_flags.cl_vars, orig_args); Quit(); return r; } make-c.sh0100755 0000000 0000000 00000001404 13654546140 011310 0ustar000000000 0000000 #!/bin/sh # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # simple tool to measure time to parse Makefiles in android. # # usage: # $ ./repo/android.sh make-c.sh dir=$(cd $(dirname $0); pwd) go run ${dir}/make-c/main.go make-c/0040755 0000000 0000000 00000000000 13654546140 010755 5ustar000000000 0000000 make-c/main.go0100644 0000000 0000000 00000002477 13654546140 012237 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// make-c is simple program to measure time to parse Makefiles in android. package main import ( "bufio" "bytes" "fmt" "os/exec" "time" ) func main() { parseDone := make(chan bool) cmd := exec.Command("make", "-n") r, err := cmd.StdoutPipe() if err != nil { panic(err) } t := time.Now() go func() { s := bufio.NewScanner(r) for s.Scan() { if bytes.HasPrefix(s.Bytes(), []byte("echo ")) { parseDone <- true return } fmt.Println(s.Text()) } if err := s.Err(); err != nil { panic(err) } panic("unexpected end of make?") }() err = cmd.Start() if err != nil { panic(err) } select { case <-parseDone: fmt.Printf("make -c: %v\n", time.Since(t)) } cmd.Process.Kill() cmd.Wait() } ninja.cc0100644 0000000 0000000 00000054073 13654546140 011234 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "ninja.h" #include #include #include #include #include #include #include #include #include #include "command.h" #include "dep.h" #include "eval.h" #include "file_cache.h" #include "fileutil.h" #include "find.h" #include "flags.h" #include "func.h" #include "io.h" #include "log.h" #include "stats.h" #include "string_piece.h" #include "stringprintf.h" #include "strutil.h" #include "thread_pool.h" #include "timeutil.h" #include "var.h" #include "version.h" static size_t FindCommandLineFlag(StringPiece cmd, StringPiece name) { const size_t found = cmd.find(name); if (found == string::npos || found == 0) return string::npos; return found; } static StringPiece FindCommandLineFlagWithArg(StringPiece cmd, StringPiece name) { size_t index = FindCommandLineFlag(cmd, name); if (index == string::npos) return StringPiece(); StringPiece val = TrimLeftSpace(cmd.substr(index + name.size())); index = val.find(name); while (index != string::npos) { val = TrimLeftSpace(val.substr(index + name.size())); index = val.find(name); } index = val.find_first_of(" \t"); return val.substr(0, index); } static bool StripPrefix(StringPiece p, StringPiece* s) { if (!HasPrefix(*s, p)) return false; *s = s->substr(p.size()); return true; } size_t GetGomaccPosForAndroidCompileCommand(StringPiece cmdline) { size_t index = cmdline.find(' '); if (index == string::npos) return string::npos; StringPiece cmd = cmdline.substr(0, index); if (HasSuffix(cmd, "ccache")) { index++; size_t pos = GetGomaccPosForAndroidCompileCommand(cmdline.substr(index)); return pos == string::npos ? string::npos : pos + index; } if (!StripPrefix("prebuilts/", &cmd)) return string::npos; if (!StripPrefix("gcc/", &cmd) && !StripPrefix("clang/", &cmd)) return string::npos; if (!HasSuffix(cmd, "gcc") && !HasSuffix(cmd, "g++") && !HasSuffix(cmd, "clang") && !HasSuffix(cmd, "clang++")) { return string::npos; } StringPiece rest = cmdline.substr(index); return rest.find(" -c ") != string::npos ? 
0 : string::npos; } static bool GetDepfileFromCommandImpl(StringPiece cmd, string* out) { if ((FindCommandLineFlag(cmd, " -MD") == string::npos && FindCommandLineFlag(cmd, " -MMD") == string::npos) || FindCommandLineFlag(cmd, " -c") == string::npos) { return false; } StringPiece mf = FindCommandLineFlagWithArg(cmd, " -MF"); if (!mf.empty()) { mf.AppendToString(out); return true; } StringPiece o = FindCommandLineFlagWithArg(cmd, " -o"); if (o.empty()) { ERROR("Cannot find the depfile in %s", cmd.as_string().c_str()); return false; } StripExt(o).AppendToString(out); *out += ".d"; return true; } bool GetDepfileFromCommand(string* cmd, string* out) { CHECK(!cmd->empty()); if (!GetDepfileFromCommandImpl(*cmd, out)) return false; // A hack for Android - llvm-rs-cc seems not to emit a dep file. if (cmd->find("bin/llvm-rs-cc ") != string::npos) { return false; } // TODO: A hack for Makefiles generated by automake. // A hack for Android to get .P files instead of .d. string p; StripExt(*out).AppendToString(&p); p += ".P"; if (cmd->find(p) != string::npos) { const string rm_f = "; rm -f " + *out; const size_t found = cmd->find(rm_f); if (found == string::npos) { ERROR("Cannot find removal of .d file: %s", cmd->c_str()); } cmd->erase(found, rm_f.size()); return true; } // A hack for Android. For .s files, GCC does not use C // preprocessor, so it ignores -MF flag. 
string as = "/"; StripExt(Basename(*out)).AppendToString(&as); as += ".s"; if (cmd->find(as) != string::npos) { return false; } *cmd += "&& cp "; *cmd += *out; *cmd += ' '; *cmd += *out; *cmd += ".tmp "; *out += ".tmp"; return true; } struct NinjaNode { const DepNode* node; vector commands; int rule_id; }; class NinjaGenerator { public: NinjaGenerator(Evaluator* ev, double start_time) : ce_(ev), ev_(ev), fp_(NULL), rule_id_(0), start_time_(start_time), default_target_(NULL) { ev_->set_avoid_io(true); shell_ = EscapeNinja(ev->GetShell()); shell_flags_ = EscapeNinja(ev->GetShellFlag()); const string use_goma_str = ev->EvalVar(Intern("USE_GOMA")); use_goma_ = !(use_goma_str.empty() || use_goma_str == "false"); if (g_flags.goma_dir) gomacc_ = StringPrintf("%s/gomacc ", g_flags.goma_dir); GetExecutablePath(&kati_binary_); } ~NinjaGenerator() { ev_->set_avoid_io(false); for (NinjaNode* nn : nodes_) delete nn; } void Generate(const vector& nodes, const string& orig_args) { unlink(GetNinjaStampFilename().c_str()); PopulateNinjaNodes(nodes); GenerateNinja(); GenerateShell(); GenerateStamp(orig_args); } static string GetStampTempFilename() { return GetFilename(".kati_stamp%s.tmp"); } static string GetFilename(const char* fmt) { string r = g_flags.ninja_dir ? g_flags.ninja_dir : "."; r += '/'; r += StringPrintf(fmt, g_flags.ninja_suffix ? g_flags.ninja_suffix : ""); return r; } private: void PopulateNinjaNodes(const vector& nodes) { ScopedTimeReporter tr("ninja gen (eval)"); for (auto const& node : nodes) { PopulateNinjaNode(node.second); } } void PopulateNinjaNode(DepNode* node) { if (done_.exists(node->output)) { return; } done_.insert(node->output); // A hack to exclude out phony target in Android. If this exists, // "ninja -t clean" tries to remove this directory and fails. 
if (g_flags.detect_android_echo && node->output.str() == "out") return; // This node is a leaf node if (!node->has_rule && !node->is_phony) { return; } NinjaNode* nn = new NinjaNode; nn->node = node; ce_.Eval(node, &nn->commands); nn->rule_id = nn->commands.empty() ? -1 : rule_id_++; nodes_.push_back(nn); for (auto const& d : node->deps) { PopulateNinjaNode(d.second); } for (auto const& d : node->order_onlys) { PopulateNinjaNode(d.second); } } StringPiece TranslateCommand(const char* in, string* cmd_buf) { const size_t orig_size = cmd_buf->size(); bool prev_backslash = false; // Set space as an initial value so the leading comment will be // stripped out. char prev_char = ' '; char quote = 0; for (; *in; in++) { switch (*in) { case '#': if (quote == 0 && isspace(prev_char)) { while (in[1] && *in != '\n') in++; } else { *cmd_buf += *in; } break; case '\'': case '"': case '`': if (quote) { if (quote == *in) quote = 0; } else if (!prev_backslash) { quote = *in; } *cmd_buf += *in; break; case '$': *cmd_buf += "$$"; break; case '\n': if (prev_backslash) { cmd_buf->resize(cmd_buf->size() - 1); } else { *cmd_buf += ' '; } break; case '\\': *cmd_buf += '\\'; break; default: *cmd_buf += *in; } if (*in == '\\') { prev_backslash = !prev_backslash; } else { prev_backslash = false; } prev_char = *in; } if (prev_backslash) { cmd_buf->resize(cmd_buf->size() - 1); } while (true) { char c = (*cmd_buf)[cmd_buf->size() - 1]; if (!isspace(c) && c != ';') break; cmd_buf->resize(cmd_buf->size() - 1); } return StringPiece(cmd_buf->data() + orig_size, cmd_buf->size() - orig_size); } bool IsOutputMkdir(const char* name, StringPiece cmd) { if (!HasPrefix(cmd, "mkdir -p ")) { return false; } cmd = cmd.substr(9, cmd.size()); if (cmd.get(cmd.size() - 1) == '/') { cmd = cmd.substr(0, cmd.size() - 1); } StringPiece dir = Dirname(name); if (cmd == dir) { return true; } return false; } bool GetDescriptionFromCommand(StringPiece cmd, string* out) { if (!HasPrefix(cmd, "echo ")) { return false; } 
cmd = cmd.substr(5, cmd.size()); bool prev_backslash = false; char quote = 0; string out_buf; // Strip outer quotes, and fail if it is not a single echo command for (StringPiece::iterator in = cmd.begin(); in != cmd.end(); in++) { if (prev_backslash) { prev_backslash = false; out_buf += *in; } else if (*in == '\\') { prev_backslash = true; out_buf += *in; } else if (quote) { if (*in == quote) { quote = 0; } else { out_buf += *in; } } else { switch (*in) { case '\'': case '"': case '`': quote = *in; break; case '<': case '>': case '&': case '|': case ';': return false; default: out_buf += *in; } } } *out = out_buf; return true; } bool GenShellScript(const char* name, const vector& commands, string* cmd_buf, string* description) { bool got_descritpion = false; bool use_gomacc = false; auto command_count = commands.size(); for (const Command* c : commands) { size_t cmd_begin = cmd_buf->size(); if (!cmd_buf->empty()) { *cmd_buf += " && "; } const char* in = c->cmd.c_str(); while (isspace(*in)) in++; bool needs_subshell = (command_count > 1 || c->ignore_error); if (needs_subshell) *cmd_buf += '('; size_t cmd_start = cmd_buf->size(); StringPiece translated = TranslateCommand(in, cmd_buf); if (g_flags.detect_android_echo && !got_descritpion && !c->echo && GetDescriptionFromCommand(translated, description)) { got_descritpion = true; translated.clear(); } else if (IsOutputMkdir(name, translated) && !c->echo && cmd_begin == 0) { translated.clear(); } if (translated.empty()) { cmd_buf->resize(cmd_begin); command_count -= 1; continue; } else if (g_flags.goma_dir) { size_t pos = GetGomaccPosForAndroidCompileCommand(translated); if (pos != string::npos) { cmd_buf->insert(cmd_start + pos, gomacc_); use_gomacc = true; } } else if (translated.find("/gomacc") != string::npos) { use_gomacc = true; } if (c->ignore_error) { *cmd_buf += " ; true"; } if (needs_subshell) *cmd_buf += " )"; } return (use_goma_ || g_flags.remote_num_jobs || g_flags.goma_dir) && !use_gomacc; } bool 
// (return type "bool" is at the end of the previous chunk)
// Determines the depfile for |node|: an explicit .KATI_DEPFILE variable wins;
// otherwise, when --detect_depfiles is on, infer it from the command line.
GetDepfile(const DepNode* node, string* cmd_buf, string* depfile) {
  if (node->depfile_var) {
    node->depfile_var->Eval(ev_, depfile);
    return true;
  }
  if (!g_flags.detect_depfiles) return false;
  // GetDepfileFromCommand expects a trailing separator; add a space, then
  // undo it so *cmd_buf is unchanged on return.
  *cmd_buf += ' ';
  bool result = GetDepfileFromCommand(cmd_buf, depfile);
  cmd_buf->resize(cmd_buf->size() - 1);
  return result;
}

// Emits "depfile =" / "deps = gcc" lines for |nn| when a depfile is found.
void EmitDepfile(NinjaNode* nn, string* cmd_buf, ostringstream* o) {
  const DepNode* node = nn->node;
  string depfile;
  if (!GetDepfile(node, cmd_buf, &depfile)) return;
  *o << " depfile = " << depfile << "\n";
  *o << " deps = gcc\n";
}

// Emits the ninja rule (if the node has commands) and build statement for
// one node into *o. Nodes whose output starts with '.' are internal and
// skipped.
// NOTE(review): "const vector& commands" lost its template argument in
// extraction — presumably vector<Command*>; confirm against ninja.cc.
void EmitNode(NinjaNode* nn, ostringstream* o) {
  const DepNode* node = nn->node;
  const vector& commands = nn->commands;
  string rule_name = "phony";
  bool use_local_pool = false;
  if (node->output.get(0) == '.') {
    return;
  }
  if (g_flags.enable_debug) {
    *o << "# " << (node->loc.filename ? node->loc.filename : "(null)") << ':'
       << node->loc.lineno << "\n";
  }
  if (!commands.empty()) {
    rule_name = StringPrintf("rule%d", nn->rule_id);
    *o << "rule " << rule_name << "\n";
    string description = "build $out";
    string cmd_buf;
    use_local_pool |= GenShellScript(node->output.c_str(), commands, &cmd_buf,
                                     &description);
    *o << " description = " << description << "\n";
    EmitDepfile(nn, &cmd_buf, o);

    // It seems Linux is OK with ~130kB and Mac's limit is ~250kB.
    // TODO: Find this number automatically.
    // (continuation of EmitNode) Commands too long for the kernel arg limit
    // go through a response file executed by the shell.
    if (cmd_buf.size() > 100 * 1000) {
      *o << " rspfile = $out.rsp\n";
      *o << " rspfile_content = " << cmd_buf << "\n";
      *o << " command = " << shell_ << " $out.rsp\n";
    } else {
      EscapeShell(&cmd_buf);
      *o << " command = " << shell_ << ' ' << shell_flags_ << " \"" << cmd_buf
         << "\"\n";
    }
    if (node->is_restat) {
      *o << " restat = 1\n";
    }
  }
  EmitBuild(nn, rule_name, use_local_pool, o);
}

// Escapes '$', ':' and ' ' for ninja by prefixing '$'. Returns s unchanged
// when no escaping is needed.
string EscapeNinja(const string& s) const {
  if (s.find_first_of("$: ") == string::npos) return s;
  string r;
  for (char c : s) {
    switch (c) {
      case '$':
      case ':':
      case ' ':
        r += '$';
        // Deliberate fallthrough: after emitting the '$' escape, the
        // character itself is appended by the default case.
#if defined(__has_cpp_attribute) && __has_cpp_attribute(clang::fallthrough)
        [[clang::fallthrough]];
#endif
      default:
        r += c;
    }
  }
  return r;
}

// Convenience wrapper: ninja-escape a target symbol's name.
string EscapeBuildTarget(Symbol s) const {
  return EscapeNinja(s.str());
}

// Emits the "build <out>: <rule> <deps> || <order-onlys>" statement plus
// pool assignment, and records the default target (under mu_, since nodes
// are emitted from a thread pool).
void EmitBuild(NinjaNode* nn,
               const string& rule_name,
               bool use_local_pool,
               ostringstream* o) {
  const DepNode* node = nn->node;
  string target = EscapeBuildTarget(node->output);
  *o << "build " << target;
  if (!node->implicit_outputs.empty()) {
    *o << " |";
    for (Symbol output : node->implicit_outputs) {
      *o << " " << EscapeBuildTarget(output);
    }
  }
  *o << ": " << rule_name;
  // NOTE(review): order_onlys below is declared but never used in this
  // function (the real order-only deps come from node->order_onlys); its
  // element type was also stripped by extraction.
  vector order_onlys;
  if (node->is_phony) {
    *o << " _kati_always_build_";
  }
  for (auto const& d : node->deps) {
    *o << " " << EscapeBuildTarget(d.first).c_str();
  }
  if (!node->order_onlys.empty()) {
    *o << " ||";
    for (auto const& d : node->order_onlys) {
      *o << " " << EscapeBuildTarget(d.first).c_str();
    }
  }
  *o << "\n";
  if (node->ninja_pool_var) {
    string pool;
    node->ninja_pool_var->Eval(ev_, &pool);
    *o << " pool = " << pool << "\n";
  } else if (use_local_pool) {
    *o << " pool = local_pool\n";
  }
  if (node->is_default_target) {
    // unique_lock's template argument (<mutex>) was stripped by extraction.
    unique_lock lock(mu_);
    default_target_ = node;
  }
}

static string GetEnvScriptFilename() {
  return GetFilename("env%s.sh");
}

// Writes build.ninja: header, prelude (pools), all build statements
// (emitted in parallel), and the default target.
void GenerateNinja() {
  ScopedTimeReporter tr("ninja gen (emit)");
  fp_ = fopen(GetNinjaFilename().c_str(), "wb");
  if (fp_ == NULL) PERROR("fopen(build.ninja) failed");
  fprintf(fp_, "# Generated by kati %s\n",
          kGitVersion);
  fprintf(fp_, "\n");
  // Record the environment variables the build read, for regen checks and
  // human inspection.
  if (!used_envs_.empty()) {
    fprintf(fp_, "# Environment variables used:\n");
    for (const auto& p : used_envs_) {
      fprintf(fp_, "# %s=%s\n", p.first.c_str(), p.second.c_str());
    }
    fprintf(fp_, "\n");
  }
  if (!g_flags.no_ninja_prelude) {
    if (g_flags.ninja_dir) {
      fprintf(fp_, "builddir = %s\n\n", g_flags.ninja_dir);
    }
    // local_pool throttles non-remote jobs; _kati_always_build_ forces
    // phony targets to rebuild.
    fprintf(fp_, "pool local_pool\n");
    fprintf(fp_, " depth = %d\n\n", g_flags.num_jobs);
    fprintf(fp_, "build _kati_always_build_: phony\n\n");
  }
  // Emit nodes in parallel: split into ~10 tasks per job, each task writing
  // into its own buffer so output order stays deterministic.
  // NOTE(review): template arguments were stripped by extraction on the
  // unique_ptr/vector/static_cast lines below; confirm against ninja.cc.
  unique_ptr tp(NewThreadPool(g_flags.num_jobs));
  CHECK(g_flags.num_jobs);
  int num_nodes_per_task = nodes_.size() / (g_flags.num_jobs * 10) + 1;
  int num_tasks = nodes_.size() / num_nodes_per_task + 1;
  vector bufs(num_tasks);
  for (int i = 0; i < num_tasks; i++) {
    tp->Submit([this, i, num_nodes_per_task, &bufs]() {
      int l = min(num_nodes_per_task * (i + 1), static_cast(nodes_.size()));
      for (int j = num_nodes_per_task * i; j < l; j++) {
        EmitNode(nodes_[j], &bufs[i]);
      }
    });
  }
  tp->Wait();
  if (!g_flags.generate_empty_ninja) {
    for (const ostringstream& buf : bufs) {
      fprintf(fp_, "%s", buf.str().c_str());
    }
  }
  SymbolSet used_env_vars(Vars::used_env_vars());
  // PATH changes $(shell).
  // (continuation of GenerateNinja) PATH is always tracked since it affects
  // every $(shell) invocation.
  used_env_vars.insert(Intern("PATH"));
  for (Symbol e : used_env_vars) {
    // NOTE(review): getenv may return NULL here; presumably StringPiece
    // tolerates that — confirm its NULL handling.
    StringPiece val(getenv(e.c_str()));
    used_envs_.emplace(e.str(), val.as_string());
  }
  // Default target: kati's own default (or all targets with --gen_all_targets)
  // unless explicit targets were given on the command line.
  string default_targets;
  if (g_flags.targets.empty() || g_flags.gen_all_targets) {
    CHECK(default_target_);
    default_targets = EscapeBuildTarget(default_target_->output);
  } else {
    for (Symbol s : g_flags.targets) {
      if (!default_targets.empty()) default_targets += ' ';
      default_targets += EscapeBuildTarget(s);
    }
  }
  if (!g_flags.generate_empty_ninja) {
    fprintf(fp_, "\n");
    fprintf(fp_, "default %s\n", default_targets.c_str());
  }
  fclose(fp_);
}

// Writes env.sh (exports/unsets reproducing the make environment) and
// ninja.sh (a wrapper that sources env.sh and execs ninja).
void GenerateShell() {
  FILE* fp = fopen(GetEnvScriptFilename().c_str(), "wb");
  if (fp == NULL) PERROR("fopen(env.sh) failed");
  fprintf(fp, "#!/bin/sh\n");
  fprintf(fp, "# Generated by kati %s\n", kGitVersion);
  fprintf(fp, "\n");
  for (const auto& p : ev_->exports()) {
    if (p.second) {
      const string val = ev_->EvalVar(p.first);
      fprintf(fp, "export '%s'='%s'\n", p.first.c_str(), val.c_str());
    } else {
      fprintf(fp, "unset '%s'\n", p.first.c_str());
    }
  }
  fclose(fp);

  fp = fopen(GetNinjaShellScriptFilename().c_str(), "wb");
  if (fp == NULL) PERROR("fopen(ninja.sh) failed");
  fprintf(fp, "#!/bin/sh\n");
  fprintf(fp, "# Generated by kati %s\n", kGitVersion);
  fprintf(fp, "\n");
  fprintf(fp, ". %s\n", GetEnvScriptFilename().c_str());
  fprintf(fp, "exec ninja -f %s ", GetNinjaFilename().c_str());
  // Remote execution gets a much higher parallelism (-j500 under goma).
  if (g_flags.remote_num_jobs > 0) {
    fprintf(fp, "-j%d ", g_flags.remote_num_jobs);
  } else if (g_flags.goma_dir) {
    fprintf(fp, "-j500 ");
  }
  fprintf(fp, "\"$@\"\n");
  fclose(fp);
  if (chmod(GetNinjaShellScriptFilename().c_str(), 0755) != 0)
    PERROR("chmod ninja.sh failed");
}

// Writes the regen stamp file: start time, makefiles read, undefined vars
// used, environment, glob cache and shell-command results — everything
// regen.cc needs to decide whether build.ninja is stale. Written to a temp
// name and renamed into place so a partial write never looks valid.
// NOTE(review): container template arguments below (unordered_set,
// unordered_map, vector) were stripped by extraction; confirm against
// ninja.cc.
void GenerateStamp(const string& orig_args) {
  FILE* fp = fopen(GetStampTempFilename().c_str(), "wb");
  CHECK(fp);
  size_t r = fwrite(&start_time_, sizeof(start_time_), 1, fp);
  CHECK(r == 1);

  unordered_set makefiles;
  MakefileCacheManager::Get()->GetAllFilenames(&makefiles);
  // +1 accounts for the kati binary itself, dumped first.
  DumpInt(fp, makefiles.size() + 1);
  DumpString(fp, kati_binary_);
  for (const string& makefile : makefiles) {
    DumpString(fp, makefile);
  }

  DumpInt(fp, Evaluator::used_undefined_vars().size());
  for (Symbol v : Evaluator::used_undefined_vars()) {
    DumpString(fp, v.str());
  }
  DumpInt(fp, used_envs_.size());
  for (const auto& p : used_envs_) {
    DumpString(fp, p.first);
    DumpString(fp, p.second);
  }

  const unordered_map*>& globs = GetAllGlobCache();
  DumpInt(fp, globs.size());
  for (const auto& p : globs) {
    DumpString(fp, p.first);
    const vector& files = *p.second;
#if 0
    unordered_set dirs;
    GetReadDirs(p.first, files, &dirs);
    DumpInt(fp, dirs.size());
    for (const string& dir : dirs) {
      DumpString(fp, dir);
    }
#endif
    DumpInt(fp, files.size());
    for (const string& file : files) {
      DumpString(fp, file);
    }
  }

  const vector& crs = GetShellCommandResults();
  DumpInt(fp, crs.size());
  for (CommandResult* cr : crs) {
    DumpInt(fp, static_cast(cr->op));
    DumpString(fp, cr->shell);
    DumpString(fp, cr->shellflag);
    DumpString(fp, cr->cmd);
    DumpString(fp, cr->result);
    if (cr->op == CommandOp::FIND) {
      // Record find dirs that were missing, so their creation triggers regen.
      vector missing_dirs;
      for (StringPiece fd : cr->find->finddirs) {
        const string& d = ConcatDir(cr->find->chdir, fd);
        if (!Exists(d)) missing_dirs.push_back(d);
      }
      DumpInt(fp, missing_dirs.size());
      for (const string& d : missing_dirs) {
        DumpString(fp, d);
      }
      DumpInt(fp,
cr->find->found_files->size()); for (StringPiece s : *cr->find->found_files) { DumpString(fp, ConcatDir(cr->find->chdir, s)); } DumpInt(fp, cr->find->read_dirs->size()); for (StringPiece s : *cr->find->read_dirs) { DumpString(fp, ConcatDir(cr->find->chdir, s)); } } } DumpString(fp, orig_args); fclose(fp); rename(GetStampTempFilename().c_str(), GetNinjaStampFilename().c_str()); } CommandEvaluator ce_; Evaluator* ev_; FILE* fp_; SymbolSet done_; int rule_id_; bool use_goma_; string gomacc_; string shell_; string shell_flags_; map used_envs_; string kati_binary_; const double start_time_; vector nodes_; mutex mu_; const DepNode* default_target_; }; string GetNinjaFilename() { return NinjaGenerator::GetFilename("build%s.ninja"); } string GetNinjaShellScriptFilename() { return NinjaGenerator::GetFilename("ninja%s.sh"); } string GetNinjaStampFilename() { return NinjaGenerator::GetFilename(".kati_stamp%s"); } void GenerateNinja(const vector& nodes, Evaluator* ev, const string& orig_args, double start_time) { NinjaGenerator ng(ev, start_time); ng.Generate(nodes, orig_args); } ninja.go0100644 0000000 0000000 00000043345 13654546140 011254 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package kati

import (
	"bytes"
	"fmt"
	"os"
	"path/filepath"
	"regexp"
	"runtime"
	"sort"
	"strings"
	"time"

	"github.com/golang/glog"
)

// nodeState tracks the emit status of each output during ninja generation.
type nodeState int

const (
	nodeInit    nodeState = iota // not visited
	nodeVisit                    // visited
	nodeFile                     // visited & file exists
	nodeAlias                    // visited & alias for other target
	nodeMissing                  // visited & no target for this output
	nodeBuild                    // visited & build emitted
)

// String returns a human-readable name for s (used in debug logs).
func (s nodeState) String() string {
	switch s {
	case nodeInit:
		return "node-init"
	case nodeVisit:
		return "node-visit"
	case nodeFile:
		return "node-file"
	case nodeAlias:
		return "node-alias"
	case nodeMissing:
		return "node-missing"
	case nodeBuild:
		return "node-build"
	default:
		return fmt.Sprintf("node-unknown[%d]", int(s))
	}
}

// NinjaGenerator generates ninja build files from DepGraph.
type NinjaGenerator struct {
	// Args is original arguments to generate the ninja file.
	Args []string
	// Suffix is suffix for generated files.
	Suffix string
	// GomaDir is goma directory. If empty, goma will not be used.
	GomaDir string
	// DetectAndroidEcho detects echo as description.
DetectAndroidEcho bool f *os.File nodes []*DepNode exports map[string]bool ctx *execContext ruleID int done map[string]nodeState } func (n *NinjaGenerator) init(g *DepGraph) { g.resolveVPATH() n.nodes = g.nodes n.exports = g.exports n.ctx = newExecContext(g.vars, g.vpaths, true) n.done = make(map[string]nodeState) } func getDepfileImpl(ss string) (string, error) { tss := ss + " " if (!strings.Contains(tss, " -MD ") && !strings.Contains(tss, " -MMD ")) || !strings.Contains(tss, " -c ") { return "", nil } mfIndex := strings.Index(ss, " -MF ") if mfIndex >= 0 { mf := trimLeftSpace(ss[mfIndex+4:]) if strings.Index(mf, " -MF ") >= 0 { return "", fmt.Errorf("Multiple output file candidates in %s", ss) } mfEndIndex := strings.IndexAny(mf, " \t\n") if mfEndIndex >= 0 { mf = mf[:mfEndIndex] } return mf, nil } outIndex := strings.Index(ss, " -o ") if outIndex < 0 { return "", fmt.Errorf("Cannot find the depfile in %s", ss) } out := trimLeftSpace(ss[outIndex+4:]) if strings.Index(out, " -o ") >= 0 { return "", fmt.Errorf("Multiple output file candidates in %s", ss) } outEndIndex := strings.IndexAny(out, " \t\n") if outEndIndex >= 0 { out = out[:outEndIndex] } return stripExt(out) + ".d", nil } // getDepfile gets depfile from cmdline, and returns cmdline and depfile. func getDepfile(cmdline string) (string, string, error) { // A hack for Android - llvm-rs-cc seems not to emit a dep file. if strings.Contains(cmdline, "bin/llvm-rs-cc ") { return cmdline, "", nil } depfile, err := getDepfileImpl(cmdline) if depfile == "" || err != nil { return cmdline, depfile, err } // A hack for Makefiles generated by automake. mvCmd := "(mv -f " + depfile + " " if i := strings.LastIndex(cmdline, mvCmd); i >= 0 { rest := cmdline[i+len(mvCmd):] ei := strings.IndexByte(rest, ')') if ei < 0 { return cmdline, "", fmt.Errorf("unbalanced parenthes? 
%s", cmdline) } cmdline = cmdline[:i] + "(cp -f " + depfile + " " + rest return cmdline, depfile, nil } // A hack for Android to get .P files instead of .d. p := stripExt(depfile) + ".P" if strings.Contains(cmdline, p) { rmfCmd := "; rm -f " + depfile ncmdline := strings.Replace(cmdline, rmfCmd, "", 1) if ncmdline == cmdline { return cmdline, "", fmt.Errorf("cannot find removal of .d file: %s", cmdline) } return ncmdline, p, nil } // A hack for Android. For .s files, GCC does not use // C preprocessor, so it ignores -MF flag. as := "/" + stripExt(filepath.Base(depfile)) + ".s" if strings.Contains(cmdline, as) { return cmdline, "", nil } cmdline += fmt.Sprintf(" && cp %s %s.tmp", depfile, depfile) depfile += ".tmp" return cmdline, depfile, nil } func trimTailingSlash(s string) string { if s == "" { return s } if s[len(s)-1] != '\\' { return s } // drop single trailing slash - multiline_arg.mk if len(s) > 2 && s[len(s)-2] != '\\' { return s[:len(s)-1] } // preserve two trailing slash - escaped_backslash.mk return s } func stripShellComment(s string) string { if strings.IndexByte(s, '#') < 0 { // Fast path. return s } // set space as an initial value so the leading comment will be // stripped out. lastch := rune(' ') var escape bool var quote rune var skip rune var cmdsubst []rune var buf bytes.Buffer Loop: for _, c := range s { if skip != 0 { if skip != c { continue Loop } if len(cmdsubst) > 0 && cmdsubst[len(cmdsubst)-1] == skip { cmdsubst = cmdsubst[:len(cmdsubst)-1] } skip = 0 } if quote != 0 { if quote == c && (quote == '\'' || !escape) { quote = 0 } } else if !escape { if c == '#' && isWhitespace(lastch) { if len(cmdsubst) == 0 { // strip comment until the end of line. skip = '\n' continue Loop } // strip comment until the end of command subst. 
skip = cmdsubst[len(cmdsubst)-1] continue Loop } else if c == '\'' || c == '"' { quote = c } else if lastch == '$' && c == '(' { cmdsubst = append(cmdsubst, ')') } else if c == '`' { cmdsubst = append(cmdsubst, '`') } } if escape { escape = false } else if c == '\\' { escape = true } else { escape = false } lastch = c buf.WriteRune(c) } return buf.String() } var ccRE = regexp.MustCompile(`^prebuilts/(gcc|clang)/.*(gcc|g\+\+|clang|clang\+\+) .* ?-c `) func gomaCmdForAndroidCompileCmd(cmd string) (string, bool) { i := strings.Index(cmd, " ") if i < 0 { return cmd, false } driver := cmd[:i] if strings.HasSuffix(driver, "ccache") { return gomaCmdForAndroidCompileCmd(cmd[i+1:]) } return cmd, ccRE.MatchString(cmd) } func descriptionFromCmd(cmd string) (string, bool) { if !strings.HasPrefix(cmd, "echo") || !isWhitespace(rune(cmd[4])) { return "", false } echoarg := cmd[5:] // strip outer quotes, and fail if it is not a single echo command. var buf bytes.Buffer var escape bool var quote rune for _, c := range echoarg { if escape { escape = false buf.WriteRune(c) continue } if c == '\\' { escape = true buf.WriteRune(c) continue } if quote != 0 { if c == quote { quote = 0 continue } buf.WriteRune(c) continue } switch c { case '\'', '"', '`': quote = c case '<', '>', '&', '|', ';': return "", false default: buf.WriteRune(c) } } return buf.String(), true } func (n *NinjaGenerator) genShellScript(runners []runner) (cmd string, desc string, useLocalPool bool) { const defaultDesc = "build $out" var useGomacc bool var buf bytes.Buffer for i, r := range runners { if i > 0 { if runners[i-1].ignoreError { buf.WriteString(" ; ") } else { buf.WriteString(" && ") } } cmd := trimTailingSlash(r.cmd) cmd = stripShellComment(cmd) cmd = trimLeftSpace(cmd) cmd = strings.Replace(cmd, "\\\n\t", "", -1) cmd = strings.Replace(cmd, "\\\n", "", -1) cmd = strings.TrimRight(cmd, " \t\n;") cmd = escapeNinja(cmd) if cmd == "" { cmd = "true" } glog.V(2).Infof("cmd %q=>%q", r.cmd, cmd) if n.GomaDir != "" 
{ rcmd, ok := gomaCmdForAndroidCompileCmd(cmd) if ok { cmd = fmt.Sprintf("%s/gomacc %s", n.GomaDir, rcmd) useGomacc = true } } if n.DetectAndroidEcho && desc == "" { d, ok := descriptionFromCmd(cmd) if ok { desc = d cmd = "true" } } needsSubShell := i > 0 || len(runners) > 1 if cmd[0] == '(' { needsSubShell = false } if needsSubShell { buf.WriteByte('(') } buf.WriteString(cmd) if i == len(runners)-1 && r.ignoreError { buf.WriteString(" ; true") } if needsSubShell { buf.WriteByte(')') } } if desc == "" { desc = defaultDesc } return buf.String(), desc, n.GomaDir != "" && !useGomacc } func (n *NinjaGenerator) genRuleName() string { ruleName := fmt.Sprintf("rule%d", n.ruleID) n.ruleID++ return ruleName } func (n *NinjaGenerator) emitBuild(output, rule, inputs, orderOnlys string) { fmt.Fprintf(n.f, "build %s: %s", escapeBuildTarget(output), rule) if inputs != "" { fmt.Fprintf(n.f, " %s", inputs) } if orderOnlys != "" { fmt.Fprintf(n.f, " || %s", orderOnlys) } } func escapeBuildTarget(s string) string { i := strings.IndexAny(s, "$: \\") if i < 0 { return s } // unescapeInput only "\ ", "\=" unescape as " ", "=". // TODO(ukai): which char should unescape, which should not here? 
	// (continuation of escapeBuildTarget) esc holds a pending backslash that
	// is emitted only if the next char doesn't turn it into an escape pair.
	var esc rune
	var buf bytes.Buffer
	for _, c := range s {
		switch c {
		case '\\':
			esc = c
			continue
		case '$', ':', ' ':
			esc = 0
			buf.WriteByte('$')
		}
		if esc != 0 {
			buf.WriteRune(esc)
			esc = 0
		}
		buf.WriteRune(c)
	}
	if esc != 0 {
		buf.WriteRune(esc)
	}
	return buf.String()
}

// dependency returns the escaped, de-duplicated normal and order-only
// dependency lists of node, space-joined. A target appearing in Deps is
// not repeated in OrderOnlys.
func (n *NinjaGenerator) dependency(node *DepNode) (string, string) {
	var deps []string
	seen := make(map[string]bool)
	for _, d := range node.Deps {
		t := escapeBuildTarget(d.Output)
		if seen[t] {
			continue
		}
		deps = append(deps, t)
		seen[t] = true
	}
	var orderOnlys []string
	for _, d := range node.OrderOnlys {
		t := escapeBuildTarget(d.Output)
		if seen[t] {
			continue
		}
		orderOnlys = append(orderOnlys, t)
		seen[t] = true
	}
	return strings.Join(deps, " "), strings.Join(orderOnlys, " ")
}

// escapeNinja doubles '$' so ninja treats it literally.
func escapeNinja(s string) string {
	return strings.Replace(s, "$", "$$", -1)
}

// escapeShell backslash-escapes characters special inside a double-quoted
// shell string; "$$" (already ninja-escaped) is kept as-is via lastDollar.
func escapeShell(s string) string {
	i := strings.IndexAny(s, "$`!\\\"")
	if i < 0 {
		return s
	}
	var buf bytes.Buffer
	var lastDollar bool
	for _, c := range s {
		switch c {
		case '$':
			if lastDollar {
				buf.WriteRune(c)
				lastDollar = false
				continue
			}
			buf.WriteString(`\$`)
			lastDollar = true
			continue
		case '`', '"', '!', '\\':
			buf.WriteByte('\\')
		}
		buf.WriteRune(c)
		lastDollar = false
	}
	return buf.String()
}

// ninjaVars substitutes literal values back to ninja variables (e.g. the
// input list becomes ${in}) in s. Values containing path dot segments or
// '$' are skipped because ninja rewrites those forms.
func (n *NinjaGenerator) ninjaVars(s string, nv [][]string, esc func(string) string) string {
	for _, v := range nv {
		k, v := v[0], v[1]
		if v == "" {
			continue
		}
		if strings.Contains(v, "/./") || strings.Contains(v, "/../") || strings.Contains(v, "$") {
			// ninja will normalize paths (/./, /../), so keep it as is
			// ninja will emit quoted string for $
			continue
		}
		if esc != nil {
			v = esc(v)
		}
		s = strings.Replace(s, v, k, -1)
	}
	return s
}

// emitNode writes the rule and build statement for node (then recurses into
// its dependencies), recording its state in n.done to avoid re-emission.
func (n *NinjaGenerator) emitNode(node *DepNode) error {
	output := node.Output
	if _, found := n.done[output]; found {
		return nil
	}
	n.done[output] = nodeVisit

	// A bare node with no commands/deps: classify it as an existing file,
	// an alias of its normalized path, or a missing target.
	if len(node.Cmds) == 0 && len(node.Deps) == 0 && len(node.OrderOnlys) == 0 && !node.IsPhony {
		if _, ok := n.ctx.vpaths.exists(output); ok {
			n.done[output] = nodeFile
			return nil
		}
		o :=
			filepath.Clean(output)
		if o != output {
			// if normalized target has been done, it marks as alias.
			if s, found := n.done[o]; found {
				glog.V(1).Infof("node %s=%s => %s=alias", o, s, node.Output)
				n.done[output] = nodeAlias
				return nil
			}
		}
		if node.Filename == "" {
			n.done[output] = nodeMissing
		}
		return nil
	}

	runners, _, err := createRunners(n.ctx, node)
	if err != nil {
		return err
	}
	ruleName := "phony"
	useLocalPool := false
	inputs, orderOnlys := n.dependency(node)
	if len(runners) > 0 {
		ruleName = n.genRuleName()
		fmt.Fprintf(n.f, "\n# rule for %q\n", node.Output)
		fmt.Fprintf(n.f, "rule %s\n", ruleName)
		ss, desc, ulp := n.genShellScript(runners)
		if ulp {
			useLocalPool = true
		}
		fmt.Fprintf(n.f, " description = %s\n", desc)
		cmdline, depfile, err := getDepfile(ss)
		if err != nil {
			return err
		}
		if depfile != "" {
			fmt.Fprintf(n.f, " depfile = %s\n", depfile)
			fmt.Fprintf(n.f, " deps = gcc\n")
		}
		// Map the literal input/output lists back to ${in}/${out} so the
		// emitted command stays short and readable.
		nv := [][]string{
			[]string{"${in}", inputs},
			[]string{"${out}", escapeNinja(output)},
		}
		// It seems Linux is OK with ~130kB.
		// TODO: Find this number automatically.
		// (continuation of emitNode) Overlong commands go through a response
		// file; otherwise the command is inlined, shell-escaped, into the rule.
		ArgLenLimit := 100 * 1000
		if len(cmdline) > ArgLenLimit {
			fmt.Fprintf(n.f, " rspfile = $out.rsp\n")
			cmdline = n.ninjaVars(cmdline, nv, nil)
			fmt.Fprintf(n.f, " rspfile_content = %s\n", cmdline)
			fmt.Fprintf(n.f, " command = %s $out.rsp\n", n.ctx.shell)
		} else {
			cmdline = escapeShell(cmdline)
			cmdline = n.ninjaVars(cmdline, nv, escapeShell)
			fmt.Fprintf(n.f, " command = %s -c \"%s\"\n", n.ctx.shell, cmdline)
		}
	}
	n.emitBuild(output, ruleName, inputs, orderOnlys)
	if useLocalPool {
		fmt.Fprintf(n.f, " pool = local_pool\n")
	}
	fmt.Fprintf(n.f, "\n")
	n.done[output] = nodeBuild

	// Recurse into dependencies after this node is marked done.
	for _, d := range node.Deps {
		err := n.emitNode(d)
		if err != nil {
			return err
		}
		glog.V(1).Infof("node %s dep node %q %s", node.Output, d.Output, n.done[d.Output])
	}
	for _, d := range node.OrderOnlys {
		err := n.emitNode(d)
		if err != nil {
			return err
		}
		glog.V(1).Infof("node %s order node %q %s", node.Output, d.Output, n.done[d.Output])
	}
	return nil
}

// emitRegenRules writes a generator rule that re-runs kati (with the
// original arguments) when any makefile it read changes. No-op when the
// generator was invoked without arguments.
// NOTE(review): the raw-string literal's line breaks were collapsed by
// extraction; reconstructed below to valid ninja syntax — confirm against
// the original ninja.go.
func (n *NinjaGenerator) emitRegenRules() error {
	if len(n.Args) == 0 {
		return nil
	}
	mkfiles, err := n.ctx.ev.EvaluateVar("MAKEFILE_LIST")
	if err != nil {
		return err
	}
	fmt.Fprintf(n.f, `
rule regen_ninja
 description = Regenerate ninja files due to dependency
 generator=1
 command=%s
`, strings.Join(n.Args, " "))
	fmt.Fprintf(n.f, "build %s: regen_ninja %s", n.ninjaName(), mkfiles)
	// TODO: Add dependencies to directories read by $(wildcard) or
	// $(shell find).
	// (continuation of emitRegenRules) regen also depends on the env list
	// when environment variables were read.
	if len(usedEnvs) > 0 {
		fmt.Fprintf(n.f, " %s", n.envlistName())
	}
	fmt.Fprintf(n.f, "\n\n")
	return nil
}

// shName returns the wrapper shell script filename (ninja<suffix>.sh).
func (n *NinjaGenerator) shName() string {
	return fmt.Sprintf("ninja%s.sh", n.Suffix)
}

// ninjaName returns the generated ninja filename (build<suffix>.ninja).
func (n *NinjaGenerator) ninjaName() string {
	return fmt.Sprintf("build%s.ninja", n.Suffix)
}

// envlistName returns the environment snapshot filename (.kati_env<suffix>).
func (n *NinjaGenerator) envlistName() string {
	return fmt.Sprintf(".kati_env%s", n.Suffix)
}

// generateEnvlist writes every environment variable the evaluation read,
// as %q=%q lines, so regen can detect environment changes.
func (n *NinjaGenerator) generateEnvlist() (err error) {
	f, err := os.Create(n.envlistName())
	if err != nil {
		return err
	}
	// Close error is propagated unless an earlier error already occurred.
	defer func() {
		cerr := f.Close()
		if err == nil {
			err = cerr
		}
	}()
	for k := range usedEnvs {
		v, err := n.ctx.ev.EvaluateVar(k)
		if err != nil {
			return err
		}
		fmt.Fprintf(f, "%q=%q\n", k, v)
	}
	return nil
}

// generateShell writes the executable wrapper script that reproduces the
// make environment (exports/unsets) and execs ninja on the generated file.
func (n *NinjaGenerator) generateShell() (err error) {
	f, err := os.Create(n.shName())
	if err != nil {
		return err
	}
	defer func() {
		cerr := f.Close()
		if err == nil {
			err = cerr
		}
	}()

	fmt.Fprintf(f, "#!/bin/bash\n")
	fmt.Fprintf(f, "# Generated by kati %s\n", gitVersion)
	fmt.Fprintln(f)
	fmt.Fprintln(f, `cd $(dirname "$0")`)
	if n.Suffix != "" {
		fmt.Fprintf(f, "if [ -f %s ]; then\n export $(cat %s)\nfi\n", n.envlistName(), n.envlistName())
	}
	for name, export := range n.exports {
		// export "a b"=c will error on bash
		// bash: export `a b=c': not a valid identifier
		if strings.ContainsAny(name, " \t\n\r") {
			glog.V(1).Infof("ignore export %q (export:%t)", name, export)
			continue
		}
		if export {
			v, err := n.ctx.ev.EvaluateVar(name)
			if err != nil {
				return err
			}
			fmt.Fprintf(f, "export %q=%q\n", name, v)
		} else {
			fmt.Fprintf(f, "unset %q\n", name)
		}
	}
	if n.GomaDir == "" {
		fmt.Fprintf(f, `exec ninja -f %s "$@"`+"\n", n.ninjaName())
	} else {
		// goma can run many compiles remotely, so raise parallelism.
		fmt.Fprintf(f, `exec ninja -f %s -j500 "$@"`+"\n", n.ninjaName())
	}

	return f.Chmod(0755)
}

// generateNinja writes the ninja file: header, used-env comment block,
// local_pool prelude (goma), regen rules, all build statements, phony
// stubs for missing targets, and the default target.
func (n *NinjaGenerator) generateNinja(defaultTarget string) (err error) {
	f, err := os.Create(n.ninjaName())
	if err != nil {
		return err
	}
	defer func() {
		cerr := f.Close()
		if err == nil {
			err = cerr
		}
	}()

	n.f = f
	fmt.Fprintf(n.f, "# Generated by kati %s\n",
		gitVersion)
	fmt.Fprintf(n.f, "\n")
	if len(usedEnvs) > 0 {
		fmt.Fprintln(n.f, "# Environment variables used:")
		// Sort names for deterministic output.
		var names []string
		for name := range usedEnvs {
			names = append(names, name)
		}
		sort.Strings(names)
		for _, name := range names {
			v, err := n.ctx.ev.EvaluateVar(name)
			if err != nil {
				return err
			}
			fmt.Fprintf(n.f, "# %q=%q\n", name, v)
		}
		fmt.Fprintf(n.f, "\n")
	}
	if n.GomaDir != "" {
		// local_pool throttles commands that can't go through goma.
		fmt.Fprintf(n.f, "pool local_pool\n")
		fmt.Fprintf(n.f, " depth = %d\n\n", runtime.NumCPU())
	}
	err = n.emitRegenRules()
	if err != nil {
		return err
	}

	// defining $out for $@ and $in for $^ here doesn't work well,
	// because these texts will be processed in escapeShell...
	for _, node := range n.nodes {
		err := n.emitNode(node)
		if err != nil {
			return err
		}
		glog.V(1).Infof("node %q %s", node.Output, n.done[node.Output])
	}

	// emit phony targets for visited nodes that are
	// - not existing file
	// - not alias for other targets.
	var nodes []string
	for node, state := range n.done {
		if state != nodeVisit {
			continue
		}
		nodes = append(nodes, node)
	}
	if len(nodes) > 0 {
		fmt.Fprintln(n.f)
		sort.Strings(nodes)
		for _, node := range nodes {
			n.emitBuild(node, "phony", "", "")
			fmt.Fprintln(n.f)
			n.done[node] = nodeBuild
		}
	}

	// emit default if the target was emitted.
	if defaultTarget != "" && n.done[defaultTarget] == nodeBuild {
		fmt.Fprintf(n.f, "\ndefault %s\n", escapeNinja(defaultTarget))
	}
	return nil
}

// Save generates build.ninja from DepGraph.
func (n *NinjaGenerator) Save(g *DepGraph, name string, targets []string) error { startTime := time.Now() n.init(g) err := n.generateEnvlist() if err != nil { return err } err = n.generateShell() if err != nil { return err } var defaultTarget string if len(targets) == 0 && len(g.nodes) > 0 { defaultTarget = g.nodes[0].Output } err = n.generateNinja(defaultTarget) if err != nil { return err } logStats("generate ninja time: %q", time.Since(startTime)) return nil } ninja.h0100644 0000000 0000000 00000002306 13654546140 011066 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef NINJA_H_ #define NINJA_H_ #include #include #include #include "dep.h" #include "string_piece.h" using namespace std; class Evaluator; void GenerateNinja(const vector& nodes, Evaluator* ev, const string& orig_args, double start_time); string GetNinjaFilename(); string GetNinjaShellScriptFilename(); string GetNinjaStampFilename(); // Exposed only for test. bool GetDepfileFromCommand(string* cmd, string* out); size_t GetGomaccPosForAndroidCompileCommand(StringPiece cmdline); #endif // NINJA_H_ ninja_test.cc0100644 0000000 0000000 00000041450 13654546140 012266 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include #include "log.h" #include "ninja.h" #include "testutil.h" namespace { string GetDepfile(string cmd, string* new_cmd) { new_cmd->clear(); string r; if (GetDepfileFromCommand(&cmd, &r)) { *new_cmd = cmd; return r; } return ""; } void TestGetDepfile() { string new_cmd; ASSERT_EQ(GetDepfile("g++ -c fat.cc -MD ", &new_cmd), ""); assert(g_last_error); delete g_last_error; g_last_error = NULL; // clang-format off ASSERT_EQ(GetDepfile("g++ -c fat.cc -o fat.o", &new_cmd), ""); ASSERT_EQ(GetDepfile("g++ -c fat.cc -MD -o fat.o -o fuga.o", &new_cmd), "fuga.d.tmp"); ASSERT_EQ(GetDepfile("g++ -c fat.cc -MD -o fat.o", &new_cmd), "fat.d.tmp"); ASSERT_EQ(GetDepfile("g++ -c fat.cc -MD -o fat", &new_cmd), "fat.d.tmp"); ASSERT_EQ(GetDepfile("g++ -c fat.cc -MD -MF foo.d -o fat.o", &new_cmd), "foo.d.tmp"); ASSERT_EQ(GetDepfile("g++ -c fat.cc -MD -o fat.o -MF foo.d", &new_cmd), "foo.d.tmp"); // A real example from maloader. ASSERT_EQ(GetDepfile("g++ -g -Iinclude -Wall -MMD -fno-omit-frame-pointer -O -m64 -W -Werror -c -o fat.o fat.cc", &new_cmd), "fat.d.tmp"); // A real example from Android. 
ASSERT_EQ(GetDepfile("mkdir -p out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/ && echo \"host C++: llvm-rs-cc <= frameworks/compile/slang/llvm-rs-cc.cpp\" && prebuilts/clang/linux-x86/host/3.6/bin/clang++ -I external/llvm -I external/llvm/include -I external/llvm/host/include -I external/clang/include -I external/clang/lib/CodeGen -I frameworks/compile/libbcc/include -I out/host/linux-x86/gen/EXECUTABLES/llvm-rs-cc_intermediates/include -I external/libcxx/include -I frameworks/compile/slang -I out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates -I out/host/linux-x86/gen/EXECUTABLES/llvm-rs-cc_intermediates -I libnativehelper/include/nativehelper $(cat out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/import_includes) -isystem system/core/include -isystem hardware/libhardware/include -isystem hardware/libhardware_legacy/include -isystem hardware/ril/include -isystem libnativehelper/include -isystem frameworks/native/include -isystem frameworks/native/opengl/include -isystem frameworks/av/include -isystem frameworks/base/include -isystem tools/include -isystem out/host/linux-x86/obj/include -c -fno-exceptions -Wno-multichar -m64 -Wa,--noexecstack -fPIC -no-canonical-prefixes -include build/core/combo/include/arch/linux-x86/AndroidConfig.h -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -O2 -g -fno-strict-aliasing -DNDEBUG -UDEBUG -D__compiler_offsetof=__builtin_offsetof -Werror=int-conversion -Wno-unused-command-line-argument --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --sysroot=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//sysroot -target x86_64-linux-gnu -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -Wsign-promo -std=gnu++11 -DNDEBUG -UDEBUG -Wno-inconsistent-missing-override 
--gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --sysroot=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//sysroot -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8 -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8/x86_64-linux -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8/backward -target x86_64-linux-gnu -pedantic -Wcast-qual -Wno-long-long -Wno-sign-promo -Wall -Wno-unused-parameter -Wno-return-type -Werror -std=c++11 -O0 -DTARGET_BUILD_VARIANT=eng -DRS_VERSION=23 -D_GNU_SOURCE -D__STDC_LIMIT_MACROS -O2 -fomit-frame-pointer -Wall -W -Wno-unused-parameter -Wwrite-strings -Dsprintf=sprintf -pedantic -Wcast-qual -Wno-long-long -Wno-sign-promo -Wall -Wno-unused-parameter -Wno-return-type -Werror -std=c++11 -O0 -DTARGET_BUILD_VARIANT=eng -DRS_VERSION=23 -fno-exceptions -fpie -D_USING_LIBCXX -Wno-sign-promo -fno-rtti -Woverloaded-virtual -Wno-sign-promo -std=c++11 -nostdinc++ -MD -MF out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d -o out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.o frameworks/compile/slang/llvm-rs-cc.cpp && cp out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P; sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$//' -e '/^$/ d' -e 's/$/ :/' < out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d >> out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P; rm -f out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d", &new_cmd), "out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d"); ASSERT_EQ(new_cmd, "mkdir -p out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/ && echo \"host C++: llvm-rs-cc <= frameworks/compile/slang/llvm-rs-cc.cpp\" && 
prebuilts/clang/linux-x86/host/3.6/bin/clang++ -I external/llvm -I external/llvm/include -I external/llvm/host/include -I external/clang/include -I external/clang/lib/CodeGen -I frameworks/compile/libbcc/include -I out/host/linux-x86/gen/EXECUTABLES/llvm-rs-cc_intermediates/include -I external/libcxx/include -I frameworks/compile/slang -I out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates -I out/host/linux-x86/gen/EXECUTABLES/llvm-rs-cc_intermediates -I libnativehelper/include/nativehelper $(cat out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/import_includes) -isystem system/core/include -isystem hardware/libhardware/include -isystem hardware/libhardware_legacy/include -isystem hardware/ril/include -isystem libnativehelper/include -isystem frameworks/native/include -isystem frameworks/native/opengl/include -isystem frameworks/av/include -isystem frameworks/base/include -isystem tools/include -isystem out/host/linux-x86/obj/include -c -fno-exceptions -Wno-multichar -m64 -Wa,--noexecstack -fPIC -no-canonical-prefixes -include build/core/combo/include/arch/linux-x86/AndroidConfig.h -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -O2 -g -fno-strict-aliasing -DNDEBUG -UDEBUG -D__compiler_offsetof=__builtin_offsetof -Werror=int-conversion -Wno-unused-command-line-argument --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --sysroot=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//sysroot -target x86_64-linux-gnu -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -Wsign-promo -std=gnu++11 -DNDEBUG -UDEBUG -Wno-inconsistent-missing-override --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --sysroot=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//sysroot -isystem 
prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8 -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8/x86_64-linux -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8/backward -target x86_64-linux-gnu -pedantic -Wcast-qual -Wno-long-long -Wno-sign-promo -Wall -Wno-unused-parameter -Wno-return-type -Werror -std=c++11 -O0 -DTARGET_BUILD_VARIANT=eng -DRS_VERSION=23 -D_GNU_SOURCE -D__STDC_LIMIT_MACROS -O2 -fomit-frame-pointer -Wall -W -Wno-unused-parameter -Wwrite-strings -Dsprintf=sprintf -pedantic -Wcast-qual -Wno-long-long -Wno-sign-promo -Wall -Wno-unused-parameter -Wno-return-type -Werror -std=c++11 -O0 -DTARGET_BUILD_VARIANT=eng -DRS_VERSION=23 -fno-exceptions -fpie -D_USING_LIBCXX -Wno-sign-promo -fno-rtti -Woverloaded-virtual -Wno-sign-promo -std=c++11 -nostdinc++ -MD -MF out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d -o out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.o frameworks/compile/slang/llvm-rs-cc.cpp && cp out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P; sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$//' -e '/^$/ d' -e 's/$/ :/' < out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d >> out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P"); ASSERT_EQ(GetDepfile("echo \"target asm: libsonivox <= external/sonivox/arm-wt-22k/lib_src/ARM-E_filter_gnu.s\" && mkdir -p out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates/lib_src/ && prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9/bin/arm-linux-androideabi-gcc -I external/sonivox/arm-wt-22k/host_src -I external/sonivox/arm-wt-22k/lib_src -I external/libcxx/include -I external/sonivox/arm-wt-22k -I out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates -I 
out/target/product/generic/gen/SHARED_LIBRARIES/libsonivox_intermediates -I libnativehelper/include/nativehelper $$(cat out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates/import_includes) -isystem system/core/include -isystem hardware/libhardware/include -isystem hardware/libhardware_legacy/include -isystem hardware/ril/include -isystem libnativehelper/include -isystem frameworks/native/include -isystem frameworks/native/opengl/include -isystem frameworks/av/include -isystem frameworks/base/include -isystem out/target/product/generic/obj/include -isystem bionic/libc/arch-arm/include -isystem bionic/libc/include -isystem bionic/libc/kernel/uapi -isystem bionic/libc/kernel/uapi/asm-arm -isystem bionic/libm/include -isystem bionic/libm/include/arm -c -fno-exceptions -Wno-multichar -msoft-float -ffunction-sections -fdata-sections -funwind-tables -fstack-protector -Wa,--noexecstack -Werror=format-security -D_FORTIFY_SOURCE=2 -fno-short-enums -no-canonical-prefixes -fno-canonical-system-headers -march=armv7-a -mfloat-abi=softfp -mfpu=vfpv3-d16 -include build/core/combo/include/arch/linux-arm/AndroidConfig.h -I build/core/combo/include/arch/linux-arm/ -fno-builtin-sin -fno-strict-volatile-bitfields -Wno-psabi -mthumb-interwork -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -Werror=return-type -Werror=non-virtual-dtor -Werror=address -Werror=sequence-point -DNDEBUG -g -Wstrict-aliasing=2 -fgcse-after-reload -frerun-cse-after-loop -frename-registers -DNDEBUG -UDEBUG -Wa,\"-I\" -Wa,\"external/sonivox/arm-wt-22k/lib_src\" -Wa,\"--defsym\" -Wa,\"SAMPLE_RATE_22050=1\" -Wa,\"--defsym\" -Wa,\"STEREO_OUTPUT=1\" -Wa,\"--defsym\" -Wa,\"FILTER_ENABLED=1\" -Wa,\"--defsym\" -Wa,\"SAMPLES_8_BIT=1\" -D__ASSEMBLY__ -MD -MF out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates/lib_src/ARM-E_filter_gnu.d -o out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates/lib_src/ARM-E_filter_gnu.o 
external/sonivox/arm-wt-22k/lib_src/ARM-E_filter_gnu.s", &new_cmd), ""); ASSERT_EQ(GetDepfile("echo \"RenderScript: Galaxy4 <= packages/wallpapers/Galaxy4/src/com/android/galaxy4/galaxy.rs\" && rm -rf out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript && mkdir -p out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript/res/raw && mkdir -p out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript/src && out/host/linux-x86/bin/llvm-rs-cc -o out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript/res/raw -p out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript/src -d out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript -a out/target/common/obj/APPS/Galaxy4_intermediates/src/RenderScript.stamp -MD -target-api 14 -Wall -Werror -I prebuilts/sdk/renderscript/clang-include -I prebuilts/sdk/renderscript/include packages/wallpapers/Galaxy4/src/com/android/galaxy4/galaxy.rs && mkdir -p out/target/common/obj/APPS/Galaxy4_intermediates/src/ && touch out/target/common/obj/APPS/Galaxy4_intermediates/src/RenderScript.stamp", &new_cmd), ""); ASSERT_EQ(GetDepfile("(echo \"bc: libclcore.bc <= frameworks/rs/driver/runtime/arch/generic.c\") && (mkdir -p out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/) && (prebuilts/clang/linux-x86/host/3.6/bin/clang -Iframeworks/rs/scriptc -Iexternal/clang/lib/Headers -MD -DRS_VERSION=23 -std=c99 -c -O3 -fno-builtin -emit-llvm -target armv7-none-linux-gnueabi -fsigned-char -Iframeworks/rs/cpu_ref -DRS_DECLARE_EXPIRED_APIS -Xclang -target-feature -Xclang +long64 frameworks/rs/driver/runtime/arch/generic.c -o out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.bc) && (cp out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P; sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' -e '/^$$/ d' -e 
's/$$/ :/' < out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d >> out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P; rm -f out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d)", &new_cmd), "out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d"); ASSERT_EQ(new_cmd, "(echo \"bc: libclcore.bc <= frameworks/rs/driver/runtime/arch/generic.c\") && (mkdir -p out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/) && (prebuilts/clang/linux-x86/host/3.6/bin/clang -Iframeworks/rs/scriptc -Iexternal/clang/lib/Headers -MD -DRS_VERSION=23 -std=c99 -c -O3 -fno-builtin -emit-llvm -target armv7-none-linux-gnueabi -fsigned-char -Iframeworks/rs/cpu_ref -DRS_DECLARE_EXPIRED_APIS -Xclang -target-feature -Xclang +long64 frameworks/rs/driver/runtime/arch/generic.c -o out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.bc) && (cp out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P; sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' -e '/^$$/ d' -e 's/$$/ :/' < out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d >> out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P)"); ASSERT_EQ(GetDepfile("gcc -c foo.P.c", &new_cmd), ""); ASSERT_EQ(GetDepfile("gcc -MMD foo.o -o foo", &new_cmd), ""); // TODO: Fix for automake. // ASSERT_EQ(GetDepfile("(/bin/sh ./libtool --tag=CXX --mode=compile g++ -DHAVE_CONFIG_H -I. 
-I./src -I./src -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare -DNO_FRAME_POINTER -DNDEBUG -g -O2 -MT libglog_la-logging.lo -MD -MP -MF .deps/libglog_la-logging.Tpo -c -o libglog_la-logging.lo `test -f 'src/logging.cc' || echo './'`src/logging.cc) && (mv -f .deps/libglog_la-logging.Tpo .deps/libglog_la-logging.Plo)", &new_cmd), ".deps/libglog_la-logging.Plo"); // ASSERT_EQ(GetDepfile("(g++ -DHAVE_CONFIG_H -I. -I./src -I./src -pthread -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare -DNO_FRAME_POINTER -g -O2 -MT signalhandler_unittest-signalhandler_unittest.o -MD -MP -MF .deps/signalhandler_unittest-signalhandler_unittest.Tpo -c -o signalhandler_unittest-signalhandler_unittest.o `test -f 'src/signalhandler_unittest.cc' || echo './'`src/signalhandler_unittest.cc) && (mv -f .deps/signalhandler_unittest-signalhandler_unittest.Tpo .deps/signalhandler_unittest-signalhandler_unittest.Po)", &new_cmd), ".deps/signalhandler_unittest-signalhandler_unittest.Po"); // clang-format on assert(!g_last_error); } static void TestGetGomaccPosForAndroidCompileCommand() { ASSERT_EQ(GetGomaccPosForAndroidCompileCommand( "prebuilts/clang/linux-x86/host/3.6/bin/clang++ -c foo.c"), 0); ASSERT_EQ(GetGomaccPosForAndroidCompileCommand( "prebuilts/misc/linux-x86/ccache/ccache " "prebuilts/clang/linux-x86/host/3.6/bin/clang++ -c foo.c"), 39); ASSERT_EQ(GetGomaccPosForAndroidCompileCommand("echo foo"), string::npos); } } // namespace int main() { g_log_no_exit = true; TestGetDepfile(); TestGetGomaccPosForAndroidCompileCommand(); assert(!g_failed); } ninja_test.go0100644 0000000 0000000 00000046405 13654546140 012313 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import "testing" func TestStripShellComment(t *testing.T) { for _, tc := range []struct { in string want string }{ { in: `foo`, want: `foo`, }, { in: `foo # bar`, want: `foo `, }, { in: `foo '# bar'`, want: `foo '# bar'`, }, { in: `foo '\'# bar'`, want: `foo '\'# bar'`, // unbalanced ' }, { in: `foo "# bar"`, want: `foo "# bar"`, }, { in: `foo "\"# bar"`, want: `foo "\"# bar"`, }, { in: `foo "\\"# bar"`, want: `foo "\\"# bar"`, // unbalanced " }, { in: "foo `# bar`", want: "foo `# bar`", }, { in: "foo `\\`# bar`", want: "foo `\\`# bar`", // unbalanced ` }, { in: "foo `\\\\`# bar`", want: "foo `\\\\`# bar`", }, } { got := stripShellComment(tc.in) if got != tc.want { t.Errorf(`stripShellComment(%q)=%q, want %q`, tc.in, got, tc.want) } } } func TestGetDepFile(t *testing.T) { for _, tc := range []struct { in string cmd string depfile string err bool }{ { in: `g++ -c fat.cc -o fat.o`, }, { in: `g++ -c fat.cc -MD`, err: true, }, { in: `g++ -c fat.cc -MD -o fat.o -o fat.o`, err: true, }, { in: `g++ -c fat.cc -MD -o fat.o`, cmd: `g++ -c fat.cc -MD -o fat.o && cp fat.d fat.d.tmp`, depfile: `fat.d.tmp`, }, { in: `g++ -c fat.cc -MD -o fat`, cmd: `g++ -c fat.cc -MD -o fat && cp fat.d fat.d.tmp`, depfile: `fat.d.tmp`, }, { in: `g++ -c fat.cc -MD -MF foo.d -o fat.o`, cmd: `g++ -c fat.cc -MD -MF foo.d -o fat.o && cp foo.d foo.d.tmp`, depfile: `foo.d.tmp`, }, { in: `g++ -c fat.cc -MD -o fat.o -MF foo.d`, cmd: `g++ -c fat.cc -MD -o fat.o -MF foo.d && cp foo.d foo.d.tmp`, depfile: `foo.d.tmp`, }, // A real example from maloader. 
{ in: `g++ -g -Iinclude -Wall -MMD -fno-omit-frame-pointer -O -m64 -W -Werror -c -o fat.o fat.cc`, cmd: `g++ -g -Iinclude -Wall -MMD -fno-omit-frame-pointer -O -m64 -W -Werror -c -o fat.o fat.cc && cp fat.d fat.d.tmp`, depfile: `fat.d.tmp`, }, // A real example from Android. { in: `mkdir -p out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/ && echo "host C++: llvm-rs-cc <= frameworks/compile/slang/llvm-rs-cc.cpp" && prebuilts/clang/linux-x86/host/3.6/bin/clang++ -I external/llvm -I external/llvm/include -I external/llvm/host/include -I external/clang/include -I external/clang/lib/CodeGen -I frameworks/compile/libbcc/include -I out/host/linux-x86/gen/EXECUTABLES/llvm-rs-cc_intermediates/include -I external/libcxx/include -I frameworks/compile/slang -I out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates -I out/host/linux-x86/gen/EXECUTABLES/llvm-rs-cc_intermediates -I libnativehelper/include/nativehelper $(cat out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/import_includes) -isystem system/core/include -isystem hardware/libhardware/include -isystem hardware/libhardware_legacy/include -isystem hardware/ril/include -isystem libnativehelper/include -isystem frameworks/native/include -isystem frameworks/native/opengl/include -isystem frameworks/av/include -isystem frameworks/base/include -isystem tools/include -isystem out/host/linux-x86/obj/include -c -fno-exceptions -Wno-multichar -m64 -Wa,--noexecstack -fPIC -no-canonical-prefixes -include build/core/combo/include/arch/linux-x86/AndroidConfig.h -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -O2 -g -fno-strict-aliasing -DNDEBUG -UDEBUG -D__compiler_offsetof=__builtin_offsetof -Werror=int-conversion -Wno-unused-command-line-argument --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ 
--sysroot=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//sysroot -target x86_64-linux-gnu -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -Wsign-promo -std=gnu++11 -DNDEBUG -UDEBUG -Wno-inconsistent-missing-override --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --sysroot=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//sysroot -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8 -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8/x86_64-linux -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8/backward -target x86_64-linux-gnu -pedantic -Wcast-qual -Wno-long-long -Wno-sign-promo -Wall -Wno-unused-parameter -Wno-return-type -Werror -std=c++11 -O0 -DTARGET_BUILD_VARIANT=eng -DRS_VERSION=23 -D_GNU_SOURCE -D__STDC_LIMIT_MACROS -O2 -fomit-frame-pointer -Wall -W -Wno-unused-parameter -Wwrite-strings -Dsprintf=sprintf -pedantic -Wcast-qual -Wno-long-long -Wno-sign-promo -Wall -Wno-unused-parameter -Wno-return-type -Werror -std=c++11 -O0 -DTARGET_BUILD_VARIANT=eng -DRS_VERSION=23 -fno-exceptions -fpie -D_USING_LIBCXX -Wno-sign-promo -fno-rtti -Woverloaded-virtual -Wno-sign-promo -std=c++11 -nostdinc++ -MD -MF out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d -o out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.o frameworks/compile/slang/llvm-rs-cc.cpp && cp out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P; sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$//' -e '/^$/ d' -e 's/$/ :/' < out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d >> out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P; rm -f out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d`, cmd: `mkdir -p 
out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/ && echo "host C++: llvm-rs-cc <= frameworks/compile/slang/llvm-rs-cc.cpp" && prebuilts/clang/linux-x86/host/3.6/bin/clang++ -I external/llvm -I external/llvm/include -I external/llvm/host/include -I external/clang/include -I external/clang/lib/CodeGen -I frameworks/compile/libbcc/include -I out/host/linux-x86/gen/EXECUTABLES/llvm-rs-cc_intermediates/include -I external/libcxx/include -I frameworks/compile/slang -I out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates -I out/host/linux-x86/gen/EXECUTABLES/llvm-rs-cc_intermediates -I libnativehelper/include/nativehelper $(cat out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/import_includes) -isystem system/core/include -isystem hardware/libhardware/include -isystem hardware/libhardware_legacy/include -isystem hardware/ril/include -isystem libnativehelper/include -isystem frameworks/native/include -isystem frameworks/native/opengl/include -isystem frameworks/av/include -isystem frameworks/base/include -isystem tools/include -isystem out/host/linux-x86/obj/include -c -fno-exceptions -Wno-multichar -m64 -Wa,--noexecstack -fPIC -no-canonical-prefixes -include build/core/combo/include/arch/linux-x86/AndroidConfig.h -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -O2 -g -fno-strict-aliasing -DNDEBUG -UDEBUG -D__compiler_offsetof=__builtin_offsetof -Werror=int-conversion -Wno-unused-command-line-argument --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --sysroot=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//sysroot -target x86_64-linux-gnu -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -Wsign-promo -std=gnu++11 -DNDEBUG -UDEBUG -Wno-inconsistent-missing-override 
--gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --sysroot=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//sysroot -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8 -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8/x86_64-linux -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8/backward -target x86_64-linux-gnu -pedantic -Wcast-qual -Wno-long-long -Wno-sign-promo -Wall -Wno-unused-parameter -Wno-return-type -Werror -std=c++11 -O0 -DTARGET_BUILD_VARIANT=eng -DRS_VERSION=23 -D_GNU_SOURCE -D__STDC_LIMIT_MACROS -O2 -fomit-frame-pointer -Wall -W -Wno-unused-parameter -Wwrite-strings -Dsprintf=sprintf -pedantic -Wcast-qual -Wno-long-long -Wno-sign-promo -Wall -Wno-unused-parameter -Wno-return-type -Werror -std=c++11 -O0 -DTARGET_BUILD_VARIANT=eng -DRS_VERSION=23 -fno-exceptions -fpie -D_USING_LIBCXX -Wno-sign-promo -fno-rtti -Woverloaded-virtual -Wno-sign-promo -std=c++11 -nostdinc++ -MD -MF out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d -o out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.o frameworks/compile/slang/llvm-rs-cc.cpp && cp out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P; sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$//' -e '/^$/ d' -e 's/$/ :/' < out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d >> out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P`, depfile: `out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P`, }, { in: `echo "target asm: libsonivox <= external/sonivox/arm-wt-22k/lib_src/ARM-E_filter_gnu.s" && mkdir -p out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates/lib_src/ && 
prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9/bin/arm-linux-androideabi-gcc -I external/sonivox/arm-wt-22k/host_src -I external/sonivox/arm-wt-22k/lib_src -I external/libcxx/include -I external/sonivox/arm-wt-22k -I out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates -I out/target/product/generic/gen/SHARED_LIBRARIES/libsonivox_intermediates -I libnativehelper/include/nativehelper $$(cat out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates/import_includes) -isystem system/core/include -isystem hardware/libhardware/include -isystem hardware/libhardware_legacy/include -isystem hardware/ril/include -isystem libnativehelper/include -isystem frameworks/native/include -isystem frameworks/native/opengl/include -isystem frameworks/av/include -isystem frameworks/base/include -isystem out/target/product/generic/obj/include -isystem bionic/libc/arch-arm/include -isystem bionic/libc/include -isystem bionic/libc/kernel/uapi -isystem bionic/libc/kernel/uapi/asm-arm -isystem bionic/libm/include -isystem bionic/libm/include/arm -c -fno-exceptions -Wno-multichar -msoft-float -ffunction-sections -fdata-sections -funwind-tables -fstack-protector -Wa,--noexecstack -Werror=format-security -D_FORTIFY_SOURCE=2 -fno-short-enums -no-canonical-prefixes -fno-canonical-system-headers -march=armv7-a -mfloat-abi=softfp -mfpu=vfpv3-d16 -include build/core/combo/include/arch/linux-arm/AndroidConfig.h -I build/core/combo/include/arch/linux-arm/ -fno-builtin-sin -fno-strict-volatile-bitfields -Wno-psabi -mthumb-interwork -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -Werror=return-type -Werror=non-virtual-dtor -Werror=address -Werror=sequence-point -DNDEBUG -g -Wstrict-aliasing=2 -fgcse-after-reload -frerun-cse-after-loop -frename-registers -DNDEBUG -UDEBUG -Wa,"-I" -Wa,"external/sonivox/arm-wt-22k/lib_src" -Wa,"--defsym" -Wa,"SAMPLE_RATE_22050=1" -Wa,"--defsym" -Wa,"STEREO_OUTPUT=1" -Wa,"--defsym" 
-Wa,"FILTER_ENABLED=1" -Wa,"--defsym" -Wa,"SAMPLES_8_BIT=1" -D__ASSEMBLY__ -MD -MF out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates/lib_src/ARM-E_filter_gnu.d -o out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates/lib_src/ARM-E_filter_gnu.o external/sonivox/arm-wt-22k/lib_src/ARM-E_filter_gnu.s`, depfile: ``, }, { in: `echo "RenderScript: Galaxy4 <= packages/wallpapers/Galaxy4/src/com/android/galaxy4/galaxy.rs" && rm -rf out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript && mkdir -p out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript/res/raw && mkdir -p out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript/src && out/host/linux-x86/bin/llvm-rs-cc -o out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript/res/raw -p out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript/src -d out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript -a out/target/common/obj/APPS/Galaxy4_intermediates/src/RenderScript.stamp -MD -target-api 14 -Wall -Werror -I prebuilts/sdk/renderscript/clang-include -I prebuilts/sdk/renderscript/include packages/wallpapers/Galaxy4/src/com/android/galaxy4/galaxy.rs && mkdir -p out/target/common/obj/APPS/Galaxy4_intermediates/src/ && touch out/target/common/obj/APPS/Galaxy4_intermediates/src/RenderScript.stamp`, depfile: ``, }, { in: `(echo "bc: libclcore.bc <= frameworks/rs/driver/runtime/arch/generic.c") && (mkdir -p out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/) && (prebuilts/clang/linux-x86/host/3.6/bin/clang -Iframeworks/rs/scriptc -Iexternal/clang/lib/Headers -MD -DRS_VERSION=23 -std=c99 -c -O3 -fno-builtin -emit-llvm -target armv7-none-linux-gnueabi -fsigned-char -Iframeworks/rs/cpu_ref -DRS_DECLARE_EXPIRED_APIS -Xclang -target-feature -Xclang +long64 frameworks/rs/driver/runtime/arch/generic.c -o out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.bc) && (cp 
out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P; sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' -e '/^$$/ d' -e 's/$$/ :/' < out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d >> out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P; rm -f out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d)`, cmd: `(echo "bc: libclcore.bc <= frameworks/rs/driver/runtime/arch/generic.c") && (mkdir -p out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/) && (prebuilts/clang/linux-x86/host/3.6/bin/clang -Iframeworks/rs/scriptc -Iexternal/clang/lib/Headers -MD -DRS_VERSION=23 -std=c99 -c -O3 -fno-builtin -emit-llvm -target armv7-none-linux-gnueabi -fsigned-char -Iframeworks/rs/cpu_ref -DRS_DECLARE_EXPIRED_APIS -Xclang -target-feature -Xclang +long64 frameworks/rs/driver/runtime/arch/generic.c -o out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.bc) && (cp out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P; sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' -e '/^$$/ d' -e 's/$$/ :/' < out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d >> out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P)`, depfile: `out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P`, }, { in: `gcc -c foo.P.c`, depfile: ``, }, { in: `(/bin/sh ./libtool --tag=CXX --mode=compile g++ -DHAVE_CONFIG_H -I. 
-I./src -I./src -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare -DNO_FRAME_POINTER -DNDEBUG -g -O2 -MT libglog_la-logging.lo -MD -MP -MF .deps/libglog_la-logging.Tpo -c -o libglog_la-logging.lo ` + "`" + `test -f 'src/logging.cc' || echo './'` + "`" + `src/logging.cc) && (mv -f .deps/libglog_la-logging.Tpo .deps/libglog_la-logging.Plo)`, cmd: `(/bin/sh ./libtool --tag=CXX --mode=compile g++ -DHAVE_CONFIG_H -I. -I./src -I./src -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare -DNO_FRAME_POINTER -DNDEBUG -g -O2 -MT libglog_la-logging.lo -MD -MP -MF .deps/libglog_la-logging.Tpo -c -o libglog_la-logging.lo ` + "`" + `test -f 'src/logging.cc' || echo './'` + "`" + `src/logging.cc) && (cp -f .deps/libglog_la-logging.Tpo .deps/libglog_la-logging.Plo)`, depfile: `.deps/libglog_la-logging.Tpo`, }, { in: `(g++ -DHAVE_CONFIG_H -I. -I./src -I./src -pthread -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare -DNO_FRAME_POINTER -g -O2 -MT signalhandler_unittest-signalhandler_unittest.o -MD -MP -MF .deps/signalhandler_unittest-signalhandler_unittest.Tpo -c -o signalhandler_unittest-signalhandler_unittest.o ` + "`" + `test -f 'src/signalhandler_unittest.cc' || echo './'` + "`" + `src/signalhandler_unittest.cc) && (mv -f .deps/signalhandler_unittest-signalhandler_unittest.Tpo .deps/signalhandler_unittest-signalhandler_unittest.Po)`, cmd: `(g++ -DHAVE_CONFIG_H -I. 
-I./src -I./src -pthread -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare -DNO_FRAME_POINTER -g -O2 -MT signalhandler_unittest-signalhandler_unittest.o -MD -MP -MF .deps/signalhandler_unittest-signalhandler_unittest.Tpo -c -o signalhandler_unittest-signalhandler_unittest.o ` + "`" + `test -f 'src/signalhandler_unittest.cc' || echo './'` + "`" + `src/signalhandler_unittest.cc) && (cp -f .deps/signalhandler_unittest-signalhandler_unittest.Tpo .deps/signalhandler_unittest-signalhandler_unittest.Po)`, depfile: `.deps/signalhandler_unittest-signalhandler_unittest.Tpo`, }, } { cmd, depfile, err := getDepfile(tc.in) if tc.err && err == nil { t.Errorf(`getDepfile(%q) unexpectedly has no error`, tc.in) } else if !tc.err && err != nil { t.Errorf(`getDepfile(%q) has an error: %q`, tc.in, err) } wantcmd := tc.cmd if wantcmd == "" { wantcmd = tc.in } if got, want := cmd, wantcmd; got != want { t.Errorf("getDepfile(%q)=\n %q, _, _;\nwant=%q, _, _", tc.in, got, want) } if got, want := depfile, tc.depfile; got != want { t.Errorf(`getDepfile(%q)=_, %q, _; want=_, %q, _`, tc.in, got, want) } } } func TestGomaCmdForAndroidCompileCmd(t *testing.T) { for _, tc := range []struct { in string want string ok bool }{ { in: "prebuilts/clang/linux-x86/host/3.6/bin/clang++ -c foo.c ", ok: true, }, { in: "prebuilts/misc/linux-x86/ccache/ccache prebuilts/clang/linux-x86/host/3.6/bin/clang++ -c foo.c ", want: "prebuilts/clang/linux-x86/host/3.6/bin/clang++ -c foo.c ", ok: true, }, { in: "echo foo ", ok: false, }, } { got, ok := gomaCmdForAndroidCompileCmd(tc.in) want := tc.want if tc.want == "" { want = tc.in } if got != want { t.Errorf("gomaCmdForAndroidCompileCmd(%q)=%q, _; want=%q, _", tc.in, got, tc.want) } if ok != tc.ok { t.Errorf("gomaCmdForAndroidCompileCmd(%q)=_, %t; want=_, %t", tc.in, ok, tc.ok) } } } pack.sh0100755 0000000 0000000 00000001477 13654546140 011103 0ustar000000000 0000000 #!/bin/sh # # Copyright 2015 Google Inc. 
All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e make kati ckati rm -fr out/kati mkdir out/kati git archive --prefix src/ master | tar -C out/kati -xvf - cd out/kati rm src/repo/android.tgz cp ../../m2n ../../kati ../../ckati . cd .. tar -cvzf ../kati.tgz kati parser.cc0100644 0000000 0000000 00000042737 13654546140 011435 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "parser.h" #include #include #include "expr.h" #include "file.h" #include "loc.h" #include "log.h" #include "stats.h" #include "stmt.h" #include "string_piece.h" #include "strutil.h" enum struct ParserState { NOT_AFTER_RULE = 0, AFTER_RULE, MAYBE_AFTER_RULE, }; class Parser { struct IfState { IfStmt* stmt; bool is_in_else; int num_nest; }; typedef void (Parser::*DirectiveHandler)(StringPiece line, StringPiece directive); typedef unordered_map DirectiveMap; public: Parser(StringPiece buf, const char* filename, vector* stmts) : buf_(buf), state_(ParserState::NOT_AFTER_RULE), stmts_(stmts), out_stmts_(stmts), num_define_nest_(0), num_if_nest_(0), loc_(filename, 0), fixed_lineno_(false) {} Parser(StringPiece buf, const Loc& loc, vector* stmts) : buf_(buf), state_(ParserState::NOT_AFTER_RULE), stmts_(stmts), out_stmts_(stmts), num_if_nest_(0), loc_(loc), fixed_lineno_(true) {} ~Parser() {} void Parse() { l_ = 0; for (l_ = 0; l_ < buf_.size();) { size_t lf_cnt = 0; size_t e = FindEndOfLine(&lf_cnt); if (!fixed_lineno_) loc_.lineno++; StringPiece line(buf_.data() + l_, e - l_); if (line.get(line.size() - 1) == '\r') line.remove_suffix(1); orig_line_with_directives_ = line; ParseLine(line); if (!fixed_lineno_) loc_.lineno += lf_cnt - 1; if (e == buf_.size()) break; l_ = e + 1; } if (!if_stack_.empty()) ERROR_LOC(Loc(loc_.filename, loc_.lineno + 1), "*** missing `endif'."); if (!define_name_.empty()) ERROR_LOC(Loc(loc_.filename, define_start_line_), "*** missing `endef', unterminated `define'."); } static void Init() { make_directives_ = new DirectiveMap; (*make_directives_)["include"] = &Parser::ParseInclude; (*make_directives_)["-include"] = &Parser::ParseInclude; (*make_directives_)["sinclude"] = &Parser::ParseInclude; (*make_directives_)["define"] = &Parser::ParseDefine; (*make_directives_)["ifdef"] = &Parser::ParseIfdef; (*make_directives_)["ifndef"] = &Parser::ParseIfdef; (*make_directives_)["ifeq"] = &Parser::ParseIfeq; 
(*make_directives_)["ifneq"] = &Parser::ParseIfeq; (*make_directives_)["else"] = &Parser::ParseElse; (*make_directives_)["endif"] = &Parser::ParseEndif; (*make_directives_)["override"] = &Parser::ParseOverride; (*make_directives_)["export"] = &Parser::ParseExport; (*make_directives_)["unexport"] = &Parser::ParseUnexport; else_if_directives_ = new DirectiveMap; (*else_if_directives_)["ifdef"] = &Parser::ParseIfdef; (*else_if_directives_)["ifndef"] = &Parser::ParseIfdef; (*else_if_directives_)["ifeq"] = &Parser::ParseIfeq; (*else_if_directives_)["ifneq"] = &Parser::ParseIfeq; assign_directives_ = new DirectiveMap; (*assign_directives_)["define"] = &Parser::ParseDefine; (*assign_directives_)["export"] = &Parser::ParseExport; (*assign_directives_)["override"] = &Parser::ParseOverride; shortest_directive_len_ = 9999; longest_directive_len_ = 0; for (auto p : *make_directives_) { size_t len = p.first.size(); shortest_directive_len_ = min(len, shortest_directive_len_); longest_directive_len_ = max(len, longest_directive_len_); } } static void Quit() { delete make_directives_; } void set_state(ParserState st) { state_ = st; } static vector parse_errors; private: void Error(const string& msg) { ParseErrorStmt* stmt = new ParseErrorStmt(); stmt->set_loc(loc_); stmt->msg = msg; out_stmts_->push_back(stmt); parse_errors.push_back(stmt); } size_t FindEndOfLine(size_t* lf_cnt) { return ::FindEndOfLine(buf_, l_, lf_cnt); } Value* ParseExpr(StringPiece s, ParseExprOpt opt = ParseExprOpt::NORMAL) { return ::ParseExpr(loc_, s, opt); } void ParseLine(StringPiece line) { if (!define_name_.empty()) { ParseInsideDefine(line); return; } if (line.empty() || (line.size() == 1 && line[0] == '\r')) return; current_directive_ = AssignDirective::NONE; if (line[0] == '\t' && state_ != ParserState::NOT_AFTER_RULE) { CommandStmt* stmt = new CommandStmt(); stmt->set_loc(loc_); stmt->expr = ParseExpr(line.substr(1), ParseExprOpt::COMMAND); stmt->orig = line; out_stmts_->push_back(stmt); return; } 
line = TrimLeftSpace(line); if (line[0] == '#') return; if (HandleDirective(line, make_directives_)) { return; } ParseRuleOrAssign(line); } void ParseRuleOrAssign(StringPiece line) { size_t sep = FindThreeOutsideParen(line, ':', '=', ';'); if (sep == string::npos || line[sep] == ';') { ParseRule(line, string::npos); } else if (line[sep] == '=') { ParseAssign(line, sep); } else if (line.get(sep + 1) == '=') { ParseAssign(line, sep + 1); } else if (line[sep] == ':') { ParseRule(line, sep); } else { CHECK(false); } } void ParseRule(StringPiece line, size_t sep) { if (current_directive_ != AssignDirective::NONE) { if (IsInExport()) return; if (sep != string::npos) { sep += orig_line_with_directives_.size() - line.size(); } line = orig_line_with_directives_; } line = TrimLeftSpace(line); if (line.empty()) return; if (orig_line_with_directives_[0] == '\t') { Error("*** commands commence before first target."); return; } const bool is_rule = sep != string::npos && line[sep] == ':'; RuleStmt* rule_stmt = new RuleStmt(); rule_stmt->set_loc(loc_); size_t found = FindTwoOutsideParen(line.substr(sep + 1), '=', ';'); if (found != string::npos) { found += sep + 1; rule_stmt->lhs = ParseExpr(TrimSpace(line.substr(0, found))); if (line[found] == ';') { rule_stmt->sep = RuleStmt::SEP_SEMICOLON; } else if (line[found] == '=') { if (line.size() > (found + 2) && line[found + 1] == '$' && line[found + 2] == '=') { rule_stmt->sep = RuleStmt::SEP_FINALEQ; found += 2; } else { rule_stmt->sep = RuleStmt::SEP_EQ; } } ParseExprOpt opt = rule_stmt->sep == RuleStmt::SEP_SEMICOLON ? ParseExprOpt::COMMAND : ParseExprOpt::NORMAL; rule_stmt->rhs = ParseExpr(TrimLeftSpace(line.substr(found + 1)), opt); } else { rule_stmt->lhs = ParseExpr(line); rule_stmt->sep = RuleStmt::SEP_NULL; rule_stmt->rhs = NULL; } out_stmts_->push_back(rule_stmt); state_ = is_rule ? 
ParserState::AFTER_RULE : ParserState::MAYBE_AFTER_RULE; } void ParseAssign(StringPiece line, size_t separator_pos) { if (separator_pos == 0) { Error("*** empty variable name ***"); return; } StringPiece lhs; StringPiece rhs; AssignOp op; ParseAssignStatement(line, separator_pos, &lhs, &rhs, &op); // If rhs starts with '$=', this is 'final assignment', // e.g., a combination of the assignment and // .KATI_READONLY := // statement. Note that we assume that ParseAssignStatement // trimmed the left bool is_final = (rhs.size() >= 2 && rhs[0] == '$' && rhs[1] == '='); if (is_final) { rhs = TrimLeftSpace(rhs.substr(2)); } AssignStmt* stmt = new AssignStmt(); stmt->set_loc(loc_); stmt->lhs = ParseExpr(lhs); stmt->rhs = ParseExpr(rhs); stmt->orig_rhs = rhs; stmt->op = op; stmt->directive = current_directive_; stmt->is_final = is_final; out_stmts_->push_back(stmt); state_ = ParserState::NOT_AFTER_RULE; } void ParseInclude(StringPiece line, StringPiece directive) { IncludeStmt* stmt = new IncludeStmt(); stmt->set_loc(loc_); stmt->expr = ParseExpr(line); stmt->should_exist = directive[0] == 'i'; out_stmts_->push_back(stmt); state_ = ParserState::NOT_AFTER_RULE; } void ParseDefine(StringPiece line, StringPiece) { if (line.empty()) { Error("*** empty variable name."); return; } define_name_ = line; num_define_nest_ = 1; define_start_ = 0; define_start_line_ = loc_.lineno; state_ = ParserState::NOT_AFTER_RULE; } void ParseInsideDefine(StringPiece line) { line = TrimLeftSpace(line); StringPiece directive = GetDirective(line); if (directive == "define") num_define_nest_++; else if (directive == "endef") num_define_nest_--; if (num_define_nest_ > 0) { if (define_start_ == 0) define_start_ = l_; return; } StringPiece rest = TrimRightSpace( RemoveComment(TrimLeftSpace(line.substr(sizeof("endef"))))); if (!rest.empty()) { WARN_LOC(loc_, "extraneous text after `endef' directive"); } AssignStmt* stmt = new AssignStmt(); stmt->set_loc(Loc(loc_.filename, define_start_line_)); stmt->lhs = 
ParseExpr(define_name_); StringPiece rhs; if (define_start_) rhs = buf_.substr(define_start_, l_ - define_start_ - 1); stmt->rhs = ParseExpr(rhs, ParseExprOpt::DEFINE); stmt->orig_rhs = rhs; stmt->op = AssignOp::EQ; stmt->directive = current_directive_; out_stmts_->push_back(stmt); define_name_.clear(); } void EnterIf(IfStmt* stmt) { IfState* st = new IfState(); st->stmt = stmt; st->is_in_else = false; st->num_nest = num_if_nest_; if_stack_.push(st); out_stmts_ = &stmt->true_stmts; } void ParseIfdef(StringPiece line, StringPiece directive) { IfStmt* stmt = new IfStmt(); stmt->set_loc(loc_); stmt->op = directive[2] == 'n' ? CondOp::IFNDEF : CondOp::IFDEF; stmt->lhs = ParseExpr(line); stmt->rhs = NULL; out_stmts_->push_back(stmt); EnterIf(stmt); } bool ParseIfEqCond(StringPiece s, IfStmt* stmt) { if (s.empty()) { return false; } if (s[0] == '(' && s[s.size() - 1] == ')') { s = s.substr(1, s.size() - 2); char terms[] = {',', '\0'}; size_t n; stmt->lhs = ParseExprImpl(loc_, s, terms, ParseExprOpt::NORMAL, &n, true); if (s[n] != ',') return false; s = TrimLeftSpace(s.substr(n + 1)); stmt->rhs = ParseExprImpl(loc_, s, NULL, ParseExprOpt::NORMAL, &n); s = TrimLeftSpace(s.substr(n)); } else { for (int i = 0; i < 2; i++) { if (s.empty()) return false; char quote = s[0]; if (quote != '\'' && quote != '"') return false; size_t end = s.find(quote, 1); if (end == string::npos) return false; Value* v = ParseExpr(s.substr(1, end - 1), ParseExprOpt::NORMAL); if (i == 0) stmt->lhs = v; else stmt->rhs = v; s = TrimLeftSpace(s.substr(end + 1)); } } if (!s.empty()) { WARN_LOC(loc_, "extraneous text after `ifeq' directive"); return true; } return true; } void ParseIfeq(StringPiece line, StringPiece directive) { IfStmt* stmt = new IfStmt(); stmt->set_loc(loc_); stmt->op = directive[2] == 'n' ? 
CondOp::IFNEQ : CondOp::IFEQ; if (!ParseIfEqCond(line, stmt)) { Error("*** invalid syntax in conditional."); return; } out_stmts_->push_back(stmt); EnterIf(stmt); } void ParseElse(StringPiece line, StringPiece) { if (!CheckIfStack("else")) return; IfState* st = if_stack_.top(); if (st->is_in_else) { Error("*** only one `else' per conditional."); return; } st->is_in_else = true; out_stmts_ = &st->stmt->false_stmts; StringPiece next_if = TrimLeftSpace(line); if (next_if.empty()) return; num_if_nest_ = st->num_nest + 1; if (!HandleDirective(next_if, else_if_directives_)) { WARN_LOC(loc_, "extraneous text after `else' directive"); } num_if_nest_ = 0; } void ParseEndif(StringPiece line, StringPiece) { if (!CheckIfStack("endif")) return; if (!line.empty()) { Error("extraneous text after `endif` directive"); return; } IfState st = *if_stack_.top(); for (int t = 0; t <= st.num_nest; t++) { delete if_stack_.top(); if_stack_.pop(); if (if_stack_.empty()) { out_stmts_ = stmts_; } else { IfState* st = if_stack_.top(); if (st->is_in_else) out_stmts_ = &st->stmt->false_stmts; else out_stmts_ = &st->stmt->true_stmts; } } } bool IsInExport() const { return (static_cast(current_directive_) & static_cast(AssignDirective::EXPORT)); } void CreateExport(StringPiece line, bool is_export) { ExportStmt* stmt = new ExportStmt; stmt->set_loc(loc_); stmt->expr = ParseExpr(line); stmt->is_export = is_export; out_stmts_->push_back(stmt); } void ParseOverride(StringPiece line, StringPiece) { current_directive_ = static_cast( (static_cast(current_directive_) | static_cast(AssignDirective::OVERRIDE))); if (HandleDirective(line, assign_directives_)) return; if (IsInExport()) { CreateExport(line, true); } ParseRuleOrAssign(line); } void ParseExport(StringPiece line, StringPiece) { current_directive_ = static_cast( (static_cast(current_directive_) | static_cast(AssignDirective::EXPORT))); if (HandleDirective(line, assign_directives_)) return; CreateExport(line, true); ParseRuleOrAssign(line); } void 
ParseUnexport(StringPiece line, StringPiece) { CreateExport(line, false); } bool CheckIfStack(const char* keyword) { if (if_stack_.empty()) { Error(StringPrintf("*** extraneous `%s'.", keyword)); return false; } return true; } StringPiece RemoveComment(StringPiece line) { size_t i = FindOutsideParen(line, '#'); if (i == string::npos) return line; return line.substr(0, i); } StringPiece GetDirective(StringPiece line) { if (line.size() < shortest_directive_len_) return StringPiece(); StringPiece prefix = line.substr(0, longest_directive_len_ + 1); size_t space_index = prefix.find_first_of(" \t#"); return prefix.substr(0, space_index); } bool HandleDirective(StringPiece line, const DirectiveMap* directive_map) { StringPiece directive = GetDirective(line); auto found = directive_map->find(directive); if (found == directive_map->end()) return false; StringPiece rest = TrimRightSpace( RemoveComment(TrimLeftSpace(line.substr(directive.size())))); (this->*found->second)(rest, directive); return true; } StringPiece buf_; size_t l_; ParserState state_; vector* stmts_; vector* out_stmts_; StringPiece define_name_; int num_define_nest_; size_t define_start_; int define_start_line_; StringPiece orig_line_with_directives_; AssignDirective current_directive_; int num_if_nest_; stack if_stack_; Loc loc_; bool fixed_lineno_; static DirectiveMap* make_directives_; static DirectiveMap* else_if_directives_; static DirectiveMap* assign_directives_; static size_t shortest_directive_len_; static size_t longest_directive_len_; }; void Parse(Makefile* mk) { COLLECT_STATS("parse file time"); Parser parser(StringPiece(mk->buf()), mk->filename().c_str(), mk->mutable_stmts()); parser.Parse(); } void Parse(StringPiece buf, const Loc& loc, vector* out_stmts) { COLLECT_STATS("parse eval time"); Parser parser(buf, loc, out_stmts); parser.Parse(); } void ParseNotAfterRule(StringPiece buf, const Loc& loc, vector* out_stmts) { Parser parser(buf, loc, out_stmts); 
parser.set_state(ParserState::NOT_AFTER_RULE); parser.Parse(); } void InitParser() { Parser::Init(); } void QuitParser() { Parser::Quit(); } Parser::DirectiveMap* Parser::make_directives_; Parser::DirectiveMap* Parser::else_if_directives_; Parser::DirectiveMap* Parser::assign_directives_; size_t Parser::shortest_directive_len_; size_t Parser::longest_directive_len_; vector Parser::parse_errors; void ParseAssignStatement(StringPiece line, size_t sep, StringPiece* lhs, StringPiece* rhs, AssignOp* op) { CHECK(sep != 0); *op = AssignOp::EQ; size_t lhs_end = sep; switch (line[sep - 1]) { case ':': lhs_end--; *op = AssignOp::COLON_EQ; break; case '+': lhs_end--; *op = AssignOp::PLUS_EQ; break; case '?': lhs_end--; *op = AssignOp::QUESTION_EQ; break; } *lhs = TrimSpace(line.substr(0, lhs_end)); *rhs = TrimLeftSpace(line.substr(sep + 1)); } const vector& GetParseErrors() { return Parser::parse_errors; } parser.go0100644 0000000 0000000 00000043534 13654546140 011451 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati //go:generate go run testcase/gen_testcase_parse_benchmark.go // // $ go generate // $ go test -bench . 
import ( "bufio" "bytes" "crypto/sha1" "errors" "fmt" "io" "io/ioutil" "strings" "sync" "time" "github.com/golang/glog" ) type makefile struct { filename string stmts []ast } type ifState struct { ast *ifAST inElse bool numNest int } type parser struct { rd *bufio.Reader mk makefile lineno int elineno int // lineno == elineno unless there is trailing '\'. linenoFixed bool done bool outStmts *[]ast inRecipe bool ifStack []ifState defineVar []byte inDef []byte defOpt string numIfNest int err error } func newParser(rd io.Reader, filename string) *parser { p := &parser{ rd: bufio.NewReader(rd), } p.mk.filename = filename p.outStmts = &p.mk.stmts return p } func (p *parser) srcpos() srcpos { return srcpos{ filename: p.mk.filename, lineno: p.lineno, } } func (p *parser) addStatement(stmt ast) { *p.outStmts = append(*p.outStmts, stmt) switch stmt.(type) { case *maybeRuleAST: p.inRecipe = true case *assignAST, *includeAST, *exportAST: p.inRecipe = false } } func (p *parser) readLine() []byte { if !p.linenoFixed { p.lineno = p.elineno + 1 } var line []byte for !p.done { buf, err := p.rd.ReadBytes('\n') if !p.linenoFixed { p.elineno++ } if err == io.EOF { p.done = true } else if err != nil { p.err = fmt.Errorf("readline %s: %v", p.srcpos(), err) p.done = true } line = append(line, buf...) 
buf = bytes.TrimRight(buf, "\r\n") glog.V(4).Infof("buf:%q", buf) backslash := false for len(buf) > 0 && buf[len(buf)-1] == '\\' { buf = buf[:len(buf)-1] backslash = !backslash } if !backslash { glog.V(4).Infof("no concat line:%q", buf) break } } line = bytes.TrimRight(line, "\r\n") return line } func newAssignAST(p *parser, lhsBytes []byte, rhsBytes []byte, op string) (*assignAST, error) { lhs, _, err := parseExpr(lhsBytes, nil, parseOp{alloc: true}) if err != nil { return nil, err } rhs, _, err := parseExpr(rhsBytes, nil, parseOp{alloc: true}) if err != nil { return nil, err } opt := "" if p != nil { opt = p.defOpt } return &assignAST{ lhs: lhs, rhs: rhs, op: op, opt: opt, }, nil } func (p *parser) handleDirective(line []byte, directives map[string]directiveFunc) bool { w, data := firstWord(line) if d, ok := directives[string(w)]; ok { d(p, data) return true } return false } func (p *parser) handleRuleOrAssign(line []byte) { rline := line var semi []byte if i := findLiteralChar(line, ';', 0, skipVar); i >= 0 { // preserve after semicolon semi = append(semi, line[i+1:]...) rline = concatline(line[:i]) } else { rline = concatline(line) } if p.handleAssign(line) { return } // not assignment. // ie. 
no '=' found or ':' found before '=' (except ':=') p.parseMaybeRule(rline, semi) return } func (p *parser) handleAssign(line []byte) bool { aline, _ := removeComment(concatline(line)) aline = trimLeftSpaceBytes(aline) if len(aline) == 0 { return false } // fmt.Printf("assign: %q=>%q\n", line, aline) i := findLiteralChar(aline, ':', '=', skipVar) if i >= 0 { if aline[i] == '=' { p.parseAssign(aline, i) return true } if aline[i] == ':' && i+1 < len(aline) && aline[i+1] == '=' { p.parseAssign(aline, i+1) return true } } return false } func (p *parser) parseAssign(line []byte, sep int) { lhs, op, rhs := line[:sep], line[sep:sep+1], line[sep+1:] if sep > 0 { switch line[sep-1] { case ':', '+', '?': lhs, op = line[:sep-1], line[sep-1:sep+1] } } glog.V(1).Infof("parseAssign %s op:%q opt:%s", line, op, p.defOpt) lhs = trimSpaceBytes(lhs) rhs = trimLeftSpaceBytes(rhs) aast, err := newAssignAST(p, lhs, rhs, string(op)) if err != nil { p.err = err return } aast.srcpos = p.srcpos() p.addStatement(aast) } func (p *parser) parseMaybeRule(line, semi []byte) { if len(line) == 0 { p.err = p.srcpos().errorf("*** missing rule before commands.") return } if line[0] == '\t' { p.err = p.srcpos().errorf("*** commands commence before first target.") return } var assign *assignAST ci := findLiteralChar(line, ':', 0, skipVar) if ci >= 0 { eqi := findLiteralChar(line[ci+1:], '=', 0, skipVar) if eqi == 0 { panic(fmt.Sprintf("unexpected eq after colon: %q", line)) } if eqi > 0 { var lhsbytes []byte op := "=" switch line[ci+1+eqi-1] { case ':', '+', '?': lhsbytes = append(lhsbytes, line[ci+1:ci+1+eqi-1]...) op = string(line[ci+1+eqi-1 : ci+1+eqi+1]) default: lhsbytes = append(lhsbytes, line[ci+1:ci+1+eqi]...) } lhsbytes = trimSpaceBytes(lhsbytes) lhs, _, err := parseExpr(lhsbytes, nil, parseOp{}) if err != nil { p.err = p.srcpos().error(err) return } var rhsbytes []byte rhsbytes = append(rhsbytes, line[ci+1+eqi+1:]...) 
if semi != nil { rhsbytes = append(rhsbytes, ';') rhsbytes = append(rhsbytes, concatline(semi)...) } rhsbytes = trimLeftSpaceBytes(rhsbytes) semi = nil rhs, _, err := parseExpr(rhsbytes, nil, parseOp{}) if err != nil { p.err = p.srcpos().error(err) return } // TODO(ukai): support override, export in target specific var. assign = &assignAST{ lhs: lhs, rhs: rhs, op: op, } assign.srcpos = p.srcpos() line = line[:ci+1] } } expr, _, err := parseExpr(line, nil, parseOp{}) if err != nil { p.err = p.srcpos().error(err) return } // TODO(ukai): remove ast, and eval here. rast := &maybeRuleAST{ isRule: ci >= 0, expr: expr, assign: assign, semi: semi, } rast.srcpos = p.srcpos() glog.V(1).Infof("stmt: %#v", rast) p.addStatement(rast) } func (p *parser) parseInclude(op string, line []byte) { // TODO(ukai): parse expr here iast := &includeAST{ expr: string(line), op: op, } iast.srcpos = p.srcpos() p.addStatement(iast) } func (p *parser) parseIfdef(op string, data []byte) { lhs, _, err := parseExpr(data, nil, parseOp{alloc: true}) if err != nil { p.err = p.srcpos().error(err) return } iast := &ifAST{ op: op, lhs: lhs, } iast.srcpos = p.srcpos() p.addStatement(iast) p.ifStack = append(p.ifStack, ifState{ast: iast, numNest: p.numIfNest}) p.outStmts = &iast.trueStmts } func (p *parser) parseTwoQuotes(s []byte) (string, string, []byte, bool) { var args []string for i := 0; i < 2; i++ { s = trimSpaceBytes(s) if len(s) == 0 { return "", "", nil, false } quote := s[0] if quote != '\'' && quote != '"' { return "", "", nil, false } end := bytes.IndexByte(s[1:], quote) + 1 if end < 0 { return "", "", nil, false } args = append(args, string(s[1:end])) s = s[end+1:] } return args[0], args[1], s, true } // parse // "(lhs, rhs)" // "lhs, rhs" func (p *parser) parseEq(s []byte) (string, string, []byte, bool) { if len(s) == 0 { return "", "", nil, false } if s[0] == '(' { in := s[1:] glog.V(1).Infof("parseEq ( %q )", in) term := []byte{','} v, n, err := parseExpr(in, term, parseOp{matchParen: 
true}) if err != nil { glog.V(1).Infof("parse eq: %q: %v", in, err) return "", "", nil, false } lhs := v.String() n++ n += skipSpaces(in[n:], nil) term = []byte{')'} in = in[n:] v, n, err = parseExpr(in, term, parseOp{matchParen: true}) if err != nil { glog.V(1).Infof("parse eq 2nd: %q: %v", in, err) return "", "", nil, false } rhs := v.String() in = in[n+1:] in = trimSpaceBytes(in) return lhs, rhs, in, true } return p.parseTwoQuotes(s) } func (p *parser) parseIfeq(op string, data []byte) { lhsBytes, rhsBytes, extra, ok := p.parseEq(data) if !ok { p.err = p.srcpos().errorf(`*** invalid syntax in conditional.`) return } if len(extra) > 0 { glog.V(1).Infof("extra %q", extra) warnNoPrefix(p.srcpos(), `extraneous text after %q directive`, op) } lhs, _, err := parseExpr([]byte(lhsBytes), nil, parseOp{matchParen: true}) if err != nil { p.err = p.srcpos().error(err) return } rhs, _, err := parseExpr([]byte(rhsBytes), nil, parseOp{matchParen: true}) if err != nil { p.err = p.srcpos().error(err) return } iast := &ifAST{ op: op, lhs: lhs, rhs: rhs, } iast.srcpos = p.srcpos() p.addStatement(iast) p.ifStack = append(p.ifStack, ifState{ast: iast, numNest: p.numIfNest}) p.outStmts = &iast.trueStmts } func (p *parser) checkIfStack(curKeyword string) error { if len(p.ifStack) == 0 { return p.srcpos().errorf(`*** extraneous %q.`, curKeyword) } return nil } func (p *parser) parseElse(data []byte) { err := p.checkIfStack("else") if err != nil { p.err = err return } state := &p.ifStack[len(p.ifStack)-1] if state.inElse { p.err = p.srcpos().errorf(`*** only one "else" per conditional.`) return } state.inElse = true p.outStmts = &state.ast.falseStmts nextIf := data if len(nextIf) == 0 { return } var ifDirectives = map[string]directiveFunc{ "ifdef": ifdefDirective, "ifndef": ifndefDirective, "ifeq": ifeqDirective, "ifneq": ifneqDirective, } p.numIfNest = state.numNest + 1 if p.handleDirective(nextIf, ifDirectives) { p.numIfNest = 0 return } p.numIfNest = 0 warnNoPrefix(p.srcpos(), 
"extraneous text after `else' directive") return } func (p *parser) parseEndif(data []byte) { err := p.checkIfStack("endif") if err != nil { p.err = err return } state := p.ifStack[len(p.ifStack)-1] for t := 0; t <= state.numNest; t++ { p.ifStack = p.ifStack[0 : len(p.ifStack)-1] if len(p.ifStack) == 0 { p.outStmts = &p.mk.stmts } else { state := p.ifStack[len(p.ifStack)-1] if state.inElse { p.outStmts = &state.ast.falseStmts } else { p.outStmts = &state.ast.trueStmts } } } if len(trimSpaceBytes(data)) > 0 { warnNoPrefix(p.srcpos(), "extraneous text after `endif' directive") } return } func (p *parser) parseDefine(data []byte) { p.defineVar = nil p.inDef = nil p.defineVar = append(p.defineVar, trimSpaceBytes(data)...) return } func (p *parser) parseVpath(data []byte) { vline, _ := removeComment(concatline(data)) vline = trimLeftSpaceBytes(vline) v, _, err := parseExpr(vline, nil, parseOp{}) if err != nil { p.err = p.srcpos().errorf("parse error %q: %v", string(vline), err) return } vast := &vpathAST{ expr: v, } vast.srcpos = p.srcpos() p.addStatement(vast) } type directiveFunc func(*parser, []byte) var makeDirectives map[string]directiveFunc func init() { makeDirectives = map[string]directiveFunc{ "include": includeDirective, "-include": sincludeDirective, "sinclude": sincludeDirective, "ifdef": ifdefDirective, "ifndef": ifndefDirective, "ifeq": ifeqDirective, "ifneq": ifneqDirective, "else": elseDirective, "endif": endifDirective, "define": defineDirective, "override": overrideDirective, "export": exportDirective, "unexport": unexportDirective, "vpath": vpathDirective, } } func includeDirective(p *parser, data []byte) { p.parseInclude("include", data) } func sincludeDirective(p *parser, data []byte) { p.parseInclude("-include", data) } func ifdefDirective(p *parser, data []byte) { p.parseIfdef("ifdef", data) } func ifndefDirective(p *parser, data []byte) { p.parseIfdef("ifndef", data) } func ifeqDirective(p *parser, data []byte) { p.parseIfeq("ifeq", data) } func 
ifneqDirective(p *parser, data []byte) { p.parseIfeq("ifneq", data) } func elseDirective(p *parser, data []byte) { p.parseElse(data) } func endifDirective(p *parser, data []byte) { p.parseEndif(data) } func defineDirective(p *parser, data []byte) { p.parseDefine(data) } func overrideDirective(p *parser, data []byte) { p.defOpt = "override" defineDirective := map[string]directiveFunc{ "define": defineDirective, } glog.V(1).Infof("override define? %q", data) if p.handleDirective(data, defineDirective) { return } // e.g. overrider foo := bar // line will be "foo := bar". if p.handleAssign(data) { return } p.defOpt = "" var line []byte line = append(line, []byte("override ")...) line = append(line, data...) p.handleRuleOrAssign(line) // TODO(ukai): evaluate now to detect invalid "override" directive here? } func handleExport(p *parser, data []byte, export bool) (hasEqual bool) { i := bytes.IndexByte(data, '=') if i > 0 { hasEqual = true switch data[i-1] { case ':', '+', '?': i-- } data = data[:i] } east := &exportAST{ expr: data, hasEqual: hasEqual, export: export, } east.srcpos = p.srcpos() glog.V(1).Infof("export %v", east) p.addStatement(east) return hasEqual } func exportDirective(p *parser, data []byte) { p.defOpt = "export" defineDirective := map[string]directiveFunc{ "define": defineDirective, } glog.V(1).Infof("export define? %q", data) if p.handleDirective(data, defineDirective) { return } if !handleExport(p, data, true) { return } // e.g. export foo := bar // line will be "foo := bar". 
p.handleAssign(data) } func unexportDirective(p *parser, data []byte) { handleExport(p, data, false) return } func vpathDirective(p *parser, data []byte) { p.parseVpath(data) } func (p *parser) parse() (mk makefile, err error) { for !p.done { line := p.readLine() if glog.V(1) { glog.Infof("%s: %q", p.srcpos(), line) } if p.defineVar != nil { p.processDefine(line) if p.err != nil { return makefile{}, p.err } continue } p.defOpt = "" if p.inRecipe { if len(line) > 0 && line[0] == '\t' { cast := &commandAST{cmd: string(line[1:])} cast.srcpos = p.srcpos() p.addStatement(cast) continue } } p.parseLine(line) if p.err != nil { return makefile{}, p.err } } return p.mk, p.err } func (p *parser) parseLine(line []byte) { cline := concatline(line) if len(cline) == 0 { return } if glog.V(1) { glog.Infof("concatline:%q", cline) } var dline []byte cline, _ = removeComment(cline) dline = append(dline, cline...) dline = trimSpaceBytes(dline) if len(dline) == 0 { return } if glog.V(1) { glog.Infof("directive?: %q", dline) } if p.handleDirective(dline, makeDirectives) { return } if glog.V(1) { glog.Infof("rule or assign?: %q", line) } p.handleRuleOrAssign(line) } func (p *parser) processDefine(line []byte) { line = append(line, '\n') line = concatline(line) if line[len(line)-1] != '\n' { line = append(line, '\n') } if glog.V(1) { glog.Infof("concatline:%q", line) } if !p.isEndef(line) { p.inDef = append(p.inDef, line...) 
if p.inDef == nil { p.inDef = []byte{} } return } if p.inDef[len(p.inDef)-1] == '\n' { p.inDef = p.inDef[:len(p.inDef)-1] } glog.V(1).Infof("multilineAssign %q %q", p.defineVar, p.inDef) aast, err := newAssignAST(p, p.defineVar, p.inDef, "=") if err != nil { p.err = p.srcpos().errorf("assign error %q=%q: %v", p.defineVar, p.inDef, err) return } aast.srcpos = p.srcpos() aast.srcpos.lineno -= bytes.Count(p.inDef, []byte{'\n'}) p.addStatement(aast) p.defineVar = nil p.inDef = nil return } func (p *parser) isEndef(line []byte) bool { if bytes.Equal(line, []byte("endef")) { return true } w, data := firstWord(line) if bytes.Equal(w, []byte("endef")) { data, _ = removeComment(data) data = trimLeftSpaceBytes(data) if len(data) > 0 { warnNoPrefix(p.srcpos(), `extraneous text after "endef" directive`) } return true } return false } func defaultMakefile() (string, error) { candidates := []string{"GNUmakefile", "makefile", "Makefile"} for _, filename := range candidates { if exists(filename) { return filename, nil } } return "", errors.New("no targets specified and no makefile found") } func parseMakefileReader(rd io.Reader, loc srcpos) (makefile, error) { parser := newParser(rd, loc.filename) parser.lineno = loc.lineno parser.elineno = loc.lineno parser.linenoFixed = true return parser.parse() } func parseMakefileString(s string, loc srcpos) (makefile, error) { return parseMakefileReader(strings.NewReader(s), loc) } func parseMakefileBytes(s []byte, loc srcpos) (makefile, error) { return parseMakefileReader(bytes.NewReader(s), loc) } type mkCacheEntry struct { mk makefile hash [sha1.Size]byte err error ts int64 } type makefileCacheT struct { mu sync.Mutex mk map[string]mkCacheEntry } var makefileCache = &makefileCacheT{ mk: make(map[string]mkCacheEntry), } func (mc *makefileCacheT) lookup(filename string) (makefile, [sha1.Size]byte, bool, error) { var hash [sha1.Size]byte mc.mu.Lock() c, present := mc.mk[filename] mc.mu.Unlock() if !present { return makefile{}, hash, false, 
nil } ts := getTimestamp(filename) if ts < 0 || ts >= c.ts { return makefile{}, hash, false, nil } return c.mk, c.hash, true, c.err } func (mc *makefileCacheT) parse(filename string) (makefile, [sha1.Size]byte, error) { glog.Infof("parse Makefile %q", filename) mk, hash, ok, err := makefileCache.lookup(filename) if ok { if glog.V(1) { glog.Infof("makefile cache hit for %q", filename) } return mk, hash, err } if glog.V(1) { glog.Infof("reading makefile %q", filename) } c, err := ioutil.ReadFile(filename) if err != nil { return makefile{}, hash, err } hash = sha1.Sum(c) mk, err = parseMakefile(c, filename) if err != nil { return makefile{}, hash, err } makefileCache.mu.Lock() makefileCache.mk[filename] = mkCacheEntry{ mk: mk, hash: hash, err: err, ts: time.Now().Unix(), } makefileCache.mu.Unlock() return mk, hash, err } func parseMakefile(s []byte, filename string) (makefile, error) { parser := newParser(bytes.NewReader(s), filename) return parser.parse() } parser.h0100644 0000000 0000000 00000002442 13654546140 011264 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
#ifndef PARSER_H_ #define PARSER_H_ #include #include "loc.h" #include "stmt.h" #include "string_piece.h" using namespace std; class Makefile; void Parse(Makefile* mk); void Parse(StringPiece buf, const Loc& loc, vector* out_asts); void ParseNotAfterRule(StringPiece buf, const Loc& loc, vector* out_asts); void ParseAssignStatement(StringPiece line, size_t sep, StringPiece* lhs, StringPiece* rhs, AssignOp* op); void InitParser(); void QuitParser(); const vector& GetParseErrors(); #endif // PARSER_H_ pathutil.go0100644 0000000 0000000 00000050517 13654546140 012006 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "bytes" "errors" "fmt" "io" "os" "path/filepath" "strconv" "strings" "sync" "syscall" "github.com/golang/glog" ) type fileid struct { dev, ino uint64 } var ( unknownFileid = fileid{} invalidFileid = fileid{dev: 1<<64 - 1, ino: 1<<64 - 1} ) type dirent struct { id fileid name string lmode os.FileMode mode os.FileMode // add other fields to support more find commands? 
} type fsCacheT struct { mu sync.Mutex ids map[string]fileid dirents map[fileid][]dirent } var fsCache = &fsCacheT{ ids: make(map[string]fileid), dirents: map[fileid][]dirent{ invalidFileid: nil, }, } func init() { fsCache.readdir(".", unknownFileid) } func (c *fsCacheT) dirs() int { c.mu.Lock() defer c.mu.Unlock() return len(c.dirents) } func (c *fsCacheT) files() int { c.mu.Lock() defer c.mu.Unlock() n := 0 for _, ents := range c.dirents { n += len(ents) } return n } func hasWildcardMeta(pat string) bool { return strings.IndexAny(pat, "*?[") >= 0 } func hasWildcardMetaByte(pat []byte) bool { return bytes.IndexAny(pat, "*?[") >= 0 } func wildcardUnescape(pat string) string { var buf bytes.Buffer for i := 0; i < len(pat); i++ { if pat[i] == '\\' && i+1 < len(pat) { switch pat[i+1] { case '*', '?', '[', '\\': buf.WriteByte(pat[i]) } continue } buf.WriteByte(pat[i]) } return buf.String() } func filepathJoin(names ...string) string { var dir string for i, n := range names { dir += n if i != len(names)-1 && n != "" && n[len(n)-1] != '/' { dir += "/" } } return dir } func filepathClean(path string) string { var names []string if filepath.IsAbs(path) { names = append(names, "") } paths := strings.Split(path, string(filepath.Separator)) Loop: for _, n := range paths { if n == "" || n == "." { continue Loop } if n == ".." && len(names) > 0 { dir, last := names[:len(names)-1], names[len(names)-1] parent := strings.Join(dir, string(filepath.Separator)) if parent == "" { parent = "." } _, ents := fsCache.readdir(parent, unknownFileid) for _, e := range ents { if e.name != last { continue } if e.lmode&os.ModeSymlink == os.ModeSymlink && e.mode&os.ModeDir == os.ModeDir { // preserve .. if last is symlink dir. names = append(names, "..") continue Loop } // last is not symlink, maybe safe to clean. names = names[:len(names)-1] continue Loop } // parent doesn't exists? preserve .. 
names = append(names, "..") continue Loop } names = append(names, n) } if len(names) == 0 { return "." } return strings.Join(names, string(filepath.Separator)) } func (c *fsCacheT) fileid(dir string) fileid { c.mu.Lock() id := c.ids[dir] c.mu.Unlock() return id } func (c *fsCacheT) readdir(dir string, id fileid) (fileid, []dirent) { glog.V(3).Infof("readdir: %s [%v]", dir, id) c.mu.Lock() if id == unknownFileid { id = c.ids[dir] } ents, ok := c.dirents[id] c.mu.Unlock() if ok { return id, ents } glog.V(3).Infof("opendir: %s", dir) d, err := os.Open(dir) if err != nil { c.mu.Lock() c.ids[dir] = invalidFileid c.mu.Unlock() return invalidFileid, nil } defer d.Close() fi, err := d.Stat() if err != nil { c.mu.Lock() c.ids[dir] = invalidFileid c.mu.Unlock() return invalidFileid, nil } if stat, ok := fi.Sys().(*syscall.Stat_t); ok { id = fileid{dev: uint64(stat.Dev), ino: stat.Ino} } names, _ := d.Readdirnames(-1) // need sort? ents = nil var path string for _, name := range names { path = filepath.Join(dir, name) fi, err := os.Lstat(path) if err != nil { glog.Warningf("readdir %s: %v", name, err) ents = append(ents, dirent{name: name}) continue } lmode := fi.Mode() mode := lmode var id fileid if stat, ok := fi.Sys().(*syscall.Stat_t); ok { id = fileid{dev: uint64(stat.Dev), ino: stat.Ino} } if lmode&os.ModeSymlink == os.ModeSymlink { fi, err = os.Stat(path) if err != nil { glog.Warningf("readdir %s: %v", name, err) } else { mode = fi.Mode() if stat, ok := fi.Sys().(*syscall.Stat_t); ok { id = fileid{dev: uint64(stat.Dev), ino: stat.Ino} } } } ents = append(ents, dirent{id: id, name: name, lmode: lmode, mode: mode}) } glog.V(3).Infof("readdir:%s => %v: %v", dir, id, ents) c.mu.Lock() c.ids[dir] = id c.dirents[id] = ents c.mu.Unlock() return id, ents } // glob searches for files matching pattern in the directory dir // and appends them to matches. ignore I/O errors. 
func (c *fsCacheT) glob(dir, pattern string, matches []string) ([]string, error) { _, ents := c.readdir(filepathClean(dir), unknownFileid) switch dir { case "", string(filepath.Separator): // nothing default: dir += string(filepath.Separator) // add trailing separator back } for _, ent := range ents { matched, err := filepath.Match(pattern, ent.name) if err != nil { return nil, err } if matched { matches = append(matches, dir+ent.name) } } return matches, nil } func (c *fsCacheT) Glob(pat string) ([]string, error) { // TODO(ukai): expand ~ to user's home directory. // TODO(ukai): use find cache for glob if exists // or use wildcardCache for find cache. pat = wildcardUnescape(pat) dir, file := filepath.Split(pat) switch dir { case "", string(filepath.Separator): // nothing default: dir = dir[:len(dir)-1] // chop off trailing separator } if !hasWildcardMeta(dir) { return c.glob(dir, file, nil) } m, err := c.Glob(dir) if err != nil { return nil, err } var matches []string for _, d := range m { matches, err = c.glob(d, file, matches) if err != nil { return nil, err } } return matches, nil } func wildcard(w evalWriter, pat string) error { files, err := fsCache.Glob(pat) if err != nil { return err } for _, file := range files { w.writeWordString(file) } return nil } type findOp interface { apply(evalWriter, string, dirent) (test bool, prune bool) } type findOpName string func (op findOpName) apply(w evalWriter, path string, ent dirent) (bool, bool) { matched, err := filepath.Match(string(op), ent.name) if err != nil { glog.Warningf("find -name %q: %v", string(op), err) return false, false } return matched, false } type findOpType struct { mode os.FileMode followSymlinks bool } func (op findOpType) apply(w evalWriter, path string, ent dirent) (bool, bool) { mode := ent.lmode if op.followSymlinks && ent.mode != 0 { mode = ent.mode } return op.mode&mode == op.mode, false } type findOpRegular struct { followSymlinks bool } func (op findOpRegular) apply(w evalWriter, path 
string, ent dirent) (bool, bool) { mode := ent.lmode if op.followSymlinks && ent.mode != 0 { mode = ent.mode } return mode.IsRegular(), false } type findOpNot struct { op findOp } func (op findOpNot) apply(w evalWriter, path string, ent dirent) (bool, bool) { test, prune := op.op.apply(w, path, ent) return !test, prune } type findOpAnd []findOp func (op findOpAnd) apply(w evalWriter, path string, ent dirent) (bool, bool) { var prune bool for _, o := range op { test, p := o.apply(w, path, ent) if p { prune = true } if !test { return test, prune } } return true, prune } type findOpOr struct { op1, op2 findOp } func (op findOpOr) apply(w evalWriter, path string, ent dirent) (bool, bool) { test, prune := op.op1.apply(w, path, ent) if test { return test, prune } return op.op2.apply(w, path, ent) } type findOpPrune struct{} func (op findOpPrune) apply(w evalWriter, path string, ent dirent) (bool, bool) { return true, true } type findOpPrint struct{} func (op findOpPrint) apply(w evalWriter, path string, ent dirent) (bool, bool) { var name string if path == "" { name = ent.name } else if ent.name == "." { name = path } else { name = filepathJoin(path, ent.name) } glog.V(3).Infof("find print: %s", name) w.writeWordString(name) return true, false } func (c *fsCacheT) find(w evalWriter, fc findCommand, path string, id fileid, depth int, seen map[fileid]string) { glog.V(2).Infof("find: path:%s id:%v depth:%d", path, id, depth) id, ents := c.readdir(filepathClean(filepathJoin(fc.chdir, path)), id) if ents == nil { glog.V(1).Infof("find: %s %s not found", fc.chdir, path) return } for _, ent := range ents { glog.V(3).Infof("find: path:%s ent:%s depth:%d", path, ent.name, depth) _, prune := fc.apply(w, path, ent) mode := ent.lmode if fc.followSymlinks { if mode&os.ModeSymlink == os.ModeSymlink { lpath := filepathJoin(path, ent.name) if p, ok := seen[ent.id]; ok { // stderr? 
glog.Errorf("find: File system loop detected; `%s' is part of the same file system loop as `%s'.", lpath, p) return } seen[ent.id] = lpath } mode = ent.mode } if !mode.IsDir() { glog.V(3).Infof("find: not dir: %s/%s", path, ent.name) continue } if prune { glog.V(3).Infof("find: prune: %s", path) continue } if depth >= fc.depth { glog.V(3).Infof("find: depth: %d >= %d", depth, fc.depth) continue } c.find(w, fc, filepathJoin(path, ent.name), ent.id, depth+1, seen) } } type findCommand struct { testdir string // before chdir chdir string finddirs []string // after chdir followSymlinks bool ops []findOp depth int } func parseFindCommand(cmd string) (findCommand, error) { if !strings.Contains(cmd, "find") { return findCommand{}, errNotFind } fcp := findCommandParser{ shellParser: shellParser{ cmd: cmd, }, } err := fcp.parse() if err != nil { return fcp.fc, err } if len(fcp.fc.finddirs) == 0 { fcp.fc.finddirs = append(fcp.fc.finddirs, ".") } if fcp.fc.chdir != "" { fcp.fc.chdir = filepathClean(fcp.fc.chdir) } if filepath.IsAbs(fcp.fc.chdir) { return fcp.fc, errFindAbspath } for _, dir := range fcp.fc.finddirs { if filepath.IsAbs(dir) { return fcp.fc, errFindAbspath } } glog.V(3).Infof("find command: %#v", fcp.fc) // TODO(ukai): handle this in run() instead of fallback shell. 
_, ents := fsCache.readdir(filepathClean(fcp.fc.testdir), unknownFileid) if ents == nil { glog.V(1).Infof("find: testdir %s - not dir", fcp.fc.testdir) return fcp.fc, errFindNoSuchDir } _, ents = fsCache.readdir(filepathClean(fcp.fc.chdir), unknownFileid) if ents == nil { glog.V(1).Infof("find: cd %s: No such file or directory", fcp.fc.chdir) return fcp.fc, errFindNoSuchDir } return fcp.fc, nil } func (fc findCommand) run(w evalWriter) { glog.V(3).Infof("find: %#v", fc) for _, dir := range fc.finddirs { seen := make(map[fileid]string) id, _ := fsCache.readdir(filepathClean(filepathJoin(fc.chdir, dir)), unknownFileid) _, prune := fc.apply(w, dir, dirent{id: id, name: ".", mode: os.ModeDir, lmode: os.ModeDir}) if prune { glog.V(3).Infof("find: prune: %s", dir) continue } if 0 >= fc.depth { glog.V(3).Infof("find: depth: 0 >= %d", fc.depth) continue } fsCache.find(w, fc, dir, id, 1, seen) } } func (fc findCommand) apply(w evalWriter, path string, ent dirent) (test, prune bool) { var p bool for _, op := range fc.ops { test, p = op.apply(w, path, ent) if p { prune = true } if !test { break } } glog.V(2).Infof("apply path:%s ent:%v => test=%t, prune=%t", path, ent, test, prune) return test, prune } var ( errNotFind = errors.New("not find command") errFindBackground = errors.New("find command: background") errFindUnbalancedQuote = errors.New("find command: unbalanced quote") errFindDupChdir = errors.New("find command: dup chdir") errFindDupTestdir = errors.New("find command: dup testdir") errFindExtra = errors.New("find command: extra") errFindUnexpectedEnd = errors.New("find command: unexpected end") errFindAbspath = errors.New("find command: abs path") errFindNoSuchDir = errors.New("find command: no such dir") ) type findCommandParser struct { fc findCommand shellParser } func (p *findCommandParser) parse() error { p.fc.depth = 1<<31 - 1 // max int32 var hasIf bool var hasFind bool for { tok, err := p.token() if err == io.EOF || tok == "" { if !hasFind { return 
errNotFind } return nil } if err != nil { return err } switch tok { case "cd": if p.fc.chdir != "" { return errFindDupChdir } p.fc.chdir, err = p.token() if err != nil { return err } err = p.expect(";", "&&") if err != nil { return err } case "if": err = p.expect("[") if err != nil { return err } if hasIf { return errFindDupTestdir } err = p.parseTest() if err != nil { return err } err = p.expectSeq("]", ";", "then") if err != nil { return err } hasIf = true case "test": if hasIf { return errFindDupTestdir } err = p.parseTest() if err != nil { return err } err = p.expect("&&") if err != nil { return err } case "find": err = p.parseFind() if err != nil { return err } if hasIf { err = p.expect("fi") if err != nil { return err } } tok, err = p.token() if err != io.EOF || tok != "" { return errFindExtra } hasFind = true return nil } } } func (p *findCommandParser) parseTest() error { if p.fc.testdir != "" { return errFindDupTestdir } err := p.expect("-d") if err != nil { return err } p.fc.testdir, err = p.token() return err } func (p *findCommandParser) parseFind() error { for { tok, err := p.token() if err == io.EOF || tok == "" || tok == ";" { var print findOpPrint if len(p.fc.ops) == 0 || p.fc.ops[len(p.fc.ops)-1] != print { p.fc.ops = append(p.fc.ops, print) } return nil } if err != nil { return err } if tok != "" && (tok[0] == '-' || tok == "\\(") { p.unget(tok) op, err := p.parseFindCond() if err != nil { return err } if op != nil { p.fc.ops = append(p.fc.ops, op) } continue } p.fc.finddirs = append(p.fc.finddirs, tok) } } func (p *findCommandParser) parseFindCond() (findOp, error) { return p.parseExpr() } func (p *findCommandParser) parseExpr() (findOp, error) { op, err := p.parseTerm() if err != nil { return nil, err } if op == nil { return nil, nil } for { tok, err := p.token() if err == io.EOF || tok == "" { return op, nil } if err != nil { return nil, err } if tok != "-or" && tok != "-o" { p.unget(tok) return op, nil } op2, err := p.parseTerm() if err != nil 
{ return nil, err } op = findOpOr{op, op2} } } func (p *findCommandParser) parseTerm() (findOp, error) { op, err := p.parseFact() if err != nil { return nil, err } if op == nil { return nil, nil } var ops []findOp ops = append(ops, op) for { tok, err := p.token() if err == io.EOF || tok == "" { if len(ops) == 1 { return ops[0], nil } return findOpAnd(ops), nil } if err != nil { return nil, err } if tok != "-and" && tok != "-a" { p.unget(tok) } op, err = p.parseFact() if err != nil { return nil, err } if op == nil { if len(ops) == 1 { return ops[0], nil } return findOpAnd(ops), nil } ops = append(ops, op) // findAndOp? } } func (p *findCommandParser) parseFact() (findOp, error) { tok, err := p.token() if err != nil { return nil, err } switch tok { case "-L": p.fc.followSymlinks = true return nil, nil case "-prune": return findOpPrune{}, nil case "-print": return findOpPrint{}, nil case "-maxdepth": tok, err = p.token() if err != nil { return nil, err } i, err := strconv.ParseInt(tok, 10, 32) if err != nil { return nil, err } if i < 0 { return nil, fmt.Errorf("find commnad: -maxdepth negative: %d", i) } p.fc.depth = int(i) return nil, nil case "-not", "\\!": op, err := p.parseFact() if err != nil { return nil, err } return findOpNot{op}, nil case "\\(": op, err := p.parseExpr() if err != nil { return nil, err } err = p.expect("\\)") if err != nil { return nil, err } return op, nil case "-name": tok, err = p.token() if err != nil { return nil, err } return findOpName(tok), nil case "-type": tok, err = p.token() if err != nil { return nil, err } var m os.FileMode switch tok { case "b": m = os.ModeDevice case "c": m = os.ModeDevice | os.ModeCharDevice case "d": m = os.ModeDir case "p": m = os.ModeNamedPipe case "l": m = os.ModeSymlink case "f": return findOpRegular{p.fc.followSymlinks}, nil case "s": m = os.ModeSocket default: return nil, fmt.Errorf("find command: unsupported -type %s", tok) } return findOpType{m, p.fc.followSymlinks}, nil case "-o", "-or", "-a", 
"-and": p.unget(tok) return nil, nil default: if tok != "" && tok[0] == '-' { return nil, fmt.Errorf("find command: unsupported %s", tok) } p.unget(tok) return nil, nil } } type findleavesCommand struct { name string dirs []string prunes []string mindepth int } func parseFindleavesCommand(cmd string) (findleavesCommand, error) { if !strings.Contains(cmd, "build/tools/findleaves.py") { return findleavesCommand{}, errNotFindleaves } fcp := findleavesCommandParser{ shellParser: shellParser{ cmd: cmd, }, } err := fcp.parse() if err != nil { return fcp.fc, err } glog.V(3).Infof("findleaves command: %#v", fcp.fc) return fcp.fc, nil } func (fc findleavesCommand) run(w evalWriter) { glog.V(3).Infof("findleaves: %#v", fc) for _, dir := range fc.dirs { seen := make(map[fileid]string) id, _ := fsCache.readdir(filepathClean(dir), unknownFileid) fc.walk(w, dir, id, 1, seen) } } func (fc findleavesCommand) walk(w evalWriter, dir string, id fileid, depth int, seen map[fileid]string) { glog.V(3).Infof("findleaves walk: dir:%d id:%v depth:%d", dir, id, depth) id, ents := fsCache.readdir(filepathClean(dir), id) var subdirs []dirent for _, ent := range ents { if ent.mode.IsDir() { if fc.isPrune(ent.name) { glog.V(3).Infof("findleaves prune %s in %s", ent.name, dir) continue } subdirs = append(subdirs, ent) continue } if depth < fc.mindepth { glog.V(3).Infof("findleaves depth=%d mindepth=%d", depth, fc.mindepth) continue } if ent.name == fc.name { glog.V(2).Infof("findleaves %s in %s", ent.name, dir) w.writeWordString(filepathJoin(dir, ent.name)) // no recurse subdirs return } } for _, subdir := range subdirs { if subdir.lmode&os.ModeSymlink == os.ModeSymlink { lpath := filepathJoin(dir, subdir.name) if p, ok := seen[subdir.id]; ok { // symlink loop detected. 
glog.Errorf("findleaves: loop detected %q was %q", lpath, p) continue } seen[subdir.id] = lpath } fc.walk(w, filepathJoin(dir, subdir.name), subdir.id, depth+1, seen) } } func (fc findleavesCommand) isPrune(name string) bool { for _, p := range fc.prunes { if p == name { return true } } return false } var ( errNotFindleaves = errors.New("not findleaves command") errFindleavesEmptyPrune = errors.New("findleaves: empty prune") errFindleavesNoFilename = errors.New("findleaves: no filename") ) type findleavesCommandParser struct { fc findleavesCommand shellParser } func (p *findleavesCommandParser) parse() error { var args []string p.fc.mindepth = -1 tok, err := p.token() if err != nil { return err } if tok != "build/tools/findleaves.py" { return errNotFindleaves } for { tok, err := p.token() if err == io.EOF || tok == "" { break } if err != nil { return err } switch { case strings.HasPrefix(tok, "--prune="): prune := filepath.Base(strings.TrimPrefix(tok, "--prune=")) if prune == "" { return errFindleavesEmptyPrune } p.fc.prunes = append(p.fc.prunes, prune) case strings.HasPrefix(tok, "--mindepth="): md := strings.TrimPrefix(tok, "--mindepth=") i, err := strconv.ParseInt(md, 10, 32) if err != nil { return err } p.fc.mindepth = int(i) default: args = append(args, tok) } } if len(args) < 2 { return errFindleavesNoFilename } p.fc.dirs, p.fc.name = args[:len(args)-1], args[len(args)-1] return nil } pathutil_test.go0100644 0000000 0000000 00000057110 13654546140 013041 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "os" "path/filepath" "reflect" "strings" "testing" ) type mockfs struct { id fileid ofscache *fsCacheT } func newFS() *mockfs { fs := &mockfs{ ofscache: fsCache, } fsCache = &fsCacheT{ ids: make(map[string]fileid), dirents: make(map[fileid][]dirent), } fsCache.ids["."] = fs.dir(".").id return fs } func (m *mockfs) dump(t *testing.T) { t.Log("fs ids:") for name, id := range fsCache.ids { t.Logf(" %q=%v", name, id) } t.Log("fs dirents:") for id, ents := range fsCache.dirents { t.Logf(" %v:", id) for _, ent := range ents { t.Logf(" %#v", ent) } } } func (m *mockfs) close() { fsCache = m.ofscache } func (m *mockfs) dirent(name string, mode os.FileMode) dirent { id := m.id m.id.ino++ return dirent{id: id, name: name, mode: mode, lmode: mode} } func (m *mockfs) addent(name string, ent dirent) { dir, name := filepath.Split(name) dir = strings.TrimSuffix(dir, string(filepath.Separator)) if dir == "" { dir = "." } di, ok := fsCache.ids[dir] if !ok { if dir == "." { panic(". 
not found:" + name) } de := m.add(m.dir, dir) fsCache.ids[dir] = de.id di = de.id } for _, e := range fsCache.dirents[di] { if e.name == ent.name { return } } fsCache.dirents[di] = append(fsCache.dirents[di], ent) } func (m *mockfs) add(t func(string) dirent, name string) dirent { ent := t(filepath.Base(name)) m.addent(name, ent) return ent } func (m *mockfs) symlink(name string, ent dirent) { lent := ent lent.lmode = os.ModeSymlink lent.name = filepath.Base(name) m.addent(name, lent) } func (m *mockfs) dirref(name string) dirent { id := fsCache.ids[name] return dirent{id: id, name: filepath.Base(name), mode: os.ModeDir, lmode: os.ModeDir} } func (m *mockfs) notfound() dirent { return dirent{id: invalidFileid} } func (m *mockfs) dir(name string) dirent { return m.dirent(name, os.ModeDir) } func (m *mockfs) file(name string) dirent { return m.dirent(name, os.FileMode(0644)) } func TestFilepathClean(t *testing.T) { fs := newFS() defer fs.close() di := fs.add(fs.dir, "dir") fs.symlink("link", di) fs.dump(t) for _, tc := range []struct { path string want string }{ {path: "foo", want: "foo"}, {path: ".", want: "."}, {path: "./", want: "."}, {path: ".///", want: "."}, {path: "", want: "."}, {path: "foo/bar", want: "foo/bar"}, {path: "./foo", want: "foo"}, {path: "foo///", want: "foo"}, {path: "foo//bar", want: "foo/bar"}, {path: "foo/../bar", want: "foo/../bar"}, // foo doesn't exist {path: "dir/../bar", want: "bar"}, // dir is real dir {path: "link/../bar", want: "link/../bar"}, // link is symlink {path: "foo/./bar", want: "foo/bar"}, {path: "/foo/bar", want: "/foo/bar"}, } { if got, want := filepathClean(tc.path), tc.want; got != want { t.Errorf("filepathClean(%q)=%q; want=%q", tc.path, got, want) } } } func TestParseFindCommand(t *testing.T) { fs := newFS() defer fs.close() fs.add(fs.dir, "testdir") maxdepth := 1<<31 - 1 for _, tc := range []struct { cmd string want findCommand }{ { cmd: "find testdir", want: findCommand{ finddirs: []string{"testdir"}, ops: 
[]findOp{findOpPrint{}}, depth: maxdepth, }, }, { cmd: "find .", want: findCommand{ finddirs: []string{"."}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, }, { cmd: "find ", want: findCommand{ finddirs: []string{"."}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, }, { cmd: "find testdir/../testdir", want: findCommand{ finddirs: []string{"testdir/../testdir"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, }, { cmd: "find testdir -print", want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, }, { cmd: "find testdir -name foo", want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpName("foo"), findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir -name "file1"`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpName("file1"), findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir -name "*1"`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpName("*1"), findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir -name "*1" -and -name "file*"`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpAnd{findOpName("*1"), findOpName("file*")}, findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir -name "*1" -or -name "file*"`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpName("*1"), findOpName("file*")}, findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir -name "*1" -or -type f`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpName("*1"), findOpRegular{}}, findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir -name "*1" -or -not -type f`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpName("*1"), findOpNot{findOpRegular{}}}, findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir -name "*1" -or \! 
-type f`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpName("*1"), findOpNot{findOpRegular{}}}, findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir -name "*1" -or -type d`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpName("*1"), findOpType{mode: os.ModeDir}}, findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir -name "*1" -or -type l`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpName("*1"), findOpType{mode: os.ModeSymlink}}, findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir -name "*1" -a -type l -o -name "dir*"`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpAnd([]findOp{findOpName("*1"), findOpType{mode: os.ModeSymlink}}), findOpName("dir*")}, findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir \( -name "dir*" -o -name "*1" \) -a -type f`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpAnd([]findOp{findOpOr{findOpName("dir*"), findOpName("*1")}, findOpRegular{}}), findOpPrint{}}, depth: maxdepth, }, }, { cmd: `cd testdir && find`, want: findCommand{ chdir: "testdir", finddirs: []string{"."}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, }, { cmd: `test -d testdir && find testdir`, want: findCommand{ testdir: "testdir", finddirs: []string{"testdir"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, }, { cmd: `if [ -d testdir ] ; then find testdir ; fi`, want: findCommand{ testdir: "testdir", finddirs: []string{"testdir"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, }, { cmd: `if [ -d testdir ]; then find testdir; fi`, want: findCommand{ testdir: "testdir", finddirs: []string{"testdir"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, }, { cmd: `if [ -d testdir ]; then cd testdir && find .; fi`, want: findCommand{ chdir: "testdir", testdir: "testdir", finddirs: []string{"."}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir -name dir2 
-prune -o -name file1`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpAnd([]findOp{findOpName("dir2"), findOpPrune{}}), findOpName("file1")}, findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir testdir`, want: findCommand{ finddirs: []string{"testdir", "testdir"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find -L testdir -type f`, want: findCommand{ finddirs: []string{"testdir"}, followSymlinks: true, ops: []findOp{findOpRegular{followSymlinks: true}, findOpPrint{}}, depth: maxdepth, }, }, { cmd: `cd testdir; find -L . -type f`, want: findCommand{ chdir: "testdir", finddirs: []string{"."}, followSymlinks: true, ops: []findOp{findOpRegular{followSymlinks: true}, findOpPrint{}}, depth: maxdepth, }, }, { cmd: `find testdir -maxdepth 1`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpPrint{}}, depth: 1, }, }, { cmd: `find testdir -maxdepth 0`, want: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpPrint{}}, depth: 0, }, }, } { fc, err := parseFindCommand(tc.cmd) if err != nil { t.Errorf("parseFindCommand(%q)=_, %v; want=_, ", tc.cmd, err) continue } if got, want := fc, tc.want; !reflect.DeepEqual(got, want) { t.Errorf("parseFindCommand(%q)=%#v\n want=%#v\n", tc.cmd, got, want) } } } func TestParseFindCommandFail(t *testing.T) { for _, cmd := range []string{ `find testdir -maxdepth hoge`, `find testdir -maxdepth 1hoge`, `find testdir -maxdepth -1`, } { _, err := parseFindCommand(cmd) if err == nil { t.Errorf("parseFindCommand(%q)=_, ; want=_, err", cmd) } } } func TestFind(t *testing.T) { fs := newFS() defer fs.close() fs.add(fs.file, "Makefile") fs.add(fs.file, "testdir/file1") fs.add(fs.file, "testdir/file2") file1 := fs.add(fs.file, "testdir/dir1/file1") dir1 := fs.dirref("testdir/dir1") fs.add(fs.file, "testdir/dir1/file2") fs.add(fs.file, "testdir/dir2/file1") fs.add(fs.file, "testdir/dir2/file2") fs.symlink("testdir/dir2/link1", file1) fs.symlink("testdir/dir2/link2", 
dir1) fs.symlink("testdir/dir2/link3", fs.notfound()) fs.dump(t) maxdepth := 1<<31 - 1 for _, tc := range []struct { fc findCommand want string }{ { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, want: `testdir testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3`, }, { fc: findCommand{ finddirs: []string{"."}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, want: `. ./Makefile ./testdir ./testdir/file1 ./testdir/file2 ./testdir/dir1 ./testdir/dir1/file1 ./testdir/dir1/file2 ./testdir/dir2 ./testdir/dir2/file1 ./testdir/dir2/file2 ./testdir/dir2/link1 ./testdir/dir2/link2 ./testdir/dir2/link3`, }, { fc: findCommand{ finddirs: []string{"./"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, want: `./ ./Makefile ./testdir ./testdir/file1 ./testdir/file2 ./testdir/dir1 ./testdir/dir1/file1 ./testdir/dir1/file2 ./testdir/dir2 ./testdir/dir2/file1 ./testdir/dir2/file2 ./testdir/dir2/link1 ./testdir/dir2/link2 ./testdir/dir2/link3`, }, { fc: findCommand{ finddirs: []string{".///"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, want: `./// .///Makefile .///testdir .///testdir/file1 .///testdir/file2 .///testdir/dir1 .///testdir/dir1/file1 .///testdir/dir1/file2 .///testdir/dir2 .///testdir/dir2/file1 .///testdir/dir2/file2 .///testdir/dir2/link1 .///testdir/dir2/link2 .///testdir/dir2/link3`, }, { fc: findCommand{ finddirs: []string{"./."}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, want: `./. 
././Makefile ././testdir ././testdir/file1 ././testdir/file2 ././testdir/dir1 ././testdir/dir1/file1 ././testdir/dir1/file2 ././testdir/dir2 ././testdir/dir2/file1 ././testdir/dir2/file2 ././testdir/dir2/link1 ././testdir/dir2/link2 ././testdir/dir2/link3`, }, { fc: findCommand{ finddirs: []string{"././"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, want: `././ ././Makefile ././testdir ././testdir/file1 ././testdir/file2 ././testdir/dir1 ././testdir/dir1/file1 ././testdir/dir1/file2 ././testdir/dir2 ././testdir/dir2/file1 ././testdir/dir2/file2 ././testdir/dir2/link1 ././testdir/dir2/link2 ././testdir/dir2/link3`, }, { fc: findCommand{ finddirs: []string{"testdir/../testdir"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, want: `testdir/../testdir testdir/../testdir/file1 testdir/../testdir/file2 testdir/../testdir/dir1 testdir/../testdir/dir1/file1 testdir/../testdir/dir1/file2 testdir/../testdir/dir2 testdir/../testdir/dir2/file1 testdir/../testdir/dir2/file2 testdir/../testdir/dir2/link1 testdir/../testdir/dir2/link2 testdir/../testdir/dir2/link3`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpName("foo"), findOpPrint{}}, depth: maxdepth, }, want: ``, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpName("file1"), findOpPrint{}}, depth: maxdepth, }, want: `testdir/file1 testdir/dir1/file1 testdir/dir2/file1`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpAnd{findOpName("*1"), findOpName("file*")}, findOpPrint{}}, depth: maxdepth, }, want: `testdir/file1 testdir/dir1/file1 testdir/dir2/file1`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpName("*1"), findOpName("file*")}, findOpPrint{}}, depth: maxdepth, }, want: `testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpName("*1"), 
findOpRegular{}}, findOpPrint{}}, depth: maxdepth, }, want: `testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpName("*1"), findOpNot{findOpRegular{}}}, findOpPrint{}}, depth: maxdepth, }, want: `testdir testdir/file1 testdir/dir1 testdir/dir1/file1 testdir/dir2 testdir/dir2/file1 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpName("*1"), findOpType{mode: os.ModeDir}}, findOpPrint{}}, depth: maxdepth, }, want: `testdir testdir/file1 testdir/dir1 testdir/dir1/file1 testdir/dir2 testdir/dir2/file1 testdir/dir2/link1`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpName("*1"), findOpType{mode: os.ModeSymlink}}, findOpPrint{}}, depth: maxdepth, }, want: `testdir/file1 testdir/dir1 testdir/dir1/file1 testdir/dir2/file1 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpAnd([]findOp{findOpName("*1"), findOpType{mode: os.ModeSymlink}}), findOpName("dir*")}, findOpPrint{}}, depth: maxdepth, }, want: `testdir/dir1 testdir/dir2 testdir/dir2/link1`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpAnd([]findOp{findOpName("*1"), findOpType{mode: os.ModeSymlink}}), findOpName("dir*")}, findOpPrint{}}, depth: maxdepth, }, want: `testdir/dir1 testdir/dir2 testdir/dir2/link1`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpAnd([]findOp{findOpOr{findOpName("dir*"), findOpName("*1")}, findOpRegular{}}), findOpPrint{}}, depth: maxdepth, }, want: `testdir/file1 testdir/dir1/file1 testdir/dir2/file1`, }, { fc: findCommand{ chdir: "testdir", finddirs: []string{"."}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, want: `. 
./file1 ./file2 ./dir1 ./dir1/file1 ./dir1/file2 ./dir2 ./dir2/file1 ./dir2/file2 ./dir2/link1 ./dir2/link2 ./dir2/link3`, }, { fc: findCommand{ chdir: "testdir", finddirs: []string{"../testdir"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, want: `../testdir ../testdir/file1 ../testdir/file2 ../testdir/dir1 ../testdir/dir1/file1 ../testdir/dir1/file2 ../testdir/dir2 ../testdir/dir2/file1 ../testdir/dir2/file2 ../testdir/dir2/link1 ../testdir/dir2/link2 ../testdir/dir2/link3`, }, { fc: findCommand{ testdir: "testdir", finddirs: []string{"testdir"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, want: `testdir testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3`, }, { fc: findCommand{ chdir: "testdir", testdir: "testdir", finddirs: []string{"."}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, want: `. ./file1 ./file2 ./dir1 ./dir1/file1 ./dir1/file2 ./dir2 ./dir2/file1 ./dir2/file2 ./dir2/link1 ./dir2/link2 ./dir2/link3`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpOr{findOpAnd([]findOp{findOpName("dir2"), findOpPrune{}}), findOpName("file1")}, findOpPrint{}}, depth: maxdepth, }, want: `testdir/file1 testdir/dir1/file1 testdir/dir2`, }, { fc: findCommand{ finddirs: []string{"testdir", "testdir"}, ops: []findOp{findOpPrint{}}, depth: maxdepth, }, want: `testdir testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3 testdir testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3`, }, // symlink { fc: findCommand{ finddirs: []string{"testdir"}, followSymlinks: true, ops: []findOp{findOpRegular{followSymlinks: true}, findOpPrint{}}, depth: 
maxdepth, }, want: `testdir/file1 testdir/file2 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1 testdir/dir2/link2/file1 testdir/dir2/link2/file2`, }, { fc: findCommand{ finddirs: []string{"testdir"}, followSymlinks: true, ops: []findOp{findOpType{mode: os.ModeDir, followSymlinks: true}, findOpPrint{}}, depth: maxdepth, }, want: `testdir testdir/dir1 testdir/dir2 testdir/dir2/link2`, }, { fc: findCommand{ finddirs: []string{"testdir"}, followSymlinks: true, ops: []findOp{findOpType{mode: os.ModeSymlink, followSymlinks: true}, findOpPrint{}}, depth: maxdepth, }, want: `testdir/dir2/link3`, }, { fc: findCommand{ chdir: "testdir", finddirs: []string{"."}, followSymlinks: true, ops: []findOp{findOpRegular{followSymlinks: true}, findOpPrint{}}, depth: maxdepth, }, want: `./file1 ./file2 ./dir1/file1 ./dir1/file2 ./dir2/file1 ./dir2/file2 ./dir2/link1 ./dir2/link2/file1 ./dir2/link2/file2`, }, // maxdepth { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpPrint{}}, depth: 1, }, want: `testdir testdir/file1 testdir/file2 testdir/dir1 testdir/dir2`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpPrint{}}, depth: 2, }, want: `testdir testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3`, }, { fc: findCommand{ finddirs: []string{"testdir"}, ops: []findOp{findOpPrint{}}, depth: 0, }, want: `testdir`, }, } { var wb wordBuffer tc.fc.run(&wb) if got, want := wb.buf.String(), tc.want; got != want { t.Errorf("%#v\n got %q\n want %q", tc.fc, got, want) } } } func TestParseFindleavesCommand(t *testing.T) { for _, tc := range []struct { cmd string want findleavesCommand }{ { cmd: `build/tools/findleaves.py --prune=out --prune=.repo --prune=.git . 
CleanSpec.mk`, want: findleavesCommand{ name: "CleanSpec.mk", dirs: []string{"."}, prunes: []string{"out", ".repo", ".git"}, mindepth: -1, }, }, { cmd: `build/tools/findleaves.py --prune=out --prune=.repo --prune=.git --mindepth=2 art bionic Android.mk`, want: findleavesCommand{ name: "Android.mk", dirs: []string{"art", "bionic"}, prunes: []string{"out", ".repo", ".git"}, mindepth: 2, }, }, } { fc, err := parseFindleavesCommand(tc.cmd) if err != nil { t.Errorf("parseFindleavesCommand(%q)=_, %v; want=_, 0 { fmt.Fprintf(w, "%*corder_onlys:\n", indent, ' ') for _, d := range n.OrderOnlys { showDeps(w, d, indent+1, seen) } } } func showNode(w io.Writer, n *DepNode) { fmt.Fprintf(w, "%s:", n.Output) for _, i := range n.ActualInputs { fmt.Fprintf(w, " %s", i) } fmt.Fprintf(w, "\n") for _, c := range n.Cmds { fmt.Fprintf(w, "\t%s\n", c) } for k, v := range n.TargetSpecificVars { fmt.Fprintf(w, "%s: %s=%s\n", n.Output, k, v.String()) } fmt.Fprintf(w, "\n") fmt.Fprintf(w, "location: %s:%d\n", n.Filename, n.Lineno) if n.IsPhony { fmt.Fprintf(w, "phony: true\n") } seen := make(map[string]int) fmt.Fprintf(w, "dependencies:\n") showDeps(w, n, 1, seen) } func handleNodeQuery(w io.Writer, q string, nodes []*DepNode) { for _, n := range nodes { if n.Output == q { showNode(w, n) break } } } // Query queries q in g. func Query(w io.Writer, q string, g *DepGraph) { if q == "$MAKEFILE_LIST" { for _, mk := range g.accessedMks { fmt.Fprintf(w, "%s: state=%d\n", mk.Filename, mk.State) } return } if q == "$*" { for k, v := range g.vars { fmt.Fprintf(w, "%s=%s\n", k, v.String()) } return } if q == "*" { for _, n := range g.nodes { fmt.Fprintf(w, "%s\n", n.Output) } return } handleNodeQuery(w, q, g.nodes) } regen.cc0100644 0000000 0000000 00000032744 13654546140 011236 0ustar000000000 0000000 // Copyright 2016 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "regen.h" #include #include #include #include #include #include "affinity.h" #include "fileutil.h" #include "find.h" #include "func.h" #include "io.h" #include "log.h" #include "ninja.h" #include "stats.h" #include "strutil.h" #include "thread_pool.h" namespace { #define RETURN_TRUE \ do { \ if (g_flags.dump_kati_stamp) \ needs_regen_ = true; \ else \ return true; \ } while (0) bool ShouldIgnoreDirty(StringPiece s) { Pattern pat(g_flags.ignore_dirty_pattern); Pattern nopat(g_flags.no_ignore_dirty_pattern); return pat.Match(s) && !nopat.Match(s); } class StampChecker { struct GlobResult { string pat; vector result; }; struct ShellResult { CommandOp op; string shell; string shellflag; string cmd; string result; vector missing_dirs; vector files; vector read_dirs; }; public: StampChecker() : needs_regen_(false) {} ~StampChecker() { for (GlobResult* gr : globs_) { delete gr; } for (ShellResult* sr : commands_) { delete sr; } } bool NeedsRegen(double start_time, const string& orig_args) { if (IsMissingOutputs()) RETURN_TRUE; if (CheckStep1(orig_args)) RETURN_TRUE; if (CheckStep2()) RETURN_TRUE; if (!needs_regen_) { FILE* fp = fopen(GetNinjaStampFilename().c_str(), "rb+"); if (!fp) return true; ScopedFile sfp(fp); if (fseek(fp, 0, SEEK_SET) < 0) PERROR("fseek"); size_t r = fwrite(&start_time, sizeof(start_time), 1, fp); CHECK(r == 1); } return needs_regen_; } private: bool IsMissingOutputs() { if (!Exists(GetNinjaFilename())) { fprintf(stderr, "%s is missing, regenerating...\n", GetNinjaFilename().c_str()); return true; } if 
(!Exists(GetNinjaShellScriptFilename())) { fprintf(stderr, "%s is missing, regenerating...\n", GetNinjaShellScriptFilename().c_str()); return true; } return false; } bool CheckStep1(const string& orig_args) { #define LOAD_INT(fp) \ ({ \ int v = LoadInt(fp); \ if (v < 0) { \ fprintf(stderr, "incomplete kati_stamp, regenerating...\n"); \ RETURN_TRUE; \ } \ v; \ }) #define LOAD_STRING(fp, s) \ ({ \ if (!LoadString(fp, s)) { \ fprintf(stderr, "incomplete kati_stamp, regenerating...\n"); \ RETURN_TRUE; \ } \ }) const string& stamp_filename = GetNinjaStampFilename(); FILE* fp = fopen(stamp_filename.c_str(), "rb"); if (!fp) { if (g_flags.regen_debug) printf("%s: %s\n", stamp_filename.c_str(), strerror(errno)); return true; } ScopedFile sfp(fp); double gen_time; size_t r = fread(&gen_time, sizeof(gen_time), 1, fp); gen_time_ = gen_time; if (r != 1) { fprintf(stderr, "incomplete kati_stamp, regenerating...\n"); RETURN_TRUE; } if (g_flags.regen_debug) printf("Generated time: %f\n", gen_time); string s, s2; int num_files = LOAD_INT(fp); for (int i = 0; i < num_files; i++) { LOAD_STRING(fp, &s); double ts = GetTimestamp(s); if (gen_time < ts) { if (g_flags.regen_ignoring_kati_binary) { string kati_binary; GetExecutablePath(&kati_binary); if (s == kati_binary) { fprintf(stderr, "%s was modified, ignored.\n", s.c_str()); continue; } } if (ShouldIgnoreDirty(s)) { if (g_flags.regen_debug) printf("file %s: ignored (%f)\n", s.c_str(), ts); continue; } if (g_flags.dump_kati_stamp) printf("file %s: dirty (%f)\n", s.c_str(), ts); else fprintf(stderr, "%s was modified, regenerating...\n", s.c_str()); RETURN_TRUE; } else if (g_flags.dump_kati_stamp) { printf("file %s: clean (%f)\n", s.c_str(), ts); } } int num_undefineds = LOAD_INT(fp); for (int i = 0; i < num_undefineds; i++) { LOAD_STRING(fp, &s); if (getenv(s.c_str())) { if (g_flags.dump_kati_stamp) { printf("env %s: dirty (unset => %s)\n", s.c_str(), getenv(s.c_str())); } else { fprintf(stderr, "Environment variable %s was set, 
regenerating...\n", s.c_str()); } RETURN_TRUE; } else if (g_flags.dump_kati_stamp) { printf("env %s: clean (unset)\n", s.c_str()); } } int num_envs = LOAD_INT(fp); for (int i = 0; i < num_envs; i++) { LOAD_STRING(fp, &s); StringPiece val(getenv(s.c_str())); LOAD_STRING(fp, &s2); if (val != s2) { if (g_flags.dump_kati_stamp) { printf("env %s: dirty (%s => %.*s)\n", s.c_str(), s2.c_str(), SPF(val)); } else { fprintf(stderr, "Environment variable %s was modified (%s => %.*s), " "regenerating...\n", s.c_str(), s2.c_str(), SPF(val)); } RETURN_TRUE; } else if (g_flags.dump_kati_stamp) { printf("env %s: clean (%.*s)\n", s.c_str(), SPF(val)); } } int num_globs = LOAD_INT(fp); string pat; for (int i = 0; i < num_globs; i++) { GlobResult* gr = new GlobResult; globs_.push_back(gr); LOAD_STRING(fp, &gr->pat); int num_files = LOAD_INT(fp); gr->result.resize(num_files); for (int j = 0; j < num_files; j++) { LOAD_STRING(fp, &gr->result[j]); } } int num_crs = LOAD_INT(fp); for (int i = 0; i < num_crs; i++) { ShellResult* sr = new ShellResult; commands_.push_back(sr); sr->op = static_cast(LOAD_INT(fp)); LOAD_STRING(fp, &sr->shell); LOAD_STRING(fp, &sr->shellflag); LOAD_STRING(fp, &sr->cmd); LOAD_STRING(fp, &sr->result); if (sr->op == CommandOp::FIND) { int num_missing_dirs = LOAD_INT(fp); for (int j = 0; j < num_missing_dirs; j++) { LOAD_STRING(fp, &s); sr->missing_dirs.push_back(s); } int num_files = LOAD_INT(fp); for (int j = 0; j < num_files; j++) { LOAD_STRING(fp, &s); sr->files.push_back(s); } int num_read_dirs = LOAD_INT(fp); for (int j = 0; j < num_read_dirs; j++) { LOAD_STRING(fp, &s); sr->read_dirs.push_back(s); } } } LoadString(fp, &s); if (orig_args != s) { fprintf(stderr, "arguments changed, regenerating...\n"); RETURN_TRUE; } return needs_regen_; } bool CheckGlobResult(const GlobResult* gr, string* err) { COLLECT_STATS("glob time (regen)"); vector* files; Glob(gr->pat.c_str(), &files); bool needs_regen = files->size() != gr->result.size(); for (size_t i = 0; i < 
gr->result.size(); i++) { if (!needs_regen) { if ((*files)[i] != gr->result[i]) { needs_regen = true; break; } } } if (needs_regen) { if (ShouldIgnoreDirty(gr->pat)) { if (g_flags.dump_kati_stamp) { printf("wildcard %s: ignored\n", gr->pat.c_str()); } return false; } if (g_flags.dump_kati_stamp) { printf("wildcard %s: dirty\n", gr->pat.c_str()); } else { *err = StringPrintf("wildcard(%s) was changed, regenerating...\n", gr->pat.c_str()); } } else if (g_flags.dump_kati_stamp) { printf("wildcard %s: clean\n", gr->pat.c_str()); } return needs_regen; } bool ShouldRunCommand(const ShellResult* sr) { if (sr->op != CommandOp::FIND) return true; COLLECT_STATS("stat time (regen)"); for (const string& dir : sr->missing_dirs) { if (Exists(dir)) return true; } for (const string& file : sr->files) { if (!Exists(file)) return true; } for (const string& dir : sr->read_dirs) { // We assume we rarely do a significant change for the top // directory which affects the results of find command. if (dir == "" || dir == "." 
|| ShouldIgnoreDirty(dir)) continue; struct stat st; if (lstat(dir.c_str(), &st) != 0) { return true; } double ts = GetTimestampFromStat(st); if (gen_time_ < ts) { return true; } if (S_ISLNK(st.st_mode)) { ts = GetTimestamp(dir); if (ts < 0 || gen_time_ < ts) return true; } } return false; } bool CheckShellResult(const ShellResult* sr, string* err) { if (sr->op == CommandOp::READ_MISSING) { if (Exists(sr->cmd)) { if (g_flags.dump_kati_stamp) printf("file %s: dirty\n", sr->cmd.c_str()); else *err = StringPrintf("$(file <%s) was changed, regenerating...\n", sr->cmd.c_str()); return true; } if (g_flags.dump_kati_stamp) printf("file %s: clean\n", sr->cmd.c_str()); return false; } if (sr->op == CommandOp::READ) { double ts = GetTimestamp(sr->cmd); if (gen_time_ < ts) { if (g_flags.dump_kati_stamp) printf("file %s: dirty\n", sr->cmd.c_str()); else *err = StringPrintf("$(file <%s) was changed, regenerating...\n", sr->cmd.c_str()); return true; } if (g_flags.dump_kati_stamp) printf("file %s: clean\n", sr->cmd.c_str()); return false; } if (sr->op == CommandOp::WRITE || sr->op == CommandOp::APPEND) { FILE* f = fopen(sr->cmd.c_str(), (sr->op == CommandOp::WRITE) ? 
"wb" : "ab"); if (f == NULL) { PERROR("fopen"); } if (fwrite(&sr->result[0], sr->result.size(), 1, f) != 1) { PERROR("fwrite"); } if (fclose(f) != 0) { PERROR("fclose"); } if (g_flags.dump_kati_stamp) printf("file %s: clean (write)\n", sr->cmd.c_str()); return false; } if (!ShouldRunCommand(sr)) { if (g_flags.regen_debug) printf("shell %s: clean (no rerun)\n", sr->cmd.c_str()); return false; } FindCommand fc; if (fc.Parse(sr->cmd) && !fc.chdir.empty() && ShouldIgnoreDirty(fc.chdir)) { if (g_flags.dump_kati_stamp) printf("shell %s: ignored\n", sr->cmd.c_str()); return false; } COLLECT_STATS_WITH_SLOW_REPORT("shell time (regen)", sr->cmd.c_str()); string result; RunCommand(sr->shell, sr->shellflag, sr->cmd, RedirectStderr::DEV_NULL, &result); FormatForCommandSubstitution(&result); if (sr->result != result) { if (g_flags.dump_kati_stamp) { printf("shell %s: dirty\n", sr->cmd.c_str()); } else { *err = StringPrintf("$(shell %s) was changed, regenerating...\n", sr->cmd.c_str()); //*err += StringPrintf("%s => %s\n", expected.c_str(), result.c_str()); } return true; } else if (g_flags.regen_debug) { printf("shell %s: clean (rerun)\n", sr->cmd.c_str()); } return false; } bool CheckStep2() { unique_ptr tp(NewThreadPool(g_flags.num_jobs)); tp->Submit([this]() { string err; // TODO: Make glob cache thread safe and create a task for each glob. 
SetAffinityForSingleThread(); for (GlobResult* gr : globs_) { if (CheckGlobResult(gr, &err)) { unique_lock lock(mu_); if (!needs_regen_) { needs_regen_ = true; msg_ = err; } break; } } }); tp->Submit([this]() { SetAffinityForSingleThread(); for (ShellResult* sr : commands_) { string err; if (CheckShellResult(sr, &err)) { unique_lock lock(mu_); if (!needs_regen_) { needs_regen_ = true; msg_ = err; } } } }); tp->Wait(); if (needs_regen_) { fprintf(stderr, "%s", msg_.c_str()); } return needs_regen_; } private: double gen_time_; vector globs_; vector commands_; mutex mu_; bool needs_regen_; string msg_; }; } // namespace bool NeedsRegen(double start_time, const string& orig_args) { return StampChecker().NeedsRegen(start_time, orig_args); } regen.h0100644 0000000 0000000 00000001400 13654546140 011061 0ustar000000000 0000000 // Copyright 2016 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef REGEN_H_ #define REGEN_H_ #include using namespace std; bool NeedsRegen(double start_time, const string& orig_args); #endif // REGEN_H_ regen_dump.cc0100644 0000000 0000000 00000005241 13654546140 012253 0ustar000000000 0000000 // Copyright 2016 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore // This command will dump the contents of a kati stamp file into a more portable // format for use by other tools. For now, it just exports the files read. // Later, this will be expanded to include the Glob and Shell commands, but // those require a more complicated output format. #include #include #include "io.h" #include "log.h" #include "strutil.h" int main(int argc, char* argv[]) { bool dump_files = false; bool dump_env = false; if (argc == 1) { fprintf(stderr, "Usage: ckati_stamp_dump [--env] [--files] \n"); return 1; } for (int i = 1; i < argc - 1; i++) { const char* arg = argv[i]; if (!strcmp(arg, "--env")) { dump_env = true; } else if (!strcmp(arg, "--files")) { dump_files = true; } else { fprintf(stderr, "Unknown option: %s", arg); return 1; } } if (!dump_files && !dump_env) { dump_files = true; } FILE* fp = fopen(argv[argc - 1], "rb"); if (!fp) PERROR("fopen"); ScopedFile sfp(fp); double gen_time; size_t r = fread(&gen_time, sizeof(gen_time), 1, fp); if (r != 1) ERROR("Incomplete stamp file"); int num_files = LoadInt(fp); if (num_files < 0) ERROR("Incomplete stamp file"); for (int i = 0; i < num_files; i++) { string s; if (!LoadString(fp, &s)) ERROR("Incomplete stamp file"); if (dump_files) printf("%s\n", s.c_str()); } int num_undefineds = LoadInt(fp); if (num_undefineds < 0) ERROR("Incomplete stamp file"); for (int i = 0; i < num_undefineds; i++) { string s; if (!LoadString(fp, &s)) ERROR("Incomplete stamp file"); if (dump_env) printf("undefined: %s\n", s.c_str()); } int num_envs = LoadInt(fp); if (num_envs < 0) 
ERROR("Incomplete stamp file"); for (int i = 0; i < num_envs; i++) { string name; string val; if (!LoadString(fp, &name)) ERROR("Incomplete stamp file"); if (!LoadString(fp, &val)) ERROR("Incomplete stamp file"); if (dump_env) printf("%s: %s\n", name.c_str(), val.c_str()); } return 0; } rule.cc0100644 0000000 0000000 00000010004 13654546140 011066 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "rule.h" #include "expr.h" #include "log.h" #include "parser.h" #include "stringprintf.h" #include "strutil.h" #include "symtab.h" Rule::Rule() : is_double_colon(false), is_suffix_rule(false), cmd_lineno(0) {} void Rule::ParseInputs(const StringPiece& inputs_str) { bool is_order_only = false; for (auto const& input : WordScanner(inputs_str)) { if (input == "|") { is_order_only = true; continue; } Symbol input_sym = Intern(TrimLeadingCurdir(input)); (is_order_only ? order_only_inputs : inputs).push_back(input_sym); } } void Rule::ParsePrerequisites(const StringPiece& line, size_t separator_pos, const RuleStmt* rule_stmt) { // line is either // prerequisites [ ; command ] // or // target-prerequisites : prereq-patterns [ ; command ] // First, separate command. At this point separator_pos should point to ';' // unless null. 
StringPiece prereq_string = line; if (separator_pos != string::npos && rule_stmt->sep != RuleStmt::SEP_SEMICOLON) { CHECK(line[separator_pos] == ';'); // TODO: Maybe better to avoid Intern here? cmds.push_back(Value::NewLiteral( Intern(TrimLeftSpace(line.substr(separator_pos + 1))).str())); prereq_string = line.substr(0, separator_pos); } if ((separator_pos = prereq_string.find(':')) == string::npos) { // Simple prerequisites ParseInputs(prereq_string); return; } // Static pattern rule. if (!output_patterns.empty()) { ERROR_LOC(loc, "*** mixed implicit and normal rules: deprecated syntax"); } // Empty static patterns should not produce rules, but need to eat the // commands So return a rule with no outputs nor output_patterns if (outputs.empty()) { return; } StringPiece target_prereq = prereq_string.substr(0, separator_pos); StringPiece prereq_patterns = prereq_string.substr(separator_pos + 1); for (StringPiece target_pattern : WordScanner(target_prereq)) { target_pattern = TrimLeadingCurdir(target_pattern); for (Symbol target : outputs) { if (!Pattern(target_pattern).Match(target.str())) { WARN_LOC(loc, "target `%s' doesn't match the target pattern", target.c_str()); } } output_patterns.push_back(Intern(target_pattern)); } if (output_patterns.empty()) { ERROR_LOC(loc, "*** missing target pattern."); } if (output_patterns.size() > 1) { ERROR_LOC(loc, "*** multiple target patterns."); } if (!IsPatternRule(output_patterns[0].str())) { ERROR_LOC(loc, "*** target pattern contains no '%%'."); } ParseInputs(prereq_patterns); } string Rule::DebugString() const { vector v; v.push_back(StringPrintf("outputs=[%s]", JoinSymbols(outputs, ",").c_str())); v.push_back(StringPrintf("inputs=[%s]", JoinSymbols(inputs, ",").c_str())); if (!order_only_inputs.empty()) { v.push_back(StringPrintf("order_only_inputs=[%s]", JoinSymbols(order_only_inputs, ",").c_str())); } if (!output_patterns.empty()) { v.push_back(StringPrintf("output_patterns=[%s]", JoinSymbols(output_patterns, 
",").c_str())); } if (is_double_colon) v.push_back("is_double_colon"); if (is_suffix_rule) v.push_back("is_suffix_rule"); if (!cmds.empty()) { v.push_back(StringPrintf("cmds=[%s]", JoinValues(cmds, ",").c_str())); } return JoinStrings(v, " "); } rule.h0100644 0000000 0000000 00000003113 13654546140 010733 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef RULE_H_ #define RULE_H_ #include #include #include #include "loc.h" #include "log.h" #include "stmt.h" #include "string_piece.h" #include "symtab.h" using namespace std; class Value; class Rule { public: Rule(); Loc cmd_loc() const { return Loc(loc.filename, cmd_lineno); } string DebugString() const; void ParseInputs(const StringPiece& inputs_string); void ParsePrerequisites(const StringPiece& line, size_t pos, const RuleStmt* rule_stmt); static bool IsPatternRule(const StringPiece& target_string) { return target_string.find('%') != string::npos; } vector outputs; vector inputs; vector order_only_inputs; vector output_patterns; bool is_double_colon; bool is_suffix_rule; vector cmds; Loc loc; int cmd_lineno; private: void Error(const string& msg) { ERROR_LOC(loc, "%s", msg.c_str()); } }; #endif // RULE_H_ rule_parser.go0100644 0000000 0000000 00000013650 13654546140 012474 0ustar000000000 0000000 // Copyright 2015 Google Inc. 
All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "bytes" "errors" "fmt" "strings" ) type pattern struct { prefix, suffix string } func (p pattern) String() string { return p.prefix + "%" + p.suffix } func (p pattern) match(s string) bool { return strings.HasPrefix(s, p.prefix) && strings.HasSuffix(s, p.suffix) } func (p pattern) subst(repl, str string) string { in := str trimed := str if p.prefix != "" { trimed = strings.TrimPrefix(in, p.prefix) if trimed == in { return str } } in = trimed if p.suffix != "" { trimed = strings.TrimSuffix(in, p.suffix) if trimed == in { return str } } rs := strings.SplitN(repl, "%", 2) if len(rs) != 2 { return repl } return rs[0] + trimed + rs[1] } type rule struct { srcpos // outputs is output of the rule. // []string{} for ': xxx' // nil for empty line. outputs []string inputs []string orderOnlyInputs []string outputPatterns []pattern isDoubleColon bool isSuffixRule bool cmds []string cmdLineno int } func (r *rule) cmdpos() srcpos { return srcpos{filename: r.filename, lineno: r.cmdLineno} } func isPatternRule(s []byte) (pattern, bool) { i := findLiteralChar(s, '%', 0, noSkipVar) if i < 0 { return pattern{}, false } return pattern{prefix: string(s[:i]), suffix: string(s[i+1:])}, true } func unescapeInput(s []byte) []byte { // only "\ ", "\=" becoms " ", "=" respectively? // other \-escape, such as "\:" keeps "\:". 
for i := 0; i < len(s); i++ { if s[i] != '\\' { continue } if i+1 < len(s) && s[i+1] == ' ' || s[i+1] == '=' { copy(s[i:], s[i+1:]) s = s[:len(s)-1] } } return s } func unescapeTarget(s []byte) []byte { for i := 0; i < len(s); i++ { if s[i] != '\\' { continue } copy(s[i:], s[i+1:]) s = s[:len(s)-1] } return s } func (r *rule) parseInputs(s []byte) { ws := newWordScanner(s) ws.esc = true add := func(t string) { r.inputs = append(r.inputs, t) } for ws.Scan() { input := ws.Bytes() if len(input) == 1 && input[0] == '|' { add = func(t string) { r.orderOnlyInputs = append(r.orderOnlyInputs, t) } continue } input = unescapeInput(input) if !hasWildcardMetaByte(input) { add(internBytes(input)) continue } m, _ := fsCache.Glob(string(input)) if len(m) == 0 { add(internBytes(input)) continue } for _, t := range m { add(intern(t)) } } } func (r *rule) parseVar(s []byte, rhs expr) (*assignAST, error) { var lhsBytes []byte var op string // TODO(ukai): support override, export. if s[len(s)-1] != '=' { panic(fmt.Sprintf("unexpected lhs %q", s)) } switch s[len(s)-2] { // s[len(s)-1] is '=' case ':': lhsBytes = trimSpaceBytes(s[:len(s)-2]) op = ":=" case '+': lhsBytes = trimSpaceBytes(s[:len(s)-2]) op = "+=" case '?': lhsBytes = trimSpaceBytes(s[:len(s)-2]) op = "?=" default: lhsBytes = trimSpaceBytes(s[:len(s)-1]) op = "=" } assign := &assignAST{ lhs: literal(string(lhsBytes)), rhs: compactExpr(rhs), op: op, } assign.srcpos = r.srcpos return assign, nil } // parse parses rule line. // line is rule line until '=', or before ';'. // line was already expaned, so probably no need to skip var $(xxx) when // finding literal char. i.e. $ is parsed as literal '$'. // assign is not nil, if line was known as target specific var ': =' // rhs is not nil, if line ended with '=' (target specific var after evaluated) func (r *rule) parse(line []byte, assign *assignAST, rhs expr) (*assignAST, error) { line = trimLeftSpaceBytes(line) // See semicolon.mk. 
if rhs == nil && (len(line) == 0 || line[0] == ';') { return nil, nil } r.outputs = []string{} index := findLiteralChar(line, ':', 0, noSkipVar) if index < 0 { return nil, errors.New("*** missing separator.") } first := line[:index] ws := newWordScanner(first) ws.esc = true pat, isFirstPattern := isPatternRule(first) if isFirstPattern { n := 0 for ws.Scan() { n++ if n > 1 { return nil, errors.New("*** mixed implicit and normal rules: deprecated syntax") } } r.outputPatterns = []pattern{pat} } else { for ws.Scan() { // TODO(ukai): expand raw wildcard for output. any usage? r.outputs = append(r.outputs, internBytes(unescapeTarget(ws.Bytes()))) } } index++ if index < len(line) && line[index] == ':' { r.isDoubleColon = true index++ } rest := line[index:] if assign != nil { if len(rest) > 0 { panic(fmt.Sprintf("pattern specific var? line:%q", line)) } return assign, nil } if rhs != nil { assign, err := r.parseVar(rest, rhs) if err != nil { return nil, err } return assign, nil } index = bytes.IndexByte(rest, ';') if index >= 0 { r.cmds = append(r.cmds, string(rest[index+1:])) rest = rest[:index-1] } index = findLiteralChar(rest, ':', 0, noSkipVar) if index < 0 { r.parseInputs(rest) return nil, nil } // %.x: %.y: %.z if isFirstPattern { return nil, errors.New("*** mixed implicit and normal rules: deprecated syntax") } second := rest[:index] third := rest[index+1:] // r.outputs is already set. ws = newWordScanner(second) if !ws.Scan() { return nil, errors.New("*** missing target pattern.") } outpat, ok := isPatternRule(ws.Bytes()) if !ok { return nil, errors.New("*** target pattern contains no '%'.") } r.outputPatterns = []pattern{outpat} if ws.Scan() { return nil, errors.New("*** multiple target patterns.") } r.parseInputs(third) return nil, nil } rule_parser_test.go0100644 0000000 0000000 00000011324 13654546140 013527 0ustar000000000 0000000 // Copyright 2015 Google Inc. 
All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "reflect" "testing" ) func TestRuleParser(t *testing.T) { for _, tc := range []struct { in string tsv *assignAST rhs expr want rule assign *assignAST err string }{ { in: "foo: bar", want: rule{ outputs: []string{"foo"}, inputs: []string{"bar"}, }, }, { in: "foo: bar baz", want: rule{ outputs: []string{"foo"}, inputs: []string{"bar", "baz"}, }, }, { in: "foo:: bar", want: rule{ outputs: []string{"foo"}, inputs: []string{"bar"}, isDoubleColon: true, }, }, { in: "foo", err: "*** missing separator.", }, { in: "%.o: %.c", want: rule{ outputs: []string{}, outputPatterns: []pattern{pattern{suffix: ".o"}}, inputs: []string{"%.c"}, }, }, { in: "foo %.o: %.c", err: "*** mixed implicit and normal rules: deprecated syntax", }, { in: "foo.o: %.o: %.c %.h", want: rule{ outputs: []string{"foo.o"}, outputPatterns: []pattern{pattern{suffix: ".o"}}, inputs: []string{"%.c", "%.h"}, }, }, { in: "%.x: %.y: %.z", err: "*** mixed implicit and normal rules: deprecated syntax", }, { in: "foo.o: : %.c", err: "*** missing target pattern.", }, { in: "foo.o: %.o %.o: %.c", err: "*** multiple target patterns.", }, { in: "foo.o: foo.o: %.c", err: "*** target pattern contains no '%'.", }, { in: "foo: bar | baz", want: rule{ outputs: []string{"foo"}, inputs: []string{"bar"}, orderOnlyInputs: []string{"baz"}, }, }, { in: "foo: CFLAGS =", rhs: expr{literal("-g")}, want: rule{ outputs: []string{"foo"}, }, assign: 
&assignAST{ lhs: literal("CFLAGS"), rhs: literal("-g"), op: "=", }, }, { in: "foo:", tsv: &assignAST{ lhs: literal("CFLAGS"), rhs: literal("-g"), op: "=", }, want: rule{ outputs: []string{"foo"}, }, assign: &assignAST{ lhs: literal("CFLAGS"), rhs: literal("-g"), op: "=", }, }, { in: "foo: CFLAGS=", rhs: expr{literal("-g")}, want: rule{ outputs: []string{"foo"}, }, assign: &assignAST{ lhs: literal("CFLAGS"), rhs: literal("-g"), op: "=", }, }, { in: "foo: CFLAGS :=", rhs: expr{literal("-g")}, want: rule{ outputs: []string{"foo"}, }, assign: &assignAST{ lhs: literal("CFLAGS"), rhs: literal("-g"), op: ":=", }, }, { in: "%.o: CFLAGS :=", rhs: expr{literal("-g")}, want: rule{ outputs: []string{}, outputPatterns: []pattern{pattern{suffix: ".o"}}, }, assign: &assignAST{ lhs: literal("CFLAGS"), rhs: literal("-g"), op: ":=", }, }, { in: "%.o:", tsv: &assignAST{ lhs: literal("CFLAGS"), rhs: literal("-g"), op: ":=", }, want: rule{ outputs: []string{}, outputPatterns: []pattern{pattern{suffix: ".o"}}, }, assign: &assignAST{ lhs: literal("CFLAGS"), rhs: literal("-g"), op: ":=", }, }, /* TODO { in: "foo.o: %.c: %.c", err: "*** target 'foo.o' doesn't match the target pattern", }, */ } { got := &rule{} assign, err := got.parse([]byte(tc.in), tc.tsv, tc.rhs) if tc.err != "" { if err == nil { t.Errorf(`r.parse(%q, %v)=_, , want _, %q`, tc.in, tc.rhs, tc.err) continue } if got, want := err.Error(), tc.err; got != want { t.Errorf(`r.parse(%q, %v)=_, %s, want %s`, tc.in, tc.rhs, got, want) } continue } if err != nil { t.Errorf(`r.parse(%q, %v)=_, %v; want nil error`, tc.in, tc.rhs, err) continue } if !reflect.DeepEqual(*got, tc.want) { t.Errorf(`r.parse(%q, %v); r=%#v, want %#v`, tc.in, tc.rhs, *got, tc.want) } if tc.assign != nil { if assign == nil { t.Errorf(`r.parse(%q, %v)=; want=%#v`, tc.in, tc.rhs, tc.assign) continue } if got, want := assign, tc.assign; !reflect.DeepEqual(got, want) { t.Errorf(`r.parse(%q, %v)=%#v; want=%#v`, tc.in, tc.rhs, got, want) } continue } if assign != 
nil { t.Errorf(`r.parse(%q, %v)=%v; want=`, tc.in, tc.rhs, assign) } } } runtest.rb0100755 0000000 0000000 00000026725 13654546140 011665 0ustar000000000 0000000 #!/usr/bin/env ruby # coding: binary # # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'fileutils' # suppress GNU make jobserver magic when calling "make" ENV.delete('MAKEFLAGS') ENV.delete('MAKELEVEL') while true if ARGV[0] == '-s' test_serialization = true ARGV.shift elsif ARGV[0] == '-c' ckati = true ARGV.shift ENV['KATI_VARIANT'] = 'c' elsif ARGV[0] == '-n' via_ninja = true ARGV.shift ENV['NINJA_STATUS'] = 'NINJACMD: ' elsif ARGV[0] == '-a' gen_all_targets = true ARGV.shift elsif ARGV[0] == '-v' show_failing = true ARGV.shift elsif ARGV[0] == "-q" hide_passing = true ARGV.shift else break end end def get_output_filenames files = Dir.glob('*') files.delete('Makefile') files.delete('build.ninja') files.delete('env.sh') files.delete('ninja.sh') files.delete('gmon.out') files.delete('submake') files.reject!{|f|f =~ /\.json$/} files.reject!{|f|f =~ /^kati\.*/} files end def cleanup (get_output_filenames + Dir.glob('.*')).each do |fname| next if fname == '.' || fname == '..' FileUtils.rm_rf fname end end def move_circular_dep(l) # We don't care when circular dependency detection happens. circ = '' while l.sub!(/Circular .* dropped\.\n/, '') do circ += $& end circ + l end expected_failures = [] unexpected_passes = [] failures = [] passes = [] if !ARGV.empty? 
test_files = ARGV.map do |test| "testcase/#{File.basename(test)}" end else test_files = Dir.glob('testcase/*.mk').sort test_files += Dir.glob('testcase/*.sh').sort end def run_in_testdir(test_filename) c = File.read(test_filename) name = File.basename(test_filename) dir = "out/#{name}" FileUtils.mkdir_p(dir) Dir.glob("#{dir}/*").each do |fname| FileUtils.rm_rf(fname) end Dir.chdir(dir) do yield name end end def normalize_ninja_log(log, mk) log.gsub!(/^NINJACMD: .*\n/, '') log.gsub!(/^ninja: no work to do\.\n/, '') log.gsub!(/^ninja: error: (.*, needed by .*),.*/, '*** No rule to make target \\1.') log.gsub!(/^ninja: warning: multiple rules generate (.*)\. builds involving this target will not be correct.*$/, 'ninja: warning: multiple rules generate \\1.') if mk =~ /err_error_in_recipe.mk/ # This test expects ninja fails. Strip ninja specific error logs. ninja_failed_subst = '' elsif mk =~ /\/fail_/ # Recipes in these tests fail. ninja_failed_subst = "*** [test] Error 1\n" end if ninja_failed_subst log.gsub!(/^FAILED: (.*\n\/bin\/bash)?.*\n/, ninja_failed_subst) log.gsub!(/^ninja: .*\n/, '') end log end def normalize_quotes(log) log.gsub!(/[`'"]/, '"') # For recent GNU find, which uses Unicode characters. log.gsub!(/(\xe2\x80\x98|\xe2\x80\x99)/, '"') log end def normalize_make_log(expected, mk, via_ninja) expected = normalize_quotes(expected) expected.gsub!(/^make(?:\[\d+\])?: (Entering|Leaving) directory.*\n/, '') expected.gsub!(/^make(?:\[\d+\])?: /, '') expected = move_circular_dep(expected) # Normalizations for old/new GNU make. expected.gsub!(' recipe for target ', ' commands for target ') expected.gsub!(' recipe commences ', ' commands commence ') expected.gsub!('missing rule before recipe.', 'missing rule before commands.') expected.gsub!(' (did you mean TAB instead of 8 spaces?)', '') expected.gsub!('Extraneous text after', 'extraneous text after') # Not sure if this is useful. expected.gsub!(/\s+Stop\.$/, '') # GNU make 4.0 has this output. 
expected.gsub!(/Makefile:\d+: commands for target ".*?" failed\n/, '') # We treat some warnings as errors. expected.gsub!(/^\/bin\/(ba)?sh: line 0: /, '') # We print out some ninja warnings in some tests to match what we expect # ninja to produce. Remove them if we're not testing ninja. if !via_ninja expected.gsub!(/^ninja: warning: .*\n/, '') end # Normalization for "include foo" with C++ kati. expected.gsub!(/(: )(\S+): (No such file or directory)\n\*\*\* No rule to make target "\2"./, '\1\2: \3') expected end def normalize_kati_log(output) output = normalize_quotes(output) output = move_circular_dep(output) # kati specific log messages. output.gsub!(/^\*kati\*.*\n/, '') output.gsub!(/^c?kati: /, '') output.gsub!(/\/bin\/sh: ([^:]*): command not found/, "\\1: Command not found") output.gsub!(/.*: warning for parse error in an unevaluated line: .*\n/, '') output.gsub!(/^([^ ]+: )?FindEmulator: /, '') output.gsub!(/^\/bin\/sh: line 0: /, '') output.gsub!(/ (\.\/+)+kati\.\S+/, '') # kati log files in find_command.mk output.gsub!(/ (\.\/+)+test\S+.json/, '') # json files in find_command.mk # Normalization for "include foo" with Go kati. 
output.gsub!(/(: )open (\S+): n(o such file or directory)\nNOTE:.*/, "\\1\\2: N\\3") # Bionic libc has different error messages than glibc output.gsub!(/Too many symbolic links encountered/, 'Too many levels of symbolic links') output end bash_var = ' SHELL=/bin/bash' run_make_test = proc do |mk| c = File.read(mk) expected_failure = false if c =~ /\A# TODO(?:\(([-a-z|]+)\))?/ if $1 todos = $1.split('|') if todos.include?('go') && !ckati expected_failure = true end if todos.include?('c') && ckati expected_failure = true end if todos.include?('go-ninja') && !ckati && via_ninja expected_failure = true end if todos.include?('c-ninja') && ckati && via_ninja expected_failure = true end if todos.include?('c-exec') && ckati && !via_ninja expected_failure = true end if todos.include?('ninja') && via_ninja expected_failure = true end else expected_failure = true end end run_in_testdir(mk) do |name| File.open("Makefile", 'w') do |ofile| ofile.print(c) end File.symlink('../../testcase/submake', 'submake') expected = '' output = '' testcases = c.scan(/^test\d*/).sort.uniq if testcases.empty? testcases = [''] end is_silent_test = mk =~ /\/submake_/ cleanup testcases.each do |tc| cmd = 'make' if via_ninja || is_silent_test cmd += ' -s' end cmd += bash_var cmd += " #{tc} 2>&1" res = IO.popen(cmd, 'r:binary', &:read) res = normalize_make_log(res, mk, via_ninja) expected += "=== #{tc} ===\n" + res expected_files = get_output_filenames expected += "\n=== FILES ===\n#{expected_files * "\n"}\n" end cleanup testcases.each do |tc| json = "#{tc.empty? ? 'test' : tc}" cmd = "../../kati -save_json=#{json}.json -log_dir=. 
--use_find_emulator" if ckati cmd = "../../ckati --use_find_emulator" end if via_ninja cmd += ' --ninja' end if gen_all_targets if !ckati || !via_ninja raise "-a should be used with -c -n" end cmd += ' --gen_all_targets' end if is_silent_test cmd += ' -s' end cmd += bash_var if !gen_all_targets || mk =~ /makecmdgoals/ cmd += " #{tc}" end cmd += " 2>&1" res = IO.popen(cmd, 'r:binary', &:read) if via_ninja && File.exist?('build.ninja') && File.exists?('ninja.sh') cmd = './ninja.sh -j1 -v' if gen_all_targets cmd += " #{tc}" end cmd += ' 2>&1' log = IO.popen(cmd, 'r:binary', &:read) res += normalize_ninja_log(log, mk) end res = normalize_kati_log(res) output += "=== #{tc} ===\n" + res output_files = get_output_filenames output += "\n=== FILES ===\n#{output_files * "\n"}\n" end File.open('out.make', 'w'){|ofile|ofile.print(expected)} File.open('out.kati', 'w'){|ofile|ofile.print(output)} if expected =~ /FAIL/ puts %Q(#{name} has a string "FAIL" in its expectation) exit 1 end if expected != output if expected_failure if !hide_passing puts "#{name}: FAIL (expected)" end expected_failures << name else puts "#{name}: FAIL" failures << name end if !expected_failure || show_failing puts `diff -u out.make out.kati` end else if expected_failure puts "#{name}: PASS (unexpected)" unexpected_passes << name else if !hide_passing puts "#{name}: PASS" end passes << name end end if name !~ /^err_/ && test_serialization && !expected_failure testcases.each do |tc| json = "#{tc.empty? ? 'test' : tc}" cmd = "../../kati -save_json=#{json}_2.json -load_json=#{json}.json -n -log_dir=. 
#{tc} 2>&1" res = IO.popen(cmd, 'r:binary', &:read) if !File.exist?("#{json}.json") || !File.exist?("#{json}_2.json") puts "#{name}##{json}: Serialize failure (not exist)" puts res else json1 = File.read("#{json}.json") json2 = File.read("#{json}_2.json") if json1 != json2 puts "#{name}##{json}: Serialize failure" puts res end end end end end end run_shell_test = proc do |sh| is_ninja_test = sh =~ /\/ninja_/ if is_ninja_test && (!ckati || !via_ninja) next end run_in_testdir(sh) do |name| cleanup cmd = "bash ../../#{sh} make" if is_ninja_test cmd += ' -s' end cmd += bash_var expected = IO.popen(cmd, 'r:binary', &:read) cleanup if is_ninja_test if ckati cmd = "bash ../../#{sh} ../../ckati --ninja --regen" else next end else if ckati cmd = "bash ../../#{sh} ../../ckati" else cmd = "bash ../../#{sh} ../../kati --use_cache -log_dir=." end end cmd += bash_var output = IO.popen(cmd, 'r:binary', &:read) expected = normalize_make_log(expected, sh, is_ninja_test) output = normalize_kati_log(output) if is_ninja_test output = normalize_ninja_log(output, sh) end File.open('out.make', 'w'){|ofile|ofile.print(expected)} File.open('out.kati', 'w'){|ofile|ofile.print(output)} if expected != output puts "#{name}: FAIL" puts `diff -u out.make out.kati` failures << name else if !hide_passing puts "#{name}: PASS" end passes << name end end end test_files.each do |test| if /\.mk$/ =~ test run_make_test.call(test) elsif /\.sh$/ =~ test run_shell_test.call(test) else raise "Unknown test type: #{test}" end end puts if !expected_failures.empty? && !hide_passing puts "=== Expected failures ===" expected_failures.each do |n| puts n end end if !unexpected_passes.empty? puts "=== Unexpected passes ===" unexpected_passes.each do |n| puts n end end if !failures.empty? puts "=== Failures ===" failures.each do |n| puts n end end puts if !unexpected_passes.empty? || !failures.empty? puts "FAIL! (#{failures.size + unexpected_passes.size} fails #{passes.size} passes)" exit 1 else puts 'PASS!' 
end serialize.go0100644 0000000 0000000 00000043026 13654546140 012140 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "bytes" "crypto/sha1" "encoding/binary" "encoding/gob" "encoding/json" "fmt" "io" "io/ioutil" "net/url" "os" "sort" "strconv" "strings" "time" "github.com/golang/glog" ) const ( valueTypeRecursive = 'R' valueTypeSimple = 'S' valueTypeTSV = 'T' valueTypeUndefined = 'U' valueTypeAssign = 'a' valueTypeExpr = 'e' valueTypeFunc = 'f' valueTypeLiteral = 'l' valueTypeNop = 'n' valueTypeParamref = 'p' valueTypeVarref = 'r' valueTypeVarsubst = 's' valueTypeTmpval = 't' ) // JSON is a json loader/saver. var JSON LoadSaver // GOB is a gob loader/saver. 
var GOB LoadSaver func init() { JSON = jsonLoadSaver{} GOB = gobLoadSaver{} } type jsonLoadSaver struct{} type gobLoadSaver struct{} type dumpbuf struct { w bytes.Buffer err error } func (d *dumpbuf) Int(i int) { if d.err != nil { return } v := int32(i) d.err = binary.Write(&d.w, binary.LittleEndian, &v) } func (d *dumpbuf) Str(s string) { if d.err != nil { return } d.Int(len(s)) if d.err != nil { return } _, d.err = io.WriteString(&d.w, s) } func (d *dumpbuf) Bytes(b []byte) { if d.err != nil { return } d.Int(len(b)) if d.err != nil { return } _, d.err = d.w.Write(b) } func (d *dumpbuf) Byte(b byte) { if d.err != nil { return } d.err = writeByte(&d.w, b) } type serializableVar struct { Type string V string Origin string Children []serializableVar } type serializableDepNode struct { Output int Cmds []string Deps []int OrderOnlys []int Parents []int HasRule bool IsPhony bool ActualInputs []int TargetSpecificVars []int Filename string Lineno int } type serializableTargetSpecificVar struct { Name string Value serializableVar } type serializableGraph struct { Nodes []*serializableDepNode Vars map[string]serializableVar Tsvs []serializableTargetSpecificVar Targets []string Roots []string AccessedMks []*accessedMakefile Exports map[string]bool } func encGob(v interface{}) (string, error) { var buf bytes.Buffer e := gob.NewEncoder(&buf) err := e.Encode(v) if err != nil { return "", err } return buf.String(), nil } func encVar(k string, v Var) (string, error) { var dump dumpbuf dump.Str(k) v.dump(&dump) return dump.w.String(), dump.err } type depNodesSerializer struct { nodes []*serializableDepNode tsvs []serializableTargetSpecificVar tsvMap map[string]int targets []string targetMap map[string]int done map[string]bool err error } func newDepNodesSerializer() *depNodesSerializer { return &depNodesSerializer{ tsvMap: make(map[string]int), targetMap: make(map[string]int), done: make(map[string]bool), } } func (ns *depNodesSerializer) serializeTarget(t string) int { id, 
present := ns.targetMap[t] if present { return id } id = len(ns.targets) ns.targetMap[t] = id ns.targets = append(ns.targets, t) return id } func (ns *depNodesSerializer) serializeDepNodes(nodes []*DepNode) { if ns.err != nil { return } for _, n := range nodes { if ns.done[n.Output] { continue } ns.done[n.Output] = true var deps []int for _, d := range n.Deps { deps = append(deps, ns.serializeTarget(d.Output)) } var orderonlys []int for _, d := range n.OrderOnlys { orderonlys = append(orderonlys, ns.serializeTarget(d.Output)) } var parents []int for _, d := range n.Parents { parents = append(parents, ns.serializeTarget(d.Output)) } var actualInputs []int for _, i := range n.ActualInputs { actualInputs = append(actualInputs, ns.serializeTarget(i)) } // Sort keys for consistent serialization. var tsvKeys []string for k := range n.TargetSpecificVars { tsvKeys = append(tsvKeys, k) } sort.Strings(tsvKeys) var vars []int for _, k := range tsvKeys { v := n.TargetSpecificVars[k] sv := serializableTargetSpecificVar{Name: k, Value: v.serialize()} //gob := encGob(sv) gob, err := encVar(k, v) if err != nil { ns.err = err return } id, present := ns.tsvMap[gob] if !present { id = len(ns.tsvs) ns.tsvMap[gob] = id ns.tsvs = append(ns.tsvs, sv) } vars = append(vars, id) } ns.nodes = append(ns.nodes, &serializableDepNode{ Output: ns.serializeTarget(n.Output), Cmds: n.Cmds, Deps: deps, OrderOnlys: orderonlys, Parents: parents, HasRule: n.HasRule, IsPhony: n.IsPhony, ActualInputs: actualInputs, TargetSpecificVars: vars, Filename: n.Filename, Lineno: n.Lineno, }) ns.serializeDepNodes(n.Deps) if ns.err != nil { return } ns.serializeDepNodes(n.OrderOnlys) if ns.err != nil { return } } } func makeSerializableVars(vars Vars) (r map[string]serializableVar) { r = make(map[string]serializableVar) for k, v := range vars { r[k] = v.serialize() } return r } func makeSerializableGraph(g *DepGraph, roots []string) (serializableGraph, error) { ns := newDepNodesSerializer() 
ns.serializeDepNodes(g.nodes) v := makeSerializableVars(g.vars) return serializableGraph{ Nodes: ns.nodes, Vars: v, Tsvs: ns.tsvs, Targets: ns.targets, Roots: roots, AccessedMks: g.accessedMks, Exports: g.exports, }, ns.err } func (jsonLoadSaver) Save(g *DepGraph, filename string, roots []string) error { startTime := time.Now() sg, err := makeSerializableGraph(g, roots) if err != nil { return err } o, err := json.MarshalIndent(sg, " ", " ") if err != nil { return err } f, err := os.Create(filename) if err != nil { return err } _, err = f.Write(o) if err != nil { f.Close() return err } err = f.Close() if err != nil { return err } logStats("json serialize time: %q", time.Since(startTime)) return nil } func (gobLoadSaver) Save(g *DepGraph, filename string, roots []string) error { startTime := time.Now() f, err := os.Create(filename) if err != nil { return err } e := gob.NewEncoder(f) var sg serializableGraph { startTime := time.Now() sg, err = makeSerializableGraph(g, roots) if err != nil { return err } logStats("gob serialize prepare time: %q", time.Since(startTime)) } { startTime := time.Now() err = e.Encode(sg) if err != nil { return err } logStats("gob serialize output time: %q", time.Since(startTime)) } err = f.Close() if err != nil { return err } logStats("gob serialize time: %q", time.Since(startTime)) return nil } func cacheFilename(mk string, roots []string) string { filename := ".kati_cache." + mk for _, r := range roots { filename += "." + r } return url.QueryEscape(filename) } func saveCache(g *DepGraph, roots []string) error { if len(g.accessedMks) == 0 { return fmt.Errorf("no Makefile is read") } cacheFile := cacheFilename(g.accessedMks[0].Filename, roots) for _, mk := range g.accessedMks { // Inconsistent, do not dump this result. 
if mk.State == fileInconsistent { if exists(cacheFile) { os.Remove(cacheFile) } return nil } } return GOB.Save(g, cacheFile, roots) } func deserializeSingleChild(sv serializableVar) (Value, error) { if len(sv.Children) != 1 { return nil, fmt.Errorf("unexpected number of children: %q", sv) } return deserializeVar(sv.Children[0]) } func deserializeVar(sv serializableVar) (r Value, err error) { switch sv.Type { case "literal": return literal(sv.V), nil case "tmpval": return tmpval([]byte(sv.V)), nil case "expr": var e expr for _, v := range sv.Children { dv, err := deserializeVar(v) if err != nil { return nil, err } e = append(e, dv) } return e, nil case "varref": dv, err := deserializeSingleChild(sv) if err != nil { return nil, err } return &varref{varname: dv, paren: sv.V[0]}, nil case "paramref": v, err := strconv.Atoi(sv.V) if err != nil { return nil, err } return paramref(v), nil case "varsubst": varname, err := deserializeVar(sv.Children[0]) if err != nil { return nil, err } pat, err := deserializeVar(sv.Children[1]) if err != nil { return nil, err } subst, err := deserializeVar(sv.Children[2]) if err != nil { return nil, err } return varsubst{ varname: varname, pat: pat, subst: subst, paren: sv.V[0], }, nil case "func": dv, err := deserializeVar(sv.Children[0]) if err != nil { return nil, err } name, ok := dv.(literal) if !ok { return nil, fmt.Errorf("func name is not literal %s: %T", dv, dv) } f := funcMap[string(name[1:])]() f.AddArg(name) for _, a := range sv.Children[1:] { dv, err := deserializeVar(a) if err != nil { return nil, err } f.AddArg(dv) } return f, nil case "funcEvalAssign": rhs, err := deserializeVar(sv.Children[2]) if err != nil { return nil, err } return &funcEvalAssign{ lhs: sv.Children[0].V, op: sv.Children[1].V, rhs: rhs, }, nil case "funcNop": return &funcNop{expr: sv.V}, nil case "simple": return &simpleVar{ value: strings.Split(sv.V, " "), origin: sv.Origin, }, nil case "recursive": expr, err := deserializeSingleChild(sv) if err != nil { 
return nil, err } return &recursiveVar{ expr: expr, origin: sv.Origin, }, nil case ":=", "=", "+=", "?=": dv, err := deserializeSingleChild(sv) if err != nil { return nil, err } v, ok := dv.(Var) if !ok { return nil, fmt.Errorf("not var: target specific var %s %T", dv, dv) } return &targetSpecificVar{ v: v, op: sv.Type, }, nil default: return nil, fmt.Errorf("unknown serialized variable type: %q", sv) } } func deserializeVars(vars map[string]serializableVar) (Vars, error) { r := make(Vars) for k, v := range vars { dv, err := deserializeVar(v) if err != nil { return nil, err } vv, ok := dv.(Var) if !ok { return nil, fmt.Errorf("not var: %s: %T", dv, dv) } r[k] = vv } return r, nil } func deserializeNodes(g serializableGraph) (r []*DepNode, err error) { nodes := g.Nodes tsvs := g.Tsvs targets := g.Targets // Deserialize all TSVs first so that multiple rules can share memory. var tsvValues []Var for _, sv := range tsvs { dv, err := deserializeVar(sv.Value) if err != nil { return nil, err } vv, ok := dv.(Var) if !ok { return nil, fmt.Errorf("not var: %s %T", dv, dv) } tsvValues = append(tsvValues, vv) } nodeMap := make(map[string]*DepNode) for _, n := range nodes { var actualInputs []string for _, i := range n.ActualInputs { actualInputs = append(actualInputs, targets[i]) } d := &DepNode{ Output: targets[n.Output], Cmds: n.Cmds, HasRule: n.HasRule, IsPhony: n.IsPhony, ActualInputs: actualInputs, Filename: n.Filename, Lineno: n.Lineno, TargetSpecificVars: make(Vars), } for _, id := range n.TargetSpecificVars { sv := tsvs[id] d.TargetSpecificVars[sv.Name] = tsvValues[id] } nodeMap[targets[n.Output]] = d r = append(r, d) } for _, n := range nodes { d := nodeMap[targets[n.Output]] for _, o := range n.Deps { c, present := nodeMap[targets[o]] if !present { return nil, fmt.Errorf("unknown target: %d (%s)", o, targets[o]) } d.Deps = append(d.Deps, c) } for _, o := range n.OrderOnlys { c, present := nodeMap[targets[o]] if !present { return nil, fmt.Errorf("unknown target: %d 
(%s)", o, targets[o]) } d.OrderOnlys = append(d.OrderOnlys, c) } for _, o := range n.Parents { c, present := nodeMap[targets[o]] if !present { return nil, fmt.Errorf("unknown target: %d (%s)", o, targets[o]) } d.Parents = append(d.Parents, c) } } return r, nil } func human(n int) string { if n >= 10*1000*1000*1000 { return fmt.Sprintf("%.2fGB", float32(n)/1000/1000/1000) } if n >= 10*1000*1000 { return fmt.Sprintf("%.2fMB", float32(n)/1000/1000) } if n >= 10*1000 { return fmt.Sprintf("%.2fkB", float32(n)/1000) } return fmt.Sprintf("%dB", n) } func showSerializedNodesStats(nodes []*serializableDepNode) { outputSize := 0 cmdSize := 0 depsSize := 0 orderOnlysSize := 0 actualInputSize := 0 tsvSize := 0 filenameSize := 0 linenoSize := 0 for _, n := range nodes { outputSize += 4 for _, c := range n.Cmds { cmdSize += len(c) } depsSize += 4 * len(n.Deps) orderOnlysSize += 4 * len(n.OrderOnlys) actualInputSize += 4 * len(n.ActualInputs) tsvSize += 4 * len(n.TargetSpecificVars) filenameSize += len(n.Filename) linenoSize += 4 } size := outputSize + cmdSize + depsSize + orderOnlysSize + actualInputSize + tsvSize + filenameSize + linenoSize logStats("%d nodes %s", len(nodes), human(size)) logStats(" output %s", human(outputSize)) logStats(" command %s", human(cmdSize)) logStats(" deps %s", human(depsSize)) logStats(" orderonlys %s", human(orderOnlysSize)) logStats(" inputs %s", human(actualInputSize)) logStats(" tsv %s", human(tsvSize)) logStats(" filename %s", human(filenameSize)) logStats(" lineno %s", human(linenoSize)) } func (v serializableVar) size() int { size := 0 size += len(v.Type) size += len(v.V) size += len(v.Origin) for _, c := range v.Children { size += c.size() } return size } func showSerializedVarsStats(vars map[string]serializableVar) { nameSize := 0 valueSize := 0 for k, v := range vars { nameSize += len(k) valueSize += v.size() } size := nameSize + valueSize logStats("%d vars %s", len(vars), human(size)) logStats(" name %s", human(nameSize)) logStats(" 
value %s", human(valueSize)) } func showSerializedTsvsStats(vars []serializableTargetSpecificVar) { nameSize := 0 valueSize := 0 for _, v := range vars { nameSize += len(v.Name) valueSize += v.Value.size() } size := nameSize + valueSize logStats("%d tsvs %s", len(vars), human(size)) logStats(" name %s", human(nameSize)) logStats(" value %s", human(valueSize)) } func showSerializedTargetsStats(targets []string) { size := 0 for _, t := range targets { size += len(t) } logStats("%d targets %s", len(targets), human(size)) } func showSerializedAccessedMksStats(accessedMks []*accessedMakefile) { size := 0 for _, rm := range accessedMks { size += len(rm.Filename) + len(rm.Hash) + 4 } logStats("%d makefiles %s", len(accessedMks), human(size)) } func showSerializedGraphStats(g serializableGraph) { showSerializedNodesStats(g.Nodes) showSerializedVarsStats(g.Vars) showSerializedTsvsStats(g.Tsvs) showSerializedTargetsStats(g.Targets) showSerializedAccessedMksStats(g.AccessedMks) } func deserializeGraph(g serializableGraph) (*DepGraph, error) { if StatsFlag { showSerializedGraphStats(g) } nodes, err := deserializeNodes(g) if err != nil { return nil, err } vars, err := deserializeVars(g.Vars) if err != nil { return nil, err } return &DepGraph{ nodes: nodes, vars: vars, accessedMks: g.AccessedMks, exports: g.Exports, }, nil } func (jsonLoadSaver) Load(filename string) (*DepGraph, error) { startTime := time.Now() f, err := os.Open(filename) if err != nil { return nil, err } defer f.Close() d := json.NewDecoder(f) g := serializableGraph{Vars: make(map[string]serializableVar)} err = d.Decode(&g) if err != nil { return nil, err } dg, err := deserializeGraph(g) if err != nil { return nil, err } logStats("json deserialize time: %q", time.Since(startTime)) return dg, nil } func (gobLoadSaver) Load(filename string) (*DepGraph, error) { startTime := time.Now() f, err := os.Open(filename) if err != nil { return nil, err } defer f.Close() d := gob.NewDecoder(f) g := serializableGraph{Vars: 
make(map[string]serializableVar)} err = d.Decode(&g) if err != nil { return nil, err } dg, err := deserializeGraph(g) if err != nil { return nil, err } logStats("gob deserialize time: %q", time.Since(startTime)) return dg, nil } func loadCache(makefile string, roots []string) (*DepGraph, error) { startTime := time.Now() defer func() { logStats("Cache lookup time: %q", time.Since(startTime)) }() filename := cacheFilename(makefile, roots) if !exists(filename) { glog.Warningf("Cache not found %q", filename) return nil, fmt.Errorf("cache not found: %s", filename) } g, err := GOB.Load(filename) if err != nil { glog.Warningf("Cache load error %q: %v", filename, err) return nil, err } for _, mk := range g.accessedMks { if mk.State != fileExists && mk.State != fileNotExists { return nil, fmt.Errorf("internal error: broken state: %d", mk.State) } if mk.State == fileNotExists { if exists(mk.Filename) { glog.Infof("Cache expired: %s", mk.Filename) return nil, fmt.Errorf("cache expired: %s", mk.Filename) } } else { c, err := ioutil.ReadFile(mk.Filename) if err != nil { glog.Infof("Cache expired: %s", mk.Filename) return nil, fmt.Errorf("cache expired: %s", mk.Filename) } h := sha1.Sum(c) if !bytes.Equal(h[:], mk.Hash[:]) { glog.Infof("Cache expired: %s", mk.Filename) return nil, fmt.Errorf("cache expired: %s", mk.Filename) } } } glog.Infof("Cache found in %q", filename) return g, nil } shellutil.go0100644 0000000 0000000 00000010711 13654546140 012151 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// See the License for the specific language governing permissions and // limitations under the License. package kati import ( "errors" "fmt" "io" "strings" "time" ) var shBuiltins = []struct { name string pattern expr compact func(*funcShell, []Value) Value }{ { name: "android:rot13", // in repo/android/build/core/definisions.mk // echo $(1) | tr 'a-zA-Z' 'n-za-mN-ZA-M' pattern: expr{ literal("echo "), matchVarref{}, literal(" | tr 'a-zA-Z' 'n-za-mN-ZA-M'"), }, compact: func(sh *funcShell, matches []Value) Value { return &funcShellAndroidRot13{ funcShell: sh, v: matches[0], } }, }, { name: "shell-date", pattern: expr{ mustLiteralRE(`date \+(\S+)`), }, compact: compactShellDate, }, { name: "shell-date-quoted", pattern: expr{ mustLiteralRE(`date "\+([^"]+)"`), }, compact: compactShellDate, }, } type funcShellAndroidRot13 struct { *funcShell v Value } func rot13(buf []byte) { for i, b := range buf { // tr 'a-zA-Z' 'n-za-mN-ZA-M' if b >= 'a' && b <= 'z' { b += 'n' - 'a' if b > 'z' { b -= 'z' - 'a' + 1 } } else if b >= 'A' && b <= 'Z' { b += 'N' - 'A' if b > 'Z' { b -= 'Z' - 'A' + 1 } } buf[i] = b } } func (f *funcShellAndroidRot13) Eval(w evalWriter, ev *Evaluator) error { abuf := newEbuf() fargs, err := ev.args(abuf, f.v) if err != nil { return err } rot13(fargs[0]) w.Write(fargs[0]) abuf.release() return nil } var ( // ShellDateTimestamp is an timestamp used for $(shell date). 
ShellDateTimestamp time.Time shellDateFormatRef = map[string]string{ "%Y": "2006", "%m": "01", "%d": "02", "%H": "15", "%M": "04", "%S": "05", "%b": "Jan", "%k": "15", // XXX } ) type funcShellDate struct { *funcShell format string } func compactShellDate(sh *funcShell, v []Value) Value { if ShellDateTimestamp.IsZero() { return sh } tf, ok := v[0].(literal) if !ok { return sh } tfstr := string(tf) for k, v := range shellDateFormatRef { tfstr = strings.Replace(tfstr, k, v, -1) } return &funcShellDate{ funcShell: sh, format: tfstr, } } func (f *funcShellDate) Eval(w evalWriter, ev *Evaluator) error { fmt.Fprint(w, ShellDateTimestamp.Format(f.format)) return nil } type buildinCommand interface { run(w evalWriter) } var errFindEmulatorDisabled = errors.New("builtin: find emulator disabled") func parseBuiltinCommand(cmd string) (buildinCommand, error) { if !UseFindEmulator { return nil, errFindEmulatorDisabled } if strings.HasPrefix(trimLeftSpace(cmd), "build/tools/findleaves") { return parseFindleavesCommand(cmd) } return parseFindCommand(cmd) } type shellParser struct { cmd string ungetToken string } func (p *shellParser) token() (string, error) { if p.ungetToken != "" { tok := p.ungetToken p.ungetToken = "" return tok, nil } p.cmd = trimLeftSpace(p.cmd) if len(p.cmd) == 0 { return "", io.EOF } if p.cmd[0] == ';' { tok := p.cmd[0:1] p.cmd = p.cmd[1:] return tok, nil } if p.cmd[0] == '&' { if len(p.cmd) == 1 || p.cmd[1] != '&' { return "", errFindBackground } tok := p.cmd[0:2] p.cmd = p.cmd[2:] return tok, nil } // TODO(ukai): redirect token. i := 0 for i < len(p.cmd) { if isWhitespace(rune(p.cmd[i])) || p.cmd[i] == ';' || p.cmd[i] == '&' { break } i++ } tok := p.cmd[0:i] p.cmd = p.cmd[i:] c := tok[0] if c == '\'' || c == '"' { if len(tok) < 2 || tok[len(tok)-1] != c { return "", errFindUnbalancedQuote } // todo: unquote? 
tok = tok[1 : len(tok)-1] } return tok, nil } func (p *shellParser) unget(s string) { if s != "" { p.ungetToken = s } } func (p *shellParser) expect(toks ...string) error { tok, err := p.token() if err != nil { return err } for _, t := range toks { if tok == t { return nil } } return fmt.Errorf("shell: token=%q; want=%q", tok, toks) } func (p *shellParser) expectSeq(toks ...string) error { for _, tok := range toks { err := p.expect(tok) if err != nil { return err } } return nil } shellutil_test.go0100644 0000000 0000000 00000004371 13654546140 013215 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package kati import ( "testing" "time" ) func TestRot13(t *testing.T) { for _, tc := range []struct { in string want string }{ { in: "PRODUCT_PACKAGE_OVERLAYS", want: "CEBQHPG_CNPXNTR_BIREYNLF", }, { in: "product_name", want: "cebqhpg_anzr", }, } { buf := []byte(tc.in) rot13(buf) if got, want := string(buf), tc.want; got != want { t.Errorf("rot13(%q) got=%q; want=%q", tc.in, got, want) } } } func TestShellDate(t *testing.T) { ts := ShellDateTimestamp ShellDateTimestamp = time.Now() defer func() { ShellDateTimestamp = ts }() for _, tc := range []struct { sharg literal format string }{ { sharg: literal("date +%Y-%m-%d"), format: "2006-01-02", }, { sharg: literal("date +%Y%m%d.%H%M%S"), format: "20060102.150405", }, { sharg: literal(`date "+%d %b %Y %k:%M"`), format: "02 Jan 2006 15:04", }, } { var matched bool for _, b := range shBuiltins { if b.name != "shell-date" && b.name != "shell-date-quoted" { continue } m, ok := matchExpr(expr{tc.sharg}, b.pattern) if !ok { t.Logf("%s not match with %s", b.name, tc.sharg) continue } f := &funcShell{ fclosure: fclosure{ args: []Value{ literal("(shell"), tc.sharg, }, }, } v := b.compact(f, m) sd, ok := v.(*funcShellDate) if !ok { t.Errorf("%s: matched %s but not compacted", tc.sharg, b.name) continue } if got, want := sd.format, tc.format; got != want { t.Errorf("%s: format=%q, want=%q - %s", tc.sharg, got, want, b.name) continue } matched = true break } if !matched { t.Errorf("%s: not matched", tc.sharg) } } } stats.cc0100644 0000000 0000000 00000005131 13654546140 011262 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "stats.h" #include #include #include #include "flags.h" #include "log.h" #include "stringprintf.h" #include "thread_local.h" #include "timeutil.h" namespace { mutex g_mu; vector* g_stats; DEFINE_THREAD_LOCAL(double, g_start_time); } // namespace Stats::Stats(const char* name) : name_(name), elapsed_(0), cnt_(0) { unique_lock lock(g_mu); if (g_stats == NULL) g_stats = new vector; g_stats->push_back(this); } void Stats::DumpTop() const { unique_lock lock(mu_); if (detailed_.size() > 0) { vector> v(detailed_.begin(), detailed_.end()); sort( v.begin(), v.end(), [](const pair a, const pair b) -> bool { return a.second > b.second; }); for (unsigned int i = 0; i < 10 && i < v.size(); i++) { LOG_STAT(" %5.3f %s", v[i].first.c_str(), v[i].second); } } } string Stats::String() const { unique_lock lock(mu_); return StringPrintf("%s: %f / %d", name_, elapsed_, cnt_); } void Stats::Start() { CHECK(!TLS_REF(g_start_time)); TLS_REF(g_start_time) = GetTime(); unique_lock lock(mu_); cnt_++; } double Stats::End(const char* msg) { CHECK(TLS_REF(g_start_time)); double e = GetTime() - TLS_REF(g_start_time); TLS_REF(g_start_time) = 0; unique_lock lock(mu_); elapsed_ += e; if (msg != 0) { detailed_[string(msg)] += e; } return e; } ScopedStatsRecorder::ScopedStatsRecorder(Stats* st, const char* msg) : st_(st), msg_(msg) { if (!g_flags.enable_stat_logs) return; st_->Start(); } ScopedStatsRecorder::~ScopedStatsRecorder() { if (!g_flags.enable_stat_logs) return; double e = st_->End(msg_); if (msg_ && e > 3.0) { LOG_STAT("slow %s (%f): %s", st_->name_, e, 
msg_); } } void ReportAllStats() { if (!g_stats) return; for (Stats* st : *g_stats) { LOG_STAT("%s", st->String().c_str()); st->DumpTop(); } delete g_stats; } stats.go0100644 0000000 0000000 00000007227 13654546140 011312 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "fmt" "io" "os" "sort" "sync" "time" ) type traceEventT struct { mu sync.Mutex f io.WriteCloser t0 time.Time pid int } const ( traceEventMain = iota + 1 // add new ones to use new goroutine. ) var traceEvent traceEventT // TraceEventStart starts trace event. func TraceEventStart(f io.WriteCloser) { traceEvent.start(f) } // TraceEventStop stops trace event. 
func TraceEventStop() { traceEvent.stop() } func (t *traceEventT) start(f io.WriteCloser) { t.f = f t.t0 = time.Now() fmt.Fprint(t.f, "[ ") } func (t *traceEventT) enabled() bool { return t.f != nil } func (t *traceEventT) stop() { fmt.Fprint(t.f, "\n]\n") t.f.Close() } type event struct { name, v string tid int t time.Time emit bool } func (t *traceEventT) begin(name string, v Value, tid int) event { var e event e.tid = tid e.t = time.Now() if t.f != nil || EvalStatsFlag { e.name = name e.v = v.String() } if t.f != nil { e.emit = name == "include" || name == "shell" if e.emit { t.emit("B", e, e.t.Sub(t.t0)) } } return e } func (t *traceEventT) emit(ph string, e event, ts time.Duration) { t.mu.Lock() defer t.mu.Unlock() if t.pid == 0 { t.pid = os.Getpid() } else { fmt.Fprintf(t.f, ",\n") } fmt.Fprintf(t.f, `{"pid":%d,"tid":%d,"ts":%d,"ph":%q,"cat":%q,"name":%q,"args":{}}`, t.pid, e.tid, ts.Nanoseconds()/1e3, ph, e.name, e.v, ) } func (t *traceEventT) end(e event) { if t.f != nil { if e.emit { t.emit("E", e, time.Since(t.t0)) } } stats.add(e.name, e.v, e.t) } type statsData struct { Name string Count int Longest time.Duration Total time.Duration } type statsT struct { mu sync.Mutex data map[string]statsData } var stats = &statsT{ data: make(map[string]statsData), } func (s *statsT) add(name, v string, t time.Time) { if !EvalStatsFlag { return } d := time.Since(t) key := fmt.Sprintf("%s:%s", name, v) s.mu.Lock() sd := s.data[key] if d > sd.Longest { sd.Longest = d } sd.Total += d sd.Count++ s.data[key] = sd s.mu.Unlock() } // DumpStats dumps statistics collected if EvalStatsFlag is set. 
func DumpStats() { if !EvalStatsFlag { return } var sv byTotalTime for k, v := range stats.data { v.Name = k sv = append(sv, v) } sort.Sort(sv) fmt.Println("count,longest(ns),total(ns),longest,total,name") for _, s := range sv { fmt.Printf("%d,%d,%d,%v,%v,%s\n", s.Count, s.Longest, s.Total, s.Longest, s.Total, s.Name) } } type byTotalTime []statsData func (b byTotalTime) Len() int { return len(b) } func (b byTotalTime) Swap(i, j int) { b[i], b[j] = b[j], b[i] } func (b byTotalTime) Less(i, j int) bool { return b[i].Total > b[j].Total } type shellStatsT struct { mu sync.Mutex duration time.Duration count int } var shellStats = &shellStatsT{} func (s *shellStatsT) add(d time.Duration) { s.mu.Lock() s.duration += d s.count++ s.mu.Unlock() } func (s *shellStatsT) Duration() time.Duration { s.mu.Lock() defer s.mu.Unlock() return s.duration } func (s *shellStatsT) Count() int { s.mu.Lock() defer s.mu.Unlock() return s.count } stats.h0100644 0000000 0000000 00000002750 13654546140 011130 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
#ifndef STATS_H_ #define STATS_H_ #include #include #include using namespace std; class Stats { public: explicit Stats(const char* name); void DumpTop() const; string String() const; private: void Start(); double End(const char* msg); friend class ScopedStatsRecorder; const char* name_; double elapsed_; int cnt_; mutable mutex mu_; unordered_map detailed_; }; class ScopedStatsRecorder { public: explicit ScopedStatsRecorder(Stats* st, const char* msg = 0); ~ScopedStatsRecorder(); private: Stats* st_; const char* msg_; }; void ReportAllStats(); #define COLLECT_STATS(name) \ static Stats stats(name); \ ScopedStatsRecorder ssr(&stats) #define COLLECT_STATS_WITH_SLOW_REPORT(name, msg) \ static Stats stats(name); \ ScopedStatsRecorder ssr(&stats, msg) #endif // STATS_H_ stmt.cc0100644 0000000 0000000 00000010267 13654546140 011121 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "stmt.h" #include "eval.h" #include "expr.h" #include "stringprintf.h" #include "strutil.h" Stmt::Stmt() {} Stmt::~Stmt() {} string RuleStmt::DebugString() const { return StringPrintf("RuleStmt(lhs=%s sep=%d rhs=%s loc=%s:%d)", Value::DebugString(lhs).c_str(), sep, Value::DebugString(rhs).c_str(), LOCF(loc())); } string AssignStmt::DebugString() const { const char* opstr = "???"; switch (op) { case AssignOp::EQ: opstr = "EQ"; break; case AssignOp::COLON_EQ: opstr = "COLON_EQ"; break; case AssignOp::PLUS_EQ: opstr = "PLUS_EQ"; break; case AssignOp::QUESTION_EQ: opstr = "QUESTION_EQ"; break; } const char* dirstr = "???"; switch (directive) { case AssignDirective::NONE: dirstr = ""; break; case AssignDirective::OVERRIDE: dirstr = "override"; break; case AssignDirective::EXPORT: dirstr = "export"; break; } return StringPrintf( "AssignStmt(lhs=%s rhs=%s (%s) " "opstr=%s dir=%s loc=%s:%d)", Value::DebugString(lhs).c_str(), Value::DebugString(rhs).c_str(), NoLineBreak(orig_rhs.as_string()).c_str(), opstr, dirstr, LOCF(loc())); } Symbol AssignStmt::GetLhsSymbol(Evaluator* ev) const { if (!lhs->IsLiteral()) { string buf; lhs->Eval(ev, &buf); return Intern(buf); } if (!lhs_sym_cache_.IsValid()) { lhs_sym_cache_ = Intern(lhs->GetLiteralValueUnsafe()); } return lhs_sym_cache_; } string CommandStmt::DebugString() const { return StringPrintf("CommandStmt(%s, loc=%s:%d)", Value::DebugString(expr).c_str(), LOCF(loc())); } string IfStmt::DebugString() const { const char* opstr = "???"; switch (op) { case CondOp::IFEQ: opstr = "ifeq"; break; case CondOp::IFNEQ: opstr = "ifneq"; break; case CondOp::IFDEF: opstr = "ifdef"; break; case CondOp::IFNDEF: opstr = "ifndef"; break; } return StringPrintf("IfStmt(op=%s, lhs=%s, rhs=%s t=%zu f=%zu loc=%s:%d)", opstr, Value::DebugString(lhs).c_str(), Value::DebugString(rhs).c_str(), true_stmts.size(), false_stmts.size(), LOCF(loc())); } string IncludeStmt::DebugString() const { return StringPrintf("IncludeStmt(%s, 
loc=%s:%d)", Value::DebugString(expr).c_str(), LOCF(loc())); } string ExportStmt::DebugString() const { return StringPrintf("ExportStmt(%s, %d, loc=%s:%d)", Value::DebugString(expr).c_str(), is_export, LOCF(loc())); } string ParseErrorStmt::DebugString() const { return StringPrintf("ParseErrorStmt(%s, loc=%s:%d)", msg.c_str(), LOCF(loc())); } RuleStmt::~RuleStmt() { delete lhs; delete rhs; } void RuleStmt::Eval(Evaluator* ev) const { ev->EvalRule(this); } AssignStmt::~AssignStmt() { delete lhs; delete rhs; } void AssignStmt::Eval(Evaluator* ev) const { ev->EvalAssign(this); } CommandStmt::~CommandStmt() { delete expr; } void CommandStmt::Eval(Evaluator* ev) const { ev->EvalCommand(this); } IfStmt::~IfStmt() { delete lhs; delete rhs; } void IfStmt::Eval(Evaluator* ev) const { ev->EvalIf(this); } IncludeStmt::~IncludeStmt() { delete expr; } void IncludeStmt::Eval(Evaluator* ev) const { ev->EvalInclude(this); } ExportStmt::~ExportStmt() { delete expr; } void ExportStmt::Eval(Evaluator* ev) const { ev->EvalExport(this); } ParseErrorStmt::~ParseErrorStmt() {} void ParseErrorStmt::Eval(Evaluator* ev) const { ev->set_loc(loc()); ev->Error(msg); } stmt.h0100644 0000000 0000000 00000006237 13654546140 010765 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
#ifndef STMT_H_ #define STMT_H_ #include #include #include "loc.h" #include "string_piece.h" #include "symtab.h" using namespace std; class Evaluator; class Value; enum struct AssignOp : char { EQ, COLON_EQ, PLUS_EQ, QUESTION_EQ, }; enum struct AssignDirective { NONE = 0, OVERRIDE = 1, EXPORT = 2, }; enum struct CondOp { IFEQ, IFNEQ, IFDEF, IFNDEF, }; struct Stmt { public: virtual ~Stmt(); Loc loc() const { return loc_; } void set_loc(Loc loc) { loc_ = loc; } StringPiece orig() const { return orig_; } virtual void Eval(Evaluator* ev) const = 0; virtual string DebugString() const = 0; protected: Stmt(); private: Loc loc_; StringPiece orig_; }; /* Parsed "rule statement" before evaluation is kept as * * where and as Value instances. is either command * separator (';') or an assignment ('=' or '=$='). * Until we evaluate , we don't know whether it is a rule or * a rule-specific variable assignment. */ struct RuleStmt : public Stmt { Value* lhs; enum { SEP_NULL, SEP_SEMICOLON, SEP_EQ, SEP_FINALEQ } sep; Value* rhs; virtual ~RuleStmt(); virtual void Eval(Evaluator* ev) const; virtual string DebugString() const; }; struct AssignStmt : public Stmt { Value* lhs; Value* rhs; StringPiece orig_rhs; AssignOp op; AssignDirective directive; bool is_final; AssignStmt() : is_final(false) {} virtual ~AssignStmt(); virtual void Eval(Evaluator* ev) const; virtual string DebugString() const; Symbol GetLhsSymbol(Evaluator* ev) const; private: mutable Symbol lhs_sym_cache_; }; struct CommandStmt : public Stmt { Value* expr; StringPiece orig; virtual ~CommandStmt(); virtual void Eval(Evaluator* ev) const; virtual string DebugString() const; }; struct IfStmt : public Stmt { CondOp op; Value* lhs; Value* rhs; vector true_stmts; vector false_stmts; virtual ~IfStmt(); virtual void Eval(Evaluator* ev) const; virtual string DebugString() const; }; struct IncludeStmt : public Stmt { Value* expr; bool should_exist; virtual ~IncludeStmt(); virtual void Eval(Evaluator* ev) const; virtual string 
DebugString() const; }; struct ExportStmt : public Stmt { Value* expr; bool is_export; virtual ~ExportStmt(); virtual void Eval(Evaluator* ev) const; virtual string DebugString() const; }; struct ParseErrorStmt : public Stmt { string msg; virtual ~ParseErrorStmt(); virtual void Eval(Evaluator* ev) const; virtual string DebugString() const; }; #endif // STMT_H_ string_piece.cc0100644 0000000 0000000 00000015203 13654546140 012600 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Copyright (c) 2011 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // Copied from strings/stringpiece.cc with modifications // +build ignore #include #include #include #include #include #include "string_piece.h" typedef StringPiece::size_type size_type; bool operator==(const StringPiece& x, const StringPiece& y) { if (x.size() != y.size()) return false; size_t len = x.size(); if (len >= sizeof(uint64_t)) { len -= sizeof(uint64_t); uint64_t xt = *reinterpret_cast(x.data() + len); uint64_t yt = *reinterpret_cast(y.data() + len); if (xt != yt) return false; } return StringPiece::wordmemcmp(x.data(), y.data(), len) == 0; } void StringPiece::CopyToString(std::string* target) const { target->assign(!empty() ? 
data() : "", size()); } void StringPiece::AppendToString(std::string* target) const { if (!empty()) target->append(data(), size()); } size_type StringPiece::copy(char* buf, size_type n, size_type pos) const { size_type ret = std::min(length_ - pos, n); memcpy(buf, ptr_ + pos, ret); return ret; } size_type StringPiece::find(const StringPiece& s, size_type pos) const { if (pos > length_) return npos; const char* result = std::search(ptr_ + pos, ptr_ + length_, s.ptr_, s.ptr_ + s.length_); const size_type xpos = result - ptr_; return xpos + s.length_ <= length_ ? xpos : npos; } size_type StringPiece::find(char c, size_type pos) const { if (pos >= length_) return npos; const char* result = std::find(ptr_ + pos, ptr_ + length_, c); return result != ptr_ + length_ ? static_cast(result - ptr_) : npos; } size_type StringPiece::rfind(const StringPiece& s, size_type pos) const { if (length_ < s.length_) return npos; if (s.empty()) return std::min(length_, pos); const char* last = ptr_ + std::min(length_ - s.length_, pos) + s.length_; const char* result = std::find_end(ptr_, last, s.ptr_, s.ptr_ + s.length_); return result != last ? static_cast(result - ptr_) : npos; } size_type StringPiece::rfind(char c, size_type pos) const { if (length_ == 0) return npos; for (size_type i = std::min(pos, length_ - 1);; --i) { if (ptr_[i] == c) return i; if (i == 0) break; } return npos; } // For each character in characters_wanted, sets the index corresponding // to the ASCII code of that character to 1 in table. This is used by // the find_.*_of methods below to tell whether or not a character is in // the lookup table in constant time. // The argument `table' must be an array that is large enough to hold all // the possible values of an unsigned char. 
Thus it should be be declared // as follows: // bool table[UCHAR_MAX + 1] static inline void BuildLookupTable(const StringPiece& characters_wanted, bool* table) { const size_type length = characters_wanted.length(); const char* const data = characters_wanted.data(); for (size_type i = 0; i < length; ++i) { table[static_cast(data[i])] = true; } } size_type StringPiece::find_first_of(const StringPiece& s, size_type pos) const { if (length_ == 0 || s.length_ == 0) return npos; // Avoid the cost of BuildLookupTable() for a single-character search. if (s.length_ == 1) return find_first_of(s.ptr_[0], pos); bool lookup[UCHAR_MAX + 1] = {false}; BuildLookupTable(s, lookup); for (size_type i = pos; i < length_; ++i) { if (lookup[static_cast(ptr_[i])]) { return i; } } return npos; } size_type StringPiece::find_first_not_of(const StringPiece& s, size_type pos) const { if (length_ == 0) return npos; if (s.length_ == 0) return 0; // Avoid the cost of BuildLookupTable() for a single-character search. if (s.length_ == 1) return find_first_not_of(s.ptr_[0], pos); bool lookup[UCHAR_MAX + 1] = {false}; BuildLookupTable(s, lookup); for (size_type i = pos; i < length_; ++i) { if (!lookup[static_cast(ptr_[i])]) { return i; } } return npos; } size_type StringPiece::find_first_not_of(char c, size_type pos) const { if (length_ == 0) return npos; for (; pos < length_; ++pos) { if (ptr_[pos] != c) { return pos; } } return npos; } size_type StringPiece::find_last_of(const StringPiece& s, size_type pos) const { if (length_ == 0 || s.length_ == 0) return npos; // Avoid the cost of BuildLookupTable() for a single-character search. 
if (s.length_ == 1) return find_last_of(s.ptr_[0], pos); bool lookup[UCHAR_MAX + 1] = {false}; BuildLookupTable(s, lookup); for (size_type i = std::min(pos, length_ - 1);; --i) { if (lookup[static_cast(ptr_[i])]) return i; if (i == 0) break; } return npos; } size_type StringPiece::find_last_not_of(const StringPiece& s, size_type pos) const { if (length_ == 0) return npos; size_type i = std::min(pos, length_ - 1); if (s.length_ == 0) return i; // Avoid the cost of BuildLookupTable() for a single-character search. if (s.length_ == 1) return find_last_not_of(s.ptr_[0], pos); bool lookup[UCHAR_MAX + 1] = {false}; BuildLookupTable(s, lookup); for (;; --i) { if (!lookup[static_cast(ptr_[i])]) return i; if (i == 0) break; } return npos; } size_type StringPiece::find_last_not_of(char c, size_type pos) const { if (length_ == 0) return npos; for (size_type i = std::min(pos, length_ - 1);; --i) { if (ptr_[i] != c) return i; if (i == 0) break; } return npos; } StringPiece StringPiece::substr(size_type pos, size_type n) const { if (pos > length_) pos = length_; if (n > length_ - pos) n = length_ - pos; return StringPiece(ptr_ + pos, n); } const StringPiece::size_type StringPiece::npos = size_type(-1); string_piece.h0100644 0000000 0000000 00000016016 13654546140 012445 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Copyright (c) 2011 The Chromium Authors. All rights reserved. 
// Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // Copied from strings/stringpiece.h with modifications // // A string-like object that points to a sized piece of memory. // // Functions or methods may use const StringPiece& parameters to accept either // a "const char*" or a "string" value that will be implicitly converted to // a StringPiece. The implicit conversion means that it is often appropriate // to include this .h file in other files rather than forward-declaring // StringPiece as would be appropriate for most other Google classes. // // Systematic usage of StringPiece is encouraged as it will reduce unnecessary // conversions from "const char*" to "string" and back again. // #ifndef BASE_STRING_PIECE_H_ #define BASE_STRING_PIECE_H_ #pragma once #include #include #include //#include "base/base_api.h" //#include "base/basictypes.h" class StringPiece { public: // standard STL container boilerplate typedef size_t size_type; typedef char value_type; typedef const char* pointer; typedef const char& reference; typedef const char& const_reference; typedef ptrdiff_t difference_type; typedef const char* const_iterator; typedef const char* iterator; typedef std::reverse_iterator const_reverse_iterator; typedef std::reverse_iterator reverse_iterator; static const size_type npos; public: // We provide non-explicit singleton constructors so users can pass // in a "const char*" or a "string" wherever a "StringPiece" is // expected. StringPiece() : ptr_(NULL), length_(0) {} StringPiece(const char* str) : ptr_(str), length_((str == NULL) ? 0 : strlen(str)) {} StringPiece(const std::string& str) : ptr_(str.data()), length_(str.size()) {} StringPiece(const std::string&& str) : ptr_(str.data()), length_(str.size()) {} StringPiece(const char* offset, size_type len) : ptr_(offset), length_(len) {} // data() may return a pointer to a buffer with embedded NULs, and the // returned buffer may or may not be null terminated. 
Therefore it is // typically a mistake to pass data() to a routine that expects a NUL // terminated string. const char* data() const { return ptr_; } size_type size() const { return length_; } size_type length() const { return length_; } bool empty() const { return length_ == 0; } void clear() { ptr_ = NULL; length_ = 0; } void set(const char* data, size_type len) { ptr_ = data; length_ = len; } void set(const char* str) { ptr_ = str; length_ = str ? strlen(str) : 0; } void set(const void* data, size_type len) { ptr_ = reinterpret_cast(data); length_ = len; } char operator[](size_type i) const { return ptr_[i]; } void remove_prefix(size_type n) { ptr_ += n; length_ -= n; } void remove_suffix(size_type n) { length_ -= n; } int compare(const StringPiece& x) const { int r = wordmemcmp(ptr_, x.ptr_, (length_ < x.length_ ? length_ : x.length_)); if (r == 0) { if (length_ < x.length_) r = -1; else if (length_ > x.length_) r = +1; } return r; } std::string as_string() const { // std::string doesn't like to take a NULL pointer even with a 0 size. return std::string(!empty() ? 
data() : "", size()); } void CopyToString(std::string* target) const; void AppendToString(std::string* target) const; // Does "this" start with "x" bool starts_with(const StringPiece& x) const { return ((length_ >= x.length_) && (wordmemcmp(ptr_, x.ptr_, x.length_) == 0)); } // Does "this" end with "x" bool ends_with(const StringPiece& x) const { return ((length_ >= x.length_) && (wordmemcmp(ptr_ + (length_ - x.length_), x.ptr_, x.length_) == 0)); } iterator begin() const { return ptr_; } iterator end() const { return ptr_ + length_; } const_reverse_iterator rbegin() const { return const_reverse_iterator(ptr_ + length_); } const_reverse_iterator rend() const { return const_reverse_iterator(ptr_); } size_type max_size() const { return length_; } size_type capacity() const { return length_; } size_type copy(char* buf, size_type n, size_type pos = 0) const; size_type find(const StringPiece& s, size_type pos = 0) const; size_type find(char c, size_type pos = 0) const; size_type rfind(const StringPiece& s, size_type pos = npos) const; size_type rfind(char c, size_type pos = npos) const; size_type find_first_of(const StringPiece& s, size_type pos = 0) const; size_type find_first_of(char c, size_type pos = 0) const { return find(c, pos); } size_type find_first_not_of(const StringPiece& s, size_type pos = 0) const; size_type find_first_not_of(char c, size_type pos = 0) const; size_type find_last_of(const StringPiece& s, size_type pos = npos) const; size_type find_last_of(char c, size_type pos = npos) const { return rfind(c, pos); } size_type find_last_not_of(const StringPiece& s, size_type pos = npos) const; size_type find_last_not_of(char c, size_type pos = npos) const; StringPiece substr(size_type pos, size_type n = npos) const; static int wordmemcmp(const char* p, const char* p2, size_type N) { return memcmp(p, p2, N); } // kati specific functions will follow. char get(size_type i) const { return i < length_ ? 
ptr_[i] : 0; } private: const char* ptr_; size_type length_; }; bool operator==(const StringPiece& x, const StringPiece& y); inline bool operator!=(const StringPiece& x, const StringPiece& y) { return !(x == y); } inline bool operator<(const StringPiece& x, const StringPiece& y) { const int r = StringPiece::wordmemcmp( x.data(), y.data(), (x.size() < y.size() ? x.size() : y.size())); return ((r < 0) || ((r == 0) && (x.size() < y.size()))); } inline bool operator>(const StringPiece& x, const StringPiece& y) { return y < x; } inline bool operator<=(const StringPiece& x, const StringPiece& y) { return !(x > y); } inline bool operator>=(const StringPiece& x, const StringPiece& y) { return !(x < y); } namespace std { template <> struct hash { size_t operator()(const StringPiece& s) const { size_t result = 0; for (char c : s) { result = (result * 131) + c; } return result; } }; } // namespace std #define SPF(s) static_cast((s).size()), (s).data() #endif // BASE_STRING_PIECE_H_ string_piece_test.cc0100644 0000000 0000000 00000002265 13654546140 013643 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "string_piece.h" #include #include using namespace std; int main() { unordered_set sps; sps.insert(StringPiece("foo")); sps.insert(StringPiece("foo")); sps.insert(StringPiece("bar")); assert(sps.size() == 2); assert(sps.count(StringPiece("foo")) == 1); assert(sps.count(StringPiece("bar")) == 1); assert(StringPiece("hogefugahige") == StringPiece("hogefugahige")); assert(StringPiece("hogefugahoge") != StringPiece("hogefugahige")); assert(StringPiece("hogefugahige") != StringPiece("higefugahige")); } stringprintf.cc0100644 0000000 0000000 00000002145 13654546140 012657 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "stringprintf.h" #include #include string StringPrintf(const char* format, ...) { string str; str.resize(128); for (int i = 0; i < 2; i++) { va_list args; va_start(args, format); int ret = vsnprintf(&str[0], str.size(), format, args); va_end(args); assert(ret >= 0); if (static_cast(ret) < str.size()) { str.resize(ret); return str; } str.resize(ret + 1); } assert(false); __builtin_unreachable(); } stringprintf.h0100644 0000000 0000000 00000001403 13654546140 012515 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef STRINGPRINTF_H_ #define STRINGPRINTF_H_ #include using namespace std; string StringPrintf(const char* fmt, ...); #endif // STRINGPRINTF_H_ strutil.cc0100644 0000000 0000000 00000031711 13654546140 011635 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "strutil.h" #include #include #include #include #include #include #include #ifdef __SSE4_2__ #include #endif #include "log.h" static bool isSpace(char c) { return (9 <= c && c <= 13) || c == 32; } #ifdef __SSE4_2__ static int SkipUntilSSE42(const char* s, int len, const char* ranges, int ranges_size) { __m128i ranges16 = _mm_loadu_si128((const __m128i*)ranges); len &= ~15; int i = 0; while (i < len) { __m128i b16 = _mm_loadu_si128((const __m128i*)(s + i)); int r = _mm_cmpestri( ranges16, ranges_size, b16, len - i, _SIDD_LEAST_SIGNIFICANT | _SIDD_CMP_RANGES | _SIDD_UBYTE_OPS); if (r != 16) { return i + r; } i += 16; } return len; } #endif template static int SkipUntil(const char* s, int len, const char* ranges UNUSED, int ranges_size UNUSED, Cond cond) { int i = 0; #ifdef __SSE4_2__ i += SkipUntilSSE42(s, len, ranges, ranges_size); #endif for (; i < len; i++) { if (cond(s[i])) break; } return i; } WordScanner::Iterator& WordScanner::Iterator::operator++() { int len = static_cast(in->size()); for (s = i + 1; s < len; s++) { if (!isSpace((*in)[s])) break; } if (s >= len) { in = NULL; s = 0; i = 0; return *this; } static const char ranges[] = "\x09\x0d "; // It's intentional we are not using isSpace here. It seems with // lambda the compiler generates better code. 
i = s + SkipUntil(in->data() + s, len - s, ranges, 4, [](char c) { return (9 <= c && c <= 13) || c == 32; }); return *this; } StringPiece WordScanner::Iterator::operator*() const { return in->substr(s, i - s); } WordScanner::WordScanner(StringPiece in) : in_(in) {} WordScanner::Iterator WordScanner::begin() const { Iterator iter; iter.in = &in_; iter.s = 0; iter.i = -1; ++iter; return iter; } WordScanner::Iterator WordScanner::end() const { Iterator iter; iter.in = NULL; iter.s = 0; iter.i = 0; return iter; } void WordScanner::Split(vector* o) { for (StringPiece t : *this) o->push_back(t); } WordWriter::WordWriter(string* o) : out_(o), needs_space_(false) {} void WordWriter::MaybeAddWhitespace() { if (needs_space_) { out_->push_back(' '); } else { needs_space_ = true; } } void WordWriter::Write(StringPiece s) { MaybeAddWhitespace(); AppendString(s, out_); } ScopedTerminator::ScopedTerminator(StringPiece s) : s_(s), c_(s[s.size()]) { const_cast(s_.data())[s_.size()] = '\0'; } ScopedTerminator::~ScopedTerminator() { const_cast(s_.data())[s_.size()] = c_; } void AppendString(StringPiece str, string* out) { out->append(str.begin(), str.end()); } bool HasPrefix(StringPiece str, StringPiece prefix) { ssize_t size_diff = str.size() - prefix.size(); return size_diff >= 0 && str.substr(0, prefix.size()) == prefix; } bool HasSuffix(StringPiece str, StringPiece suffix) { ssize_t size_diff = str.size() - suffix.size(); return size_diff >= 0 && str.substr(size_diff) == suffix; } bool HasWord(StringPiece str, StringPiece w) { size_t found = str.find(w); if (found == string::npos) return false; if (found != 0 && !isSpace(str[found - 1])) return false; size_t end = found + w.size(); if (end != str.size() && !isSpace(str[end])) return false; return true; } StringPiece TrimPrefix(StringPiece str, StringPiece prefix) { ssize_t size_diff = str.size() - prefix.size(); if (size_diff < 0 || str.substr(0, prefix.size()) != prefix) return str; return str.substr(prefix.size()); } 
StringPiece TrimSuffix(StringPiece str, StringPiece suffix) { ssize_t size_diff = str.size() - suffix.size(); if (size_diff < 0 || str.substr(size_diff) != suffix) return str; return str.substr(0, size_diff); } Pattern::Pattern(StringPiece pat) : pat_(pat), percent_index_(pat.find('%')) {} bool Pattern::Match(StringPiece str) const { if (percent_index_ == string::npos) return str == pat_; return MatchImpl(str); } bool Pattern::MatchImpl(StringPiece str) const { return (HasPrefix(str, pat_.substr(0, percent_index_)) && HasSuffix(str, pat_.substr(percent_index_ + 1))); } StringPiece Pattern::Stem(StringPiece str) const { if (!Match(str)) return ""; return str.substr(percent_index_, str.size() - (pat_.size() - percent_index_ - 1)); } void Pattern::AppendSubst(StringPiece str, StringPiece subst, string* out) const { if (percent_index_ == string::npos) { if (str == pat_) { AppendString(subst, out); return; } else { AppendString(str, out); return; } } if (MatchImpl(str)) { size_t subst_percent_index = subst.find('%'); if (subst_percent_index == string::npos) { AppendString(subst, out); return; } else { AppendString(subst.substr(0, subst_percent_index), out); AppendString(str.substr(percent_index_, str.size() - pat_.size() + 1), out); AppendString(subst.substr(subst_percent_index + 1), out); return; } } AppendString(str, out); } void Pattern::AppendSubstRef(StringPiece str, StringPiece subst, string* out) const { if (percent_index_ != string::npos && subst.find('%') != string::npos) { AppendSubst(str, subst, out); return; } StringPiece s = TrimSuffix(str, pat_); out->append(s.begin(), s.end()); out->append(subst.begin(), subst.end()); } string NoLineBreak(const string& s) { size_t index = s.find('\n'); if (index == string::npos) return s; string r = s; while (index != string::npos) { r = r.substr(0, index) + "\\n" + r.substr(index + 1); index = r.find('\n', index + 2); } return r; } StringPiece TrimLeftSpace(StringPiece s) { size_t i = 0; for (; i < s.size(); i++) { if 
(isSpace(s[i])) continue; char n = s.get(i + 1); if (s[i] == '\\' && (n == '\r' || n == '\n')) { i++; continue; } break; } return s.substr(i, s.size() - i); } StringPiece TrimRightSpace(StringPiece s) { size_t i = 0; for (; i < s.size(); i++) { char c = s[s.size() - 1 - i]; if (isSpace(c)) { if ((c == '\r' || c == '\n') && s.get(s.size() - 2 - i) == '\\') i++; continue; } break; } return s.substr(0, s.size() - i); } StringPiece TrimSpace(StringPiece s) { return TrimRightSpace(TrimLeftSpace(s)); } StringPiece Dirname(StringPiece s) { size_t found = s.rfind('/'); if (found == string::npos) return StringPiece("."); if (found == 0) return StringPiece(""); return s.substr(0, found); } StringPiece Basename(StringPiece s) { size_t found = s.rfind('/'); if (found == string::npos || found == 0) return s; return s.substr(found + 1); } StringPiece GetExt(StringPiece s) { size_t found = s.rfind('.'); if (found == string::npos) return StringPiece(""); return s.substr(found); } StringPiece StripExt(StringPiece s) { size_t slash_index = s.rfind('/'); size_t found = s.rfind('.'); if (found == string::npos || (slash_index != string::npos && found < slash_index)) return s; return s.substr(0, found); } void NormalizePath(string* o) { if (o->empty()) return; size_t start_index = 0; if ((*o)[0] == '/') start_index++; size_t j = start_index; size_t prev_start = start_index; for (size_t i = start_index; i <= o->size(); i++) { char c = (*o)[i]; if (c != '/' && c != 0) { (*o)[j] = c; j++; continue; } StringPiece prev_dir = StringPiece(o->data() + prev_start, j - prev_start); if (prev_dir == ".") { j--; } else if (prev_dir == ".." && j != 2 /* .. */) { if (j == 3) { // /.. 
j = start_index; } else { size_t orig_j = j; j -= 4; j = o->rfind('/', j); if (j == string::npos) { j = start_index; } else { j++; } if (StringPiece(o->data() + j, 3) == "../") { j = orig_j; (*o)[j] = c; j++; } } } else if (!prev_dir.empty()) { if (c) { (*o)[j] = c; j++; } } prev_start = j; } if (j > 1 && (*o)[j - 1] == '/') j--; o->resize(j); } void AbsPath(StringPiece s, string* o) { if (s.get(0) == '/') { o->clear(); } else { char buf[PATH_MAX]; if (!getcwd(buf, PATH_MAX)) { fprintf(stderr, "getcwd failed\n"); CHECK(false); } CHECK(buf[0] == '/'); *o = buf; *o += '/'; } AppendString(s, o); NormalizePath(o); } template size_t FindOutsideParenImpl(StringPiece s, Cond cond) { bool prev_backslash = false; stack paren_stack; for (size_t i = 0; i < s.size(); i++) { char c = s[i]; if (cond(c) && paren_stack.empty() && !prev_backslash) { return i; } switch (c) { case '(': paren_stack.push(')'); break; case '{': paren_stack.push('}'); break; case ')': case '}': if (!paren_stack.empty() && c == paren_stack.top()) { paren_stack.pop(); } break; } prev_backslash = c == '\\' && !prev_backslash; } return string::npos; } size_t FindOutsideParen(StringPiece s, char c) { return FindOutsideParenImpl(s, [&c](char d) { return c == d; }); } size_t FindTwoOutsideParen(StringPiece s, char c1, char c2) { return FindOutsideParenImpl( s, [&c1, &c2](char d) { return d == c1 || d == c2; }); } size_t FindThreeOutsideParen(StringPiece s, char c1, char c2, char c3) { return FindOutsideParenImpl( s, [&c1, &c2, &c3](char d) { return d == c1 || d == c2 || d == c3; }); } size_t FindEndOfLine(StringPiece s, size_t e, size_t* lf_cnt) { static const char ranges[] = "\0\0\n\n\\\\"; while (e < s.size()) { e += SkipUntil(s.data() + e, s.size() - e, ranges, 6, [](char c) { return c == 0 || c == '\n' || c == '\\'; }); if (e >= s.size()) { CHECK(s.size() == e); break; } char c = s[e]; if (c == '\0') break; if (c == '\\') { if (s[e + 1] == '\n') { e += 2; ++*lf_cnt; } else if (s[e + 1] == '\r' && s[e + 2] 
== '\n') { e += 3; ++*lf_cnt; } else if (s[e + 1] == '\\') { e += 2; } else { e++; } } else if (c == '\n') { ++*lf_cnt; return e; } } return e; } StringPiece TrimLeadingCurdir(StringPiece s) { while (s.substr(0, 2) == "./") s = s.substr(2); return s; } void FormatForCommandSubstitution(string* s) { while ((*s)[s->size() - 1] == '\n') s->pop_back(); for (size_t i = 0; i < s->size(); i++) { if ((*s)[i] == '\n') (*s)[i] = ' '; } } string SortWordsInString(StringPiece s) { vector toks; for (StringPiece tok : WordScanner(s)) { toks.push_back(tok.as_string()); } sort(toks.begin(), toks.end()); return JoinStrings(toks, " "); } string ConcatDir(StringPiece b, StringPiece n) { string r; if (!b.empty()) { b.AppendToString(&r); r += '/'; } n.AppendToString(&r); NormalizePath(&r); return r; } string EchoEscape(const string& str) { const char* in = str.c_str(); string buf; for (; *in; in++) { switch (*in) { case '\\': buf += "\\\\\\\\"; break; case '\n': buf += "\\n"; break; case '"': buf += "\\\""; break; default: buf += *in; } } return buf; } static bool NeedsShellEscape(char c) { return c == 0 || c == '"' || c == '$' || c == '\\' || c == '`'; } void EscapeShell(string* s) { static const char ranges[] = "\0\0\"\"$$\\\\``"; size_t prev = 0; size_t i = SkipUntil(s->c_str(), s->size(), ranges, 10, NeedsShellEscape); if (i == s->size()) return; string r; for (; i < s->size();) { StringPiece(*s).substr(prev, i - prev).AppendToString(&r); char c = (*s)[i]; r += '\\'; if (c == '$') { if ((*s)[i + 1] == '$') { r += '$'; i++; } } r += c; i++; prev = i; i += SkipUntil(s->c_str() + i, s->size() - i, ranges, 10, NeedsShellEscape); } StringPiece(*s).substr(prev).AppendToString(&r); s->swap(r); } strutil.go0100644 0000000 0000000 00000017451 13654546140 011662 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "bytes" "path/filepath" "strings" "github.com/golang/glog" ) var wsbytes = [256]bool{' ': true, '\t': true, '\n': true, '\r': true} // TODO(ukai): use unicode.IsSpace? func isWhitespace(ch rune) bool { if int(ch) >= len(wsbytes) { return false } return wsbytes[ch] } func splitSpaces(s string) []string { var r []string tokStart := -1 for i, ch := range s { if isWhitespace(ch) { if tokStart >= 0 { r = append(r, s[tokStart:i]) tokStart = -1 } } else { if tokStart < 0 { tokStart = i } } } if tokStart >= 0 { r = append(r, s[tokStart:]) } glog.V(2).Infof("splitSpace(%q)=%q", s, r) return r } func splitSpacesBytes(s []byte) (r [][]byte) { tokStart := -1 for i, ch := range s { if isWhitespace(rune(ch)) { if tokStart >= 0 { r = append(r, s[tokStart:i]) tokStart = -1 } } else { if tokStart < 0 { tokStart = i } } } if tokStart >= 0 { r = append(r, s[tokStart:]) } glog.V(2).Infof("splitSpace(%q)=%q", s, r) return r } // TODO(ukai): use bufio.Scanner? 
type wordScanner struct { in []byte s int // word starts i int // current pos esc bool // handle \-escape } func newWordScanner(in []byte) *wordScanner { return &wordScanner{ in: in, } } func (ws *wordScanner) next() bool { for ws.s = ws.i; ws.s < len(ws.in); ws.s++ { if !wsbytes[ws.in[ws.s]] { break } } if ws.s == len(ws.in) { return false } return true } func (ws *wordScanner) Scan() bool { if !ws.next() { return false } for ws.i = ws.s; ws.i < len(ws.in); ws.i++ { if ws.esc && ws.in[ws.i] == '\\' { ws.i++ continue } if wsbytes[ws.in[ws.i]] { break } } return true } func (ws *wordScanner) Bytes() []byte { return ws.in[ws.s:ws.i] } func (ws *wordScanner) Remain() []byte { if !ws.next() { return nil } return ws.in[ws.s:] } func matchPattern(pat, str string) bool { i := strings.IndexByte(pat, '%') if i < 0 { return pat == str } return strings.HasPrefix(str, pat[:i]) && strings.HasSuffix(str, pat[i+1:]) } func matchPatternBytes(pat, str []byte) bool { i := bytes.IndexByte(pat, '%') if i < 0 { return bytes.Equal(pat, str) } return bytes.HasPrefix(str, pat[:i]) && bytes.HasSuffix(str, pat[i+1:]) } func substPattern(pat, repl, str string) string { ps := strings.SplitN(pat, "%", 2) if len(ps) != 2 { if str == pat { return repl } return str } in := str trimed := str if ps[0] != "" { trimed = strings.TrimPrefix(in, ps[0]) if trimed == in { return str } } in = trimed if ps[1] != "" { trimed = strings.TrimSuffix(in, ps[1]) if trimed == in { return str } } rs := strings.SplitN(repl, "%", 2) if len(rs) != 2 { return repl } return rs[0] + trimed + rs[1] } func substPatternBytes(pat, repl, str []byte) (pre, subst, post []byte) { i := bytes.IndexByte(pat, '%') if i < 0 { if bytes.Equal(str, pat) { return repl, nil, nil } return str, nil, nil } in := str trimed := str if i > 0 { trimed = bytes.TrimPrefix(in, pat[:i]) if bytes.Equal(trimed, in) { return str, nil, nil } } in = trimed if i < len(pat)-1 { trimed = bytes.TrimSuffix(in, pat[i+1:]) if bytes.Equal(trimed, in) { return 
str, nil, nil } } i = bytes.IndexByte(repl, '%') if i < 0 { return repl, nil, nil } return repl[:i], trimed, repl[i+1:] } func substRef(pat, repl, str string) string { if strings.IndexByte(pat, '%') >= 0 && strings.IndexByte(repl, '%') >= 0 { return substPattern(pat, repl, str) } str = strings.TrimSuffix(str, pat) return str + repl } func stripExt(s string) string { suf := filepath.Ext(s) return s[:len(s)-len(suf)] } func trimLeftSpace(s string) string { for i, ch := range s { if !isWhitespace(ch) { return s[i:] } } return "" } func trimLeftSpaceBytes(s []byte) []byte { for i, ch := range s { if !isWhitespace(rune(ch)) { return s[i:] } } return nil } func trimRightSpaceBytes(s []byte) []byte { for i := len(s) - 1; i >= 0; i-- { ch := s[i] if !isWhitespace(rune(ch)) { return s[:i+1] } } return nil } func trimSpaceBytes(s []byte) []byte { s = trimLeftSpaceBytes(s) return trimRightSpaceBytes(s) } // Strip leading sequences of './' from file names, so that ./file // and file are considered to be the same file. 
// From http://www.gnu.org/software/make/manual/make.html#Features func trimLeadingCurdir(s string) string { for strings.HasPrefix(s, "./") { s = s[2:] } return s } func contains(list []string, s string) bool { for _, v := range list { if v == s { return true } } return false } func firstWord(line []byte) ([]byte, []byte) { s := newWordScanner(line) if s.Scan() { w := s.Bytes() return w, s.Remain() } return line, nil } type findCharOption int const ( noSkipVar findCharOption = iota skipVar ) func findLiteralChar(s []byte, stop1, stop2 byte, op findCharOption) int { i := 0 for { var ch byte for i < len(s) { ch = s[i] if ch == '\\' { i += 2 continue } if ch == stop1 { break } if ch == stop2 { break } if op == skipVar && ch == '$' { break } i++ } if i >= len(s) { return -1 } if ch == '$' { i++ if i == len(s) { return -1 } oparen := s[i] cparen := closeParen(oparen) i++ if cparen != 0 { pcount := 1 SkipParen: for i < len(s) { ch = s[i] switch ch { case oparen: pcount++ case cparen: pcount-- if pcount == 0 { i++ break SkipParen } } i++ } } continue } return i } } func removeComment(line []byte) ([]byte, bool) { var buf []byte for i := 0; i < len(line); i++ { if line[i] != '#' { continue } b := 1 for ; i-b >= 0; b++ { if line[i-b] != '\\' { break } } b++ nb := b / 2 quoted := b%2 == 1 if buf == nil { buf = make([]byte, len(line)) copy(buf, line) line = buf } line = append(line[:i-b+nb+1], line[i:]...) if !quoted { return line[:i-b+nb+1], true } i = i - nb + 1 } return line, false } // cmdline removes tab at the beginning of lines. func cmdline(line string) string { buf := []byte(line) for i := 0; i < len(buf); i++ { if buf[i] == '\n' && i+1 < len(buf) && buf[i+1] == '\t' { copy(buf[i+1:], buf[i+2:]) buf = buf[:len(buf)-1] } } return string(buf) } // concatline removes backslash newline. // TODO: backslash baskslash newline becomes backslash newline. 
func concatline(line []byte) []byte { var buf []byte for i := 0; i < len(line); i++ { if line[i] != '\\' { continue } if i+1 == len(line) { if line[i-1] != '\\' { line = line[:i] } break } if line[i+1] == '\n' { if buf == nil { buf = make([]byte, len(line)) copy(buf, line) line = buf } oline := trimRightSpaceBytes(line[:i]) oline = append(oline, ' ') nextline := trimLeftSpaceBytes(line[i+2:]) line = append(oline, nextline...) i = len(oline) - 1 continue } if i+2 < len(line) && line[i+1] == '\r' && line[i+2] == '\n' { if buf == nil { buf = make([]byte, len(line)) copy(buf, line) line = buf } oline := trimRightSpaceBytes(line[:i]) oline = append(oline, ' ') nextline := trimLeftSpaceBytes(line[i+3:]) line = append(oline, nextline...) i = len(oline) - 1 continue } } return line } strutil.h0100644 0000000 0000000 00000006756 13654546140 011512 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
#ifndef STRUTIL_H_ #define STRUTIL_H_ #include #include #include "string_piece.h" using namespace std; class WordScanner { public: struct Iterator { Iterator& operator++(); StringPiece operator*() const; bool operator!=(const Iterator& r) const { return in != r.in || s != r.s || i != r.i; } const StringPiece* in; int s; int i; }; explicit WordScanner(StringPiece in); Iterator begin() const; Iterator end() const; void Split(vector* o); private: StringPiece in_; }; class WordWriter { public: explicit WordWriter(string* o); void MaybeAddWhitespace(); void Write(StringPiece s); private: string* out_; bool needs_space_; }; // Temporary modifies s[s.size()] to '\0'. class ScopedTerminator { public: explicit ScopedTerminator(StringPiece s); ~ScopedTerminator(); private: StringPiece s_; char c_; }; template inline string JoinStrings(vector v, const char* sep) { string r; for (StringPiece s : v) { if (!r.empty()) { r += sep; } r.append(s.begin(), s.end()); } return r; } void AppendString(StringPiece str, string* out); bool HasPrefix(StringPiece str, StringPiece prefix); bool HasSuffix(StringPiece str, StringPiece suffix); bool HasWord(StringPiece str, StringPiece w); StringPiece TrimPrefix(StringPiece str, StringPiece suffix); StringPiece TrimSuffix(StringPiece str, StringPiece suffix); class Pattern { public: explicit Pattern(StringPiece pat); bool Match(StringPiece str) const; StringPiece Stem(StringPiece str) const; void AppendSubst(StringPiece str, StringPiece subst, string* out) const; void AppendSubstRef(StringPiece str, StringPiece subst, string* out) const; private: bool MatchImpl(StringPiece str) const; StringPiece pat_; size_t percent_index_; }; string NoLineBreak(const string& s); StringPiece TrimLeftSpace(StringPiece s); StringPiece TrimRightSpace(StringPiece s); StringPiece TrimSpace(StringPiece s); StringPiece Dirname(StringPiece s); StringPiece Basename(StringPiece s); StringPiece GetExt(StringPiece s); StringPiece StripExt(StringPiece s); void 
NormalizePath(string* o); void AbsPath(StringPiece s, string* o); size_t FindOutsideParen(StringPiece s, char c); size_t FindTwoOutsideParen(StringPiece s, char c1, char c2); size_t FindThreeOutsideParen(StringPiece s, char c1, char c2, char c3); size_t FindEndOfLine(StringPiece s, size_t e, size_t* lf_cnt); // Strip leading sequences of './' from file names, so that ./file // and file are considered to be the same file. // From http://www.gnu.org/software/make/manual/make.html#Features StringPiece TrimLeadingCurdir(StringPiece s); void FormatForCommandSubstitution(string* s); string SortWordsInString(StringPiece s); string ConcatDir(StringPiece b, StringPiece n); string EchoEscape(const string& str); void EscapeShell(string* s); #endif // STRUTIL_H_ strutil_bench.cc0100644 0000000 0000000 00000002155 13654546140 012774 0ustar000000000 0000000 // Copyright 2016 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include #include #include "flags.h" #include "string_piece.h" #include "strutil.h" #include "timeutil.h" using namespace std; int main() { g_flags.enable_stat_logs = true; string s; while (s.size() < 400000) { if (!s.empty()) s += ' '; s += "frameworks/base/docs/html/tv/adt-1/index.jd"; } ScopedTimeReporter tr("WordScanner"); static const int N = 1000; for (int i = 0; i < N; i++) { vector toks; WordScanner(s).Split(&toks); } } strutil_test.cc0100644 0000000 0000000 00000014302 13654546140 012671 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "strutil.h" #include #include #include #include #include #include "string_piece.h" #include "testutil.h" using namespace std; namespace { void TestWordScanner() { vector ss; for (StringPiece tok : WordScanner("foo bar baz hogeeeeeeeeeeeeeeee")) { ss.push_back(tok); } assert(ss.size() == 4LU); ASSERT_EQ(ss[0], "foo"); ASSERT_EQ(ss[1], "bar"); ASSERT_EQ(ss[2], "baz"); ASSERT_EQ(ss[3], "hogeeeeeeeeeeeeeeee"); } void TestHasPrefix() { assert(HasPrefix("foo", "foo")); assert(HasPrefix("foo", "fo")); assert(HasPrefix("foo", "")); assert(!HasPrefix("foo", "fooo")); } void TestHasSuffix() { assert(HasSuffix("bar", "bar")); assert(HasSuffix("bar", "ar")); assert(HasSuffix("bar", "")); assert(!HasSuffix("bar", "bbar")); } void TestTrimPrefix() { ASSERT_EQ(TrimPrefix("foo", "foo"), ""); ASSERT_EQ(TrimPrefix("foo", "fo"), "o"); ASSERT_EQ(TrimPrefix("foo", ""), "foo"); ASSERT_EQ(TrimPrefix("foo", "fooo"), "foo"); } void TestTrimSuffix() { ASSERT_EQ(TrimSuffix("bar", "bar"), ""); ASSERT_EQ(TrimSuffix("bar", "ar"), "b"); ASSERT_EQ(TrimSuffix("bar", ""), "bar"); ASSERT_EQ(TrimSuffix("bar", "bbar"), "bar"); } string SubstPattern(StringPiece str, StringPiece pat, StringPiece subst) { string r; Pattern(pat).AppendSubst(str, subst, &r); return r; } void TestSubstPattern() { ASSERT_EQ(SubstPattern("x.c", "%.c", "%.o"), "x.o"); ASSERT_EQ(SubstPattern("c.x", "c.%", "o.%"), "o.x"); ASSERT_EQ(SubstPattern("x.c.c", "%.c", "%.o"), "x.c.o"); ASSERT_EQ(SubstPattern("x.x y.c", "%.c", "%.o"), "x.x y.o"); ASSERT_EQ(SubstPattern("x.%.c", "%.%.c", "OK"), "OK"); ASSERT_EQ(SubstPattern("x.c", "x.c", "OK"), "OK"); ASSERT_EQ(SubstPattern("x.c.c", "x.c", "XX"), "x.c.c"); ASSERT_EQ(SubstPattern("x.x.c", "x.c", "XX"), "x.x.c"); } void TestNoLineBreak() { assert(NoLineBreak("a\nb") == "a\\nb"); assert(NoLineBreak("a\nb\nc") == "a\\nb\\nc"); } void TestHasWord() { assert(HasWord("foo bar baz", "bar")); assert(HasWord("foo bar baz", "foo")); assert(HasWord("foo bar baz", "baz")); 
assert(!HasWord("foo bar baz", "oo")); assert(!HasWord("foo bar baz", "ar")); assert(!HasWord("foo bar baz", "ba")); assert(!HasWord("foo bar baz", "az")); assert(!HasWord("foo bar baz", "ba")); assert(!HasWord("foo bar baz", "fo")); } static string NormalizePath(string s) { ::NormalizePath(&s); return s; } void TestNormalizePath() { ASSERT_EQ(NormalizePath(""), ""); ASSERT_EQ(NormalizePath("."), ""); ASSERT_EQ(NormalizePath("/"), "/"); ASSERT_EQ(NormalizePath("/tmp"), "/tmp"); ASSERT_EQ(NormalizePath("////tmp////"), "/tmp"); ASSERT_EQ(NormalizePath("a////b"), "a/b"); ASSERT_EQ(NormalizePath("a//.//b"), "a/b"); ASSERT_EQ(NormalizePath("a////b//../c/////"), "a/c"); ASSERT_EQ(NormalizePath("../foo"), "../foo"); ASSERT_EQ(NormalizePath("./foo"), "foo"); ASSERT_EQ(NormalizePath("x/y/..//../foo"), "foo"); ASSERT_EQ(NormalizePath("x/../../foo"), "../foo"); ASSERT_EQ(NormalizePath("/../foo"), "/foo"); ASSERT_EQ(NormalizePath("/../../foo"), "/foo"); ASSERT_EQ(NormalizePath("/a/../../foo"), "/foo"); ASSERT_EQ(NormalizePath("/a/b/.."), "/a"); ASSERT_EQ(NormalizePath("../../a/b"), "../../a/b"); ASSERT_EQ(NormalizePath("../../../a/b"), "../../../a/b"); ASSERT_EQ(NormalizePath(".././../a/b"), "../../a/b"); ASSERT_EQ(NormalizePath("./../../a/b"), "../../a/b"); } string EscapeShell(string s) { ::EscapeShell(&s); return s; } void TestEscapeShell() { ASSERT_EQ(EscapeShell(""), ""); ASSERT_EQ(EscapeShell("foo"), "foo"); ASSERT_EQ(EscapeShell("foo$`\\baz\"bar"), "foo\\$\\`\\\\baz\\\"bar"); ASSERT_EQ(EscapeShell("$$"), "\\$$"); ASSERT_EQ(EscapeShell("$$$"), "\\$$\\$"); ASSERT_EQ(EscapeShell("\\\n"), "\\\\\n"); } void TestFindEndOfLine() { size_t lf_cnt = 0; ASSERT_EQ(FindEndOfLine("foo", 0, &lf_cnt), 3); char buf[10] = {'f', 'o', '\\', '\0', 'x', 'y'}; ASSERT_EQ(FindEndOfLine(StringPiece(buf, 6), 0, &lf_cnt), 3); ASSERT_EQ(FindEndOfLine(StringPiece(buf, 2), 0, &lf_cnt), 2); } // Take a string, and copy it into an allocated buffer where // the byte immediately after the null 
termination character // is read protected. Useful for testing, but doesn't support // freeing the allocated pages. const char* CreateProtectedString(const char* str) { int pagesize = sysconf(_SC_PAGE_SIZE); void* buffer; char* buffer_str; // Allocate two pages of memory if (posix_memalign(&buffer, pagesize, pagesize * 2) != 0) { perror("posix_memalign failed"); assert(false); } // Make the second page unreadable buffer_str = (char*)buffer + pagesize; if (mprotect(buffer_str, pagesize, PROT_NONE) != 0) { perror("mprotect failed"); assert(false); } // Then move the test string into the very end of the first page buffer_str -= strlen(str) + 1; strcpy(buffer_str, str); return buffer_str; } void TestWordScannerInvalidAccess() { vector ss; for (StringPiece tok : WordScanner(CreateProtectedString("0123 456789"))) { ss.push_back(tok); } assert(ss.size() == 2LU); ASSERT_EQ(ss[0], "0123"); ASSERT_EQ(ss[1], "456789"); } void TestFindEndOfLineInvalidAccess() { size_t lf_cnt = 0; ASSERT_EQ(FindEndOfLine(CreateProtectedString("a\\"), 0, &lf_cnt), 2); } } // namespace int main() { TestWordScanner(); TestHasPrefix(); TestHasSuffix(); TestTrimPrefix(); TestTrimSuffix(); TestSubstPattern(); TestNoLineBreak(); TestHasWord(); TestNormalizePath(); TestEscapeShell(); TestFindEndOfLine(); TestWordScannerInvalidAccess(); TestFindEndOfLineInvalidAccess(); assert(!g_failed); } strutil_test.go0100644 0000000 0000000 00000011006 13654546140 012707 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// See the License for the specific language governing permissions and // limitations under the License. package kati import ( "fmt" "reflect" "testing" ) func TestSplitSpaces(t *testing.T) { for _, tc := range []struct { in string want []string }{ { in: "foo", want: []string{"foo"}, }, { in: " ", want: nil, }, { in: " foo bar ", want: []string{"foo", "bar"}, }, { in: " foo bar", want: []string{"foo", "bar"}, }, { in: "foo bar ", want: []string{"foo", "bar"}, }, } { got := splitSpaces(tc.in) if !reflect.DeepEqual(got, tc.want) { t.Errorf(`splitSpaces(%q)=%q, want %q`, tc.in, got, tc.want) } } } func TestWordScanner(t *testing.T) { for _, tc := range []struct { in string want []string }{ { in: "foo", want: []string{"foo"}, }, { in: " ", want: nil, }, { in: " foo bar ", want: []string{"foo", "bar"}, }, { in: " foo bar", want: []string{"foo", "bar"}, }, { in: "foo bar ", want: []string{"foo", "bar"}, }, } { ws := newWordScanner([]byte(tc.in)) var got []string for ws.Scan() { got = append(got, string(ws.Bytes())) } if !reflect.DeepEqual(got, tc.want) { t.Errorf(`wordScanner(%q)=%q, want %q`, tc.in, got, tc.want) } } } func TestSubstPattern(t *testing.T) { concatStr := func(pre, subst, post []byte) string { var s []byte s = append(s, pre...) s = append(s, subst...) s = append(s, post...) 
return string(s) } for _, tc := range []struct { pat string repl string in string want string }{ { pat: "%.c", repl: "%.o", in: "x.c", want: "x.o", }, { pat: "c.%", repl: "o.%", in: "c.x", want: "o.x", }, { pat: "%.c", repl: "%.o", in: "x.c.c", want: "x.c.o", }, { pat: "%.c", repl: "%.o", in: "x.x y.c", want: "x.x y.o", }, { pat: "%.%.c", repl: "OK", in: "x.%.c", want: "OK", }, { pat: "x.c", repl: "XX", in: "x.c", want: "XX", }, { pat: "x.c", repl: "XX", in: "x.c.c", want: "x.c.c", }, { pat: "x.c", repl: "XX", in: "x.x.c", want: "x.x.c", }, } { got := substPattern(tc.pat, tc.repl, tc.in) if got != tc.want { t.Errorf(`substPattern(%q,%q,%q)=%q, want %q`, tc.pat, tc.repl, tc.in, got, tc.want) } got = concatStr(substPatternBytes([]byte(tc.pat), []byte(tc.repl), []byte(tc.in))) if got != tc.want { fmt.Printf("substPatternBytes(%q,%q,%q)=%q, want %q\n", tc.pat, tc.repl, tc.in, got, tc.want) t.Errorf(`substPatternBytes(%q,%q,%q)=%q, want %q`, tc.pat, tc.repl, tc.in, got, tc.want) } } } func TestRemoveComment(t *testing.T) { for _, tc := range []struct { in string want string removed bool }{ { in: "foo", want: "foo", }, { in: "foo #bar", want: "foo ", removed: true, }, { in: `foo \#bar`, want: "foo #bar", }, { in: `foo \#bar # baz`, want: `foo #bar `, removed: true, }, { in: `foo \\ \# \: \; \% \= \a \? \+`, want: `foo \\ # \: \; \% \= \a \? 
\+`, }, { in: `foo \\#bar`, want: `foo \`, removed: true, }, { in: `foo \\\#bar`, want: `foo \#bar`, }, { in: `PASS:=\#PASS`, want: `PASS:=#PASS`, }, } { got, removed := removeComment([]byte(tc.in)) if string(got) != tc.want { t.Errorf("removeComment(%q)=%q, _; want=%q, _", tc.in, got, tc.want) } if removed != tc.removed { t.Errorf("removeComment(%q)=_, %t; want=_, %t", tc.in, removed, tc.removed) } } } func TestConcatline(t *testing.T) { for _, tc := range []struct { in string want string }{ { in: "foo", want: "foo", }, { in: "foo \\\n\t bar", want: "foo bar", }, { in: "foo \\\n \\\n\t bar", want: "foo bar", }, { in: `foo \`, want: `foo `, }, { in: `foo \\`, want: `foo \\`, }, } { got := string(concatline([]byte(tc.in))) if got != tc.want { t.Errorf("concatline(%q)=%q; want=%q\n", tc.in, got, tc.want) } } } symtab.cc0100644 0000000 0000000 00000010413 13654546140 011422 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore //#define ENABLE_TID_CHECK #include "symtab.h" #ifdef ENABLE_TID_CHECK #include #endif #include #include #include "log.h" #include "strutil.h" #include "var.h" struct SymbolData { SymbolData() : gv(Var::Undefined()) {} Var* gv; }; vector* g_symbols; static vector g_symbol_data; Symbol kEmptySym; Symbol kShellSym; Symbol kKatiReadonlySym; Symbol::Symbol(int v) : v_(v) {} Var* Symbol::PeekGlobalVar() const { if (static_cast(v_) >= g_symbol_data.size()) { return Var::Undefined(); } return g_symbol_data[v_].gv; } Var* Symbol::GetGlobalVar() const { if (static_cast(v_) >= g_symbol_data.size()) { g_symbol_data.resize(v_ + 1); } Var* v = g_symbol_data[v_].gv; if (v->Origin() == VarOrigin::ENVIRONMENT || v->Origin() == VarOrigin::ENVIRONMENT_OVERRIDE) { Vars::add_used_env_vars(*this); } return v; } void Symbol::SetGlobalVar(Var* v, bool is_override, bool* readonly) const { if (static_cast(v_) >= g_symbol_data.size()) { g_symbol_data.resize(v_ + 1); } Var* orig = g_symbol_data[v_].gv; if (orig->ReadOnly()) { if (readonly != nullptr) *readonly = true; else ERROR("*** cannot assign to readonly variable: %s", c_str()); return; } else if (readonly != nullptr) { *readonly = false; } if (!is_override && (orig->Origin() == VarOrigin::OVERRIDE || orig->Origin() == VarOrigin::ENVIRONMENT_OVERRIDE)) { return; } if (orig->Origin() == VarOrigin::COMMAND_LINE && v->Origin() == VarOrigin::FILE) { return; } if (orig->Origin() == VarOrigin::AUTOMATIC) { ERROR("overriding automatic variable is not implemented yet"); } if (orig->IsDefined()) delete orig; g_symbol_data[v_].gv = v; } ScopedGlobalVar::ScopedGlobalVar(Symbol name, Var* var) : name_(name), orig_(NULL) { orig_ = name.GetGlobalVar(); g_symbol_data[name_.val()].gv = var; } ScopedGlobalVar::~ScopedGlobalVar() { g_symbol_data[name_.val()].gv = orig_; } class Symtab { public: Symtab() { #ifdef ENABLE_TID_CHECK tid_ = pthread_self(); #endif CHECK(g_symbols == NULL); g_symbols = &symbols_; Symbol s = InternImpl(""); 
CHECK(s.v_ == 0); CHECK(Intern("") == s); char b[2]; b[1] = 0; for (int i = 1; i < 256; i++) { b[0] = i; s = InternImpl(b); CHECK(s.val() == i); } kEmptySym = Intern(""); kShellSym = Intern("SHELL"); kKatiReadonlySym = Intern(".KATI_READONLY"); } ~Symtab() { LOG_STAT("%zu symbols", symbols_.size()); for (string* s : symbols_) delete s; } Symbol InternImpl(StringPiece s) { auto found = symtab_.find(s); if (found != symtab_.end()) { return found->second; } symbols_.push_back(new string(s.data(), s.size())); Symbol sym = Symbol(symtab_.size()); bool ok = symtab_.emplace(*symbols_.back(), sym).second; CHECK(ok); return sym; } Symbol Intern(StringPiece s) { #ifdef ENABLE_TID_CHECK if (tid_ != pthread_self()) abort(); #endif if (s.size() <= 1) { return Symbol(s.empty() ? 0 : (unsigned char)s[0]); } return InternImpl(s); } private: unordered_map symtab_; vector symbols_; #ifdef ENABLE_TID_CHECK pthread_t tid_; #endif }; static Symtab* g_symtab; void InitSymtab() { g_symtab = new Symtab; } void QuitSymtab() { delete g_symtab; } Symbol Intern(StringPiece s) { return g_symtab->Intern(s); } string JoinSymbols(const vector& syms, const char* sep) { vector strs; for (Symbol s : syms) { strs.push_back(s.str()); } return JoinStrings(strs, sep); } symtab.go0100644 0000000 0000000 00000001731 13654546140 011445 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package kati import "sync" type symtabT struct { mu sync.Mutex m map[string]string } var symtab = &symtabT{ m: make(map[string]string), } func intern(s string) string { symtab.mu.Lock() v, ok := symtab.m[s] if ok { symtab.mu.Unlock() return v } symtab.m[s] = s symtab.mu.Unlock() return s } func internBytes(s []byte) string { return intern(string(s)) } symtab.h0100644 0000000 0000000 00000012443 13654546140 011271 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef SYMTAB_H_ #define SYMTAB_H_ #include #include #include #include "string_piece.h" using namespace std; extern vector* g_symbols; class Symtab; class Var; class Symbol { public: explicit Symbol() : v_(-1) {} const string& str() const { return *((*g_symbols)[v_]); } const char* c_str() const { return str().c_str(); } bool empty() const { return !v_; } int val() const { return v_; } char get(size_t i) const { const string& s = str(); if (i >= s.size()) return 0; return s[i]; } bool IsValid() const { return v_ >= 0; } Var* PeekGlobalVar() const; Var* GetGlobalVar() const; void SetGlobalVar(Var* v, bool is_override = false, bool* readonly = nullptr) const; private: explicit Symbol(int v); int v_; friend class Symtab; friend class SymbolSet; }; /* A set of symbols represented as bitmap indexed by Symbol's ordinal value. */ class SymbolSet { public: SymbolSet() : low_(0), high_(0) {} /* Returns true if Symbol belongs to this set. 
*/ bool exists(Symbol sym) const { size_t bit_nr = static_cast(sym.val()); return sym.IsValid() && bit_nr >= low_ && bit_nr < high_ && bits_[(bit_nr - low_) / 64][(bit_nr - low_) % 64]; } /* Adds Symbol to this set. */ void insert(Symbol sym) { if (!sym.IsValid()) { return; } size_t bit_nr = static_cast(sym.val()); if (bit_nr < low_ || bit_nr >= high_) { resize(bit_nr); } bits_[(bit_nr - low_) / 64][(bit_nr - low_) % 64] = true; } /* Returns the number of Symbol's in this set. */ size_t size() const { size_t n = 0; for (auto const& bitset : bits_) { n += bitset.count(); } return n; } /* Allow using foreach. * E.g., * SymbolSet symbol_set; * for (auto const& symbol: symbol_set) { ... } */ class iterator { const SymbolSet* bitset_; size_t pos_; iterator(const SymbolSet* bitset, size_t pos) : bitset_(bitset), pos_(pos) {} /* Proceed to the next Symbol. */ void next() { size_t bit_nr = (pos_ > bitset_->low_) ? pos_ - bitset_->low_ : 0; while (bit_nr < (bitset_->high_ - bitset_->low_)) { if ((bit_nr % 64) == 0 && !bitset_->bits_[bit_nr / 64].any()) { bit_nr += 64; continue; } if (bitset_->bits_[bit_nr / 64][bit_nr % 64]) { break; } ++bit_nr; } pos_ = bitset_->low_ + bit_nr; } public: iterator& operator++() { if (pos_ < bitset_->high_) { ++pos_; next(); } return *this; } bool operator==(iterator other) const { return bitset_ == other.bitset_ && pos_ == other.pos_; } bool operator!=(iterator other) const { return !(*this == other); } Symbol operator*() { return Symbol(pos_); } friend class SymbolSet; }; iterator begin() const { iterator it(this, low_); it.next(); return it; } iterator end() const { return iterator(this, high_); } private: friend class iterator; /* Ensure that given bit number is in [low_, high_) */ void resize(size_t bit_nr) { size_t new_low = bit_nr & ~63; size_t new_high = (bit_nr + 64) & ~63; if (bits_.empty()) { high_ = low_ = new_low; } if (new_low > low_) { new_low = low_; } if (new_high <= high_) { new_high = high_; } if (new_low == low_) { 
bits_.resize((new_high - new_low) / 64); } else { std::vector > newbits((new_high - new_low) / 64); std::copy(bits_.begin(), bits_.end(), newbits.begin() + (low_ - new_low) / 64); bits_.swap(newbits); } low_ = new_low; high_ = new_high; } /* Keep only the (aligned) range where at least one bit has been set. * E.g., if we only ever set bits 65 and 141, |low_| will be 64, |high_| * will be 192, and |bits_| will have 2 elements. */ size_t low_; size_t high_; std::vector > bits_; }; class ScopedGlobalVar { public: ScopedGlobalVar(Symbol name, Var* var); ~ScopedGlobalVar(); private: Symbol name_; Var* orig_; }; inline bool operator==(const Symbol& x, const Symbol& y) { return x.val() == y.val(); } inline bool operator<(const Symbol& x, const Symbol& y) { return x.val() < y.val(); } namespace std { template <> struct hash { size_t operator()(const Symbol& s) const { return s.val(); } }; } // namespace std extern Symbol kEmptySym; extern Symbol kShellSym; extern Symbol kKatiReadonlySym; void InitSymtab(); void QuitSymtab(); Symbol Intern(StringPiece s); string JoinSymbols(const vector& syms, const char* sep); #endif // SYMTAB_H_ testcase/0040755 0000000 0000000 00000000000 13654546140 011433 5ustar000000000 0000000 testcase/abspath.mk0100644 0000000 0000000 00000000176 13654546140 013407 0ustar000000000 0000000 foo = $(abspath ./foo bar/../foo bar//..//foo / /usr) bar = $(abspath .. ./. ./ /aa/.. a///) test: echo $(foo) echo $(bar) testcase/addprefix.mk0100644 0000000 0000000 00000000046 13654546140 013727 0ustar000000000 0000000 test: echo $(addprefix src/,foo bar) testcase/addsuffix.mk0100644 0000000 0000000 00000000044 13654546140 013734 0ustar000000000 0000000 test: echo $(addsuffix .c,foo bar) testcase/and.mk0100644 0000000 0000000 00000000460 13654546140 012523 0ustar000000000 0000000 TRUE:=foo FALSE:= XY:=x y X:=$(subst y, ,$(XY)) Y:=$(subst x, ,$(XY)) $(and ${TRUE}, $(info PASS_1)) $(and ${FALSE}, $(info FAIL_2)) # Too many arguments. 
$(info $(and ${TRUE}, PASS, PASS)) $(info $(and ${TRUE}, $(X) )) $(info $(and ${TRUE}, $(Y) )) $(and ${FALSE} , $(info FAIL_3)) test: echo OK testcase/append_self_reference.mk0100644 0000000 0000000 00000000143 13654546140 016255 0ustar000000000 0000000 x := one x += two $(x) $(info $(x)) # TODO: shouldn't crash. #y = one #y += two $(y) #$(info $(y))testcase/assign_after_tab.mk0100644 0000000 0000000 00000000122 13654546140 015247 0ustar000000000 0000000 # This is an assignment. X=Y FOO=test: # But this is a recipe. $(FOO) X=$(X) testcase/assign_in_parens.mk0100644 0000000 0000000 00000000076 13654546140 015306 0ustar000000000 0000000 $(foreach varname,x,$(eval $(varname)=PASS)) test: echo $(x) testcase/assign_types.mk0100644 0000000 0000000 00000000134 13654546140 014467 0ustar000000000 0000000 A = a B = $(A) C := $(A) A = aa D = b D += b E ?= c E ?= d test: echo $(B) $(C) $(D) $(E) testcase/assign_with_trailing_space.mk0100644 0000000 0000000 00000000232 13654546140 017341 0ustar000000000 0000000 a := Y # comment $(info X$(a)Z) a := Y $(info X$(a)Z) a := Y $(info X$(a)Z) sp := $(subst S, ,S) a := Y$(sp) $(info X$(a)Z) a := $(sp)Y $(info X$(a)Z) testcase/auto_var_suffixes.mk0100644 0000000 0000000 00000000555 13654546140 015522 0ustar000000000 0000000 test1: mkdir adir bdir touch adir/afile bdir/bfile afile bfile test2: tdir/tfile tfile tdir/tfile: adir/afile bdir/bfile echo $(@D) echo $(@F) echo $( $@ testcase/basic_rule.mk0100644 0000000 0000000 00000000032 13654546140 014064 0ustar000000000 0000000 test: foo foo: echo foo testcase/basic_var.mk0100644 0000000 0000000 00000000034 13654546140 013707 0ustar000000000 0000000 VAR=var test: echo $(VAR) testcase/blank_line_before_command.mk0100644 0000000 0000000 00000000161 13654546140 017075 0ustar000000000 0000000 # TODO(c|go-ninja): "include: Command not found" should come before "*** [test] Error 127." 
test: include foo testcase/build_once.mk0100644 0000000 0000000 00000000555 13654546140 014071 0ustar000000000 0000000 # expect protoc compile/link only once. test: foo foo: foo.o bar.o echo link $@ from $< %.o: %.c FORCE_DO_CMD echo compile $@ from $< .PHONY: FORCE_DO_CMD FORCE_DO_CMD: foo.c: | protoc foo.c: foo.proto echo protoc $@ from $< foo.proto: bar.c: | protoc bar.c: bar.proto echo protoc $@ from $< bar.proto: protoc: proto.o echo link $@ from $< proto.c: testcase/builtin_rules.mk0100644 0000000 0000000 00000000207 13654546140 014640 0ustar000000000 0000000 CFLAGS:=-g CXXFLAGS:=-O TARGET_ARCH:=-O2 CPPFLAGS:=-S test1: touch foo.c bar.cc test2: foo.o bar.o # TODO: Add more builtin rules. testcase/builtin_vars.mk0100644 0000000 0000000 00000000116 13654546140 014460 0ustar000000000 0000000 test: echo $(CC) echo $(CXX) echo $(SHELL) # TODO: Add more builtin vars. testcase/call.mk0100644 0000000 0000000 00000000527 13654546140 012700 0ustar000000000 0000000 # from gyp-generated Makefile empty := space := $(empty) $(empty) replace_spaces = $(subst $(space),?,$1) unreplace_spaces = $(subst ?,$(space),$1) dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) test: foo echo $(call dirx,foo/bar) echo $(call dirx,foo bar/baz quux) echo $(call dirx,foo,bar) foo: mkdir foo "foo bar" testcase/call_with_many_args.mk0100644 0000000 0000000 00000000136 13654546140 015767 0ustar000000000 0000000 define func $(11)$(12)$(13)$(14) endef test: echo $(call func,1,2,3,4,5,6,7,8,9,10,P,A,S,S) testcase/call_with_whitespace.mk0100644 0000000 0000000 00000000217 13654546140 016143 0ustar000000000 0000000 func = $(info called with '$(1)') test = $(call $(1),$(1)) $(call test,func) $(call test, func) $(call test,func ) $(call test, func ) test: testcase/canned_recipes.mk0100644 0000000 0000000 00000000273 13654546140 014725 0ustar000000000 0000000 # http://www.gnu.org/software/make/manual/make.html#Canned-Recipes # canned recipes are used in gyp-generated Makefile (fixup_dep 
etc) define run-echo echo $@ endef test: $(run-echo) testcase/circular_dep.mk0100644 0000000 0000000 00000000326 13654546140 014416 0ustar000000000 0000000 # TODO(ninja): Fix? test: self loop not_circular1 not_circular2 echo PASS self: self echo $@ loop: loop1 echo $@ loop1: loop2 echo $@ loop2: loop echo $@ not_circular1: Makefile not_circular2: Makefile testcase/close_paren_without_open.mk0100644 0000000 0000000 00000000032 13654546140 017052 0ustar000000000 0000000 $(info }#) test: echo OK testcase/cmdline_var.sh0100755 0000000 0000000 00000001316 13654546140 014253 0ustar000000000 0000000 #!/bin/sh # # Copyright 2016 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e mk="$@" cat < Makefile CLVAR := FAIL all: @echo \$(CLVAR) EOF ${mk} CLVAR:=PASS 2> /dev/null testcase/cmdline_var_makeflags.sh0100755 0000000 0000000 00000001671 13654546140 016271 0ustar000000000 0000000 #!/bin/sh # # Copyright 2016 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. set -e mk="$@" cat < Makefile CLVAR := FAIL MFVAR := FAIL FILEVAR := PASS all: @echo \$(ENVVAR) \$(origin ENVVAR) @echo \$(MFVAR) \$(origin MFVAR) @echo \$(CLVAR) \$(origin CLVAR) @echo \$(FILEVAR) \$(origin FILEVAR) EOF export ENVVAR=PASS export FILEVAR=FAIL export MAKEFLAGS="MFVAR=PASS CLVAR=FAIL" ${mk} CLVAR=PASS 2> /dev/null testcase/cmdline_var_modify.sh0100755 0000000 0000000 00000001352 13654546140 015622 0ustar000000000 0000000 #!/bin/sh # # Copyright 2016 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e mk="$@" cat < Makefile CLVAR := FAIL all: @echo \$(CLVAR) EOF ${mk} CLVAR:=P CLVAR+=A CLVAR+=SS CLVAR?=FAIL 2> /dev/null testcase/cmdline_var_override.sh0100755 0000000 0000000 00000001327 13654546140 016154 0ustar000000000 0000000 #!/bin/sh # # Copyright 2016 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
set -e mk="$@" cat < Makefile override CLVAR := PASS all: @echo \$(CLVAR) EOF ${mk} CLVAR:=FAIL 2> /dev/null testcase/colon_ws_in_file.mk0100644 0000000 0000000 00000000056 13654546140 015272 0ustar000000000 0000000 # TODO(c): Fix test: a\ b a\:b a%: echo $@ testcase/colon_ws_in_target.mk0100644 0000000 0000000 00000000134 13654546140 015636 0ustar000000000 0000000 # TODO(c): Fix test: a\ b echo $@ / $< a\ b: a\:b echo $@ / $< a\\\:b: echo a\\\:b $@ testcase/command_vars.mk0100644 0000000 0000000 00000000076 13654546140 014435 0ustar000000000 0000000 test: foo foo: bar baz echo $@ echo $< echo $^ bar: baz: testcase/comment.mk0100644 0000000 0000000 00000000111 13654546140 013414 0ustar000000000 0000000 FOO=OK # A comment # A multiline comment \ FOO=fail test: echo $(FOO) testcase/comment_in_command.mk0100644 0000000 0000000 00000000465 13654546140 015614 0ustar000000000 0000000 MAKEVER:=$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]') test1: # foo echo PASS test2: make$(MAKEVER) make4: # foo \ echo PASS make3: # foo \ echo PASS test3: $(shell echo foo #) test4: echo $(shell echo OK # FAIL \ FAIL2) test5: echo $(shell echo $$(echo PASS)) foo: echo OK testcase/comment_in_define.mk0100644 0000000 0000000 00000000110 13654546140 015413 0ustar000000000 0000000 define comment # PASS endef a:=$(comment) foo: $(comment) echo $(a) testcase/cond_syntax.mk0100644 0000000 0000000 00000001632 13654546140 014314 0ustar000000000 0000000 VAR=var VARREF=VAR EMPTY= UNDEFREF=UNDEFINED RESULT= ifdef VAR RESULT += PASS endif ifdef VAR RESULT += PASS else RESULT += FAIL endif ifdef $(VARREF) RESULT += PASS else RESULT += FAIL endif ifdef UNDEFINED RESULT += FAIL else RESULT += PASS endif ifdef $(UNDEFREF) RESULT += FAIL else RESULT += PASS endif ifdef EMPTY RESULT += FAIL else RESULT += PASS endif ifndef VAR RESULT += FAIL else RESULT += PASS endif ifndef $(VARREF) RESULT += FAIL else RESULT += PASS endif ifndef UNDEFINED RESULT += PASS else RESULT += FAIL endif ifndef 
$(UNDEFREF) RESULT += PASS else RESULT += FAIL endif ifeq ($(VAR),var) RESULT += PASS else RESULT += FAIL endif ifneq ($(VAR),var) RESULT += FAIL else RESULT += PASS endif ifeq ($(UNDEFINED),) RESULT += PASS else RESULT += FAIL endif ifeq (,$(UNDEFINED)) RESULT += PASS else RESULT += FAIL endif ifeq ($(VAR), var) RESULT += PASS else RESULT += FAIL endif test: echo $(RESULT) testcase/crlf.mk0100644 0000000 0000000 00000000065 13654546140 012710 0ustar000000000 0000000 PASS := \ PASS \ PASS test: echo $(PASS) testcase/crlf_after_directive.mk0100644 0000000 0000000 00000000073 13654546140 016126 0ustar000000000 0000000 ifdef foo else $(info PASS) endif define foo endef testcase/curdir.mk0100644 0000000 0000000 00000000327 13654546140 013253 0ustar000000000 0000000 srcdir := . test: foo.o bar.o echo linking $@ from $< foo.o: $(srcdir)/foo.c echo compiling $@ from $< bar.o: $(srcdir)/bar.c echo compiling $@ from $< $(srcdir)/foo.c: echo source $@ bar.c: echo source $@ testcase/curdir_implicit_rule.mk0100644 0000000 0000000 00000000241 13654546140 016167 0ustar000000000 0000000 srcdir := . test: foo.o bar.o echo linking $@ from $< %.o: $(srcdir)/%.c echo compiling $@ from $< $(srcdir)/foo.c: echo source $@ bar.c: echo source $@ testcase/curdir_var.mk0100644 0000000 0000000 00000000026 13654546140 014117 0ustar000000000 0000000 test: echo $(CURDIR) testcase/default_rule.mk0100644 0000000 0000000 00000000041 13654546140 014427 0ustar000000000 0000000 abc: echo PASS def: echo FAIL testcase/define.mk0100644 0000000 0000000 00000000670 13654546140 013216 0ustar000000000 0000000 # http://www.gnu.org/software/make/manual/make.html#Multi_002dLine # Note: in make 4.x # define name = # ... # endef # # but in make 3.x # define name # ... # endef # i.e. no = needed after name. # make 3.x defines "name =" for make 4.x example. # TODO: should we provide flag to specify gnu make version? # note: in make 4.x, there is `undefine`. 
define two-lines echo foo echo $(bar) endef bar = xxx test: echo BEGIN $(two-lines) END testcase/define_newline.mk0100644 0000000 0000000 00000000124 13654546140 014731 0ustar000000000 0000000 define newline endef $(info This should have$(newline)two lines) test: echo OK testcase/define_verbatim.mk0100644 0000000 0000000 00000000155 13654546140 015105 0ustar000000000 0000000 define multiline for i in 1 2 3 PASS; do\ echo $$i; \ done endef test: echo "$(multiline)" $(multiline) testcase/define_with_comments.mk0100644 0000000 0000000 00000001136 13654546140 016154 0ustar000000000 0000000 define define_with_space PASS1 endef define define_with_comment # foo PASS2 endef define endef_with_comment PASS3 endef # boo define endef_with_not_comment PASS4 endef bar define endef_with_not_comment2 PASS5 endef baz define endef_with_not_endef endefPASS endef define with_immediate_comment#comment PASS6 endef # Note: for some reason, the following is an error. #endef#comment test: echo $(define_with_space) echo $(define_with_comment) echo $(endef_with_comment) echo $(endef_with_not_comment) echo $(endef_with_not_comment2) echo $(endef_with_not_endef) echo $(with_immediate_comment) testcase/delete_on_error.mk0100644 0000000 0000000 00000000127 13654546140 015130 0ustar000000000 0000000 # TODO: Fix for non-ninja mode. .DELETE_ON_ERROR: test: file file: touch $@ false testcase/deprecated_export.mk0100644 0000000 0000000 00000000467 13654546140 015471 0ustar000000000 0000000 # TODO(go): not implemented A := 1 B := 2 export A B $(KATI_deprecate_export Message) export C := ok unexport B ifndef KATI $(info Makefile:9: C: export has been deprecated. Message.) $(info Makefile:10: B: unexport has been deprecated. Message.) 
endif test: echo $$(A) echo $$(B) echo $$(C) echo Done testcase/deprecated_var.mk0100644 0000000 0000000 00000004000 13654546140 014723 0ustar000000000 0000000 # TODO(go): not implemented A := test $(KATI_deprecated_var A B C D) $(info Writing to an undefined deprecated variable) B := test ifndef KATI $(info Makefile:8: B has been deprecated.) endif $(info Reading from deprecated variables - set before/after/never the deprecation func) $(info Writing to an undefined deprecated variable) D := $(A)$(B)$(C) ifndef KATI $(info Makefile:15: A has been deprecated.) $(info Makefile:15: B has been deprecated.) $(info Makefile:15: C has been deprecated.) $(info Makefile:15: D has been deprecated.) endif $(info Writing to a reset deprecated variable) D += test ifndef KATI $(info Makefile:24: D has been deprecated.) endif $(info Using a custom message) $(KATI_deprecated_var E,Use X instead) E = $(C) ifndef KATI $(info Makefile:31: E has been deprecated. Use X instead.) endif $(info Expanding a recursive variable with an embedded deprecated variable) $(E) ifndef KATI $(info Makefile:37: E has been deprecated. Use X instead.) $(info Makefile:37: C has been deprecated.) endif $(info All of the previous variable references have been basic SymRefs, now check VarRefs) F = E G := $($(F)) ifndef KATI $(info Makefile:45: E has been deprecated. Use X instead.) $(info Makefile:45: C has been deprecated.) endif $(info And check VarSubst) G := $(C:%.o=%.c) ifndef KATI $(info Makefile:52: C has been deprecated.) endif $(info Deprecated variable used in a rule-specific variable) test: A := $(E) ifndef KATI $(info Makefile:58: E has been deprecated. Use X instead.) $(info Makefile:58: C has been deprecated.) # A hides the global A variable, so is not considered deprecated. endif $(info Deprecated variable used as a macro) A := $(call B) ifndef KATI $(info Makefile:66: B has been deprecated.) $(info Makefile:66: A has been deprecated.) 
endif $(info Deprecated variable used in an ifdef) ifdef C endif ifndef KATI $(info Makefile:73: C has been deprecated.) endif $(info Deprecated variable used in a rule) test: echo $(C)Done ifndef KATI $(info Makefile:81: C has been deprecated.) endif testcase/dir.mk0100644 0000000 0000000 00000000276 13654546140 012544 0ustar000000000 0000000 test: foo echo $(dir foo) echo $(dir foo,bar) echo $(dir .) echo $(dir ) echo $(dir src/foo.c hacks) echo $(dir hacks src/foo.c) echo $(dir /) echo $(dir /foo) foo: mkdir foo bar testcase/directive_after_tab.mk0100644 0000000 0000000 00000000065 13654546140 015747 0ustar000000000 0000000 ifndef UNDEF test: echo PASS DUMMY:= else endif testcase/dollar_in_file.mk0100644 0000000 0000000 00000000077 13654546140 014727 0ustar000000000 0000000 test: $$testfile ls *testfile $$testfile: touch \$$testfile testcase/dot_rule.mk0100644 0000000 0000000 00000000125 13654546140 013574 0ustar000000000 0000000 # Rules start with dots cannot be the first rule. .foo: echo FAIL test: echo PASS testcase/double_colon_rule.mk0100644 0000000 0000000 00000000172 13654546140 015454 0ustar000000000 0000000 test:: echo FOO test:: echo BAR test:: A=B # Merge a double colon rule with target specific variable is OK. 
test: A=B testcase/else_if.mk0100644 0000000 0000000 00000000250 13654546140 013364 0ustar000000000 0000000 VAR:=FAIL ifndef UNDEF else ifndef VAR else ifndef VAR else endif ifdef UNDEF else ifndef VAR else ifndef VAR else ifndef VAR else VAR:=PASS endif test: echo $(VAR)testcase/empty_line_in_define.mk0100644 0000000 0000000 00000000221 13654546140 016121 0ustar000000000 0000000 define foo echo foo endef define bar echo bar endef define baz echo baz echo baz endef test: $(foo) $(foo) $(bar) $(bar) $(baz) $(baz) testcase/empty_output.mk0100644 0000000 0000000 00000000037 13654546140 014537 0ustar000000000 0000000 : echo FAIL test: echo PASS testcase/empty_static_pattern.sh0100644 0000000 0000000 00000001611 13654546140 016225 0ustar000000000 0000000 #!/bin/bash # # Copyright 2018 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" cat < Makefile test: @echo "PASS" list := \$(list): %.foo: %.bar cp \$< \$@ EOF if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. 
echo 'PASS' else ${mk} --no_builtin_rules --werror_implicit_rules 2>&1 fi testcase/empty_target_specific_var.mk0100644 0000000 0000000 00000000136 13654546140 017202 0ustar000000000 0000000 # TODO(go): https://github.com/google/kati/issues/83 test: =foo var==foo $(var): echo PASS testcase/empty_target_specific_var2.mk0100644 0000000 0000000 00000000175 13654546140 017267 0ustar000000000 0000000 # TODO(go): https://github.com/google/kati/issues/83 define var VAR:=1 endef $(call var) eq_one:==1 $(eq_one): echo PASS testcase/empty_var_cmd.mk0100644 0000000 0000000 00000000044 13654546140 014610 0ustar000000000 0000000 empty:= test: $(empty) echo PASS testcase/envvar.mk0100644 0000000 0000000 00000000024 13654546140 013256 0ustar000000000 0000000 test: echo $(PATH) testcase/equal_and_semi_in_rule.mk0100644 0000000 0000000 00000000116 13654546140 016442 0ustar000000000 0000000 test: foo bar foo: A=echo ; echo PASS foo: echo $(A) bar: ; echo PASS=PASS testcase/equal_in_target.mk0100644 0000000 0000000 00000000204 13654546140 015120 0ustar000000000 0000000 # TODO(c) fix parser. no rule to make target "test"? 
TSV:=test: A=PASS A_EQ_B:=A=B EQ== $(TSV) test: A$(EQ)B $(A_EQ_B): echo $(A) testcase/err_both_colon.mk0100644 0000000 0000000 00000000041 13654546140 014752 0ustar000000000 0000000 test: echo FOO test:: echo BAR testcase/err_deprecated_var_already_deprecated.mk0100644 0000000 0000000 00000000247 13654546140 021465 0ustar000000000 0000000 # TODO(go): not implemented $(KATI_deprecated_var A) $(KATI_deprecated_var A)$(or $(KATI),$(error Cannot call KATI_deprecated_var on already deprecated variable: A)) testcase/err_deprecated_var_already_obsolete.mk0100644 0000000 0000000 00000000243 13654546140 021175 0ustar000000000 0000000 # TODO(go): not implemented $(KATI_obsolete_var A) $(KATI_deprecated_var A)$(or $(KATI),$(error Cannot call KATI_deprecated_var on already obsolete variable: A)) testcase/err_empty_output.mk0100644 0000000 0000000 00000000015 13654546140 015403 0ustar000000000 0000000 : echo FAIL testcase/err_empty_var_name.mk0100644 0000000 0000000 00000000006 13654546140 015633 0ustar000000000 0000000 :=foo testcase/err_error.mk0100644 0000000 0000000 00000000025 13654546140 013757 0ustar000000000 0000000 $(error foo) test: testcase/err_error_in_recipe.mk0100644 0000000 0000000 00000000024 13654546140 015773 0ustar000000000 0000000 test: $(error foo) testcase/err_export_override.mk0100644 0000000 0000000 00000000507 13654546140 016053 0ustar000000000 0000000 # TODO(c): Fix - "override export define A" is invalid "override" directive. # GNU make 4 accepts this syntax. Note kati doesn't agree with make 4 # either. 
MAKEVER:=$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]') ifeq ($(MAKE)$(MAKEVER),make4) $(error test skipped) endif export override define A PASS_A endef testcase/err_extra_else.mk0100644 0000000 0000000 00000000005 13654546140 014757 0ustar000000000 0000000 else testcase/err_extra_endif.mk0100644 0000000 0000000 00000000006 13654546140 015115 0ustar000000000 0000000 endif testcase/err_ifdef_two_args.mk0100644 0000000 0000000 00000000041 13654546140 015606 0ustar000000000 0000000 # TODO(go): Fix ifdef a b endif testcase/err_ifdef_two_args2.mk0100644 0000000 0000000 00000000053 13654546140 015673 0ustar000000000 0000000 # TODO(go): Fix x := a b ifdef $(x) endif testcase/err_ifdef_with_leading_space.mk0100644 0000000 0000000 00000000127 13654546140 017577 0ustar000000000 0000000 # TODO(go): Fix B := $(subst S, ,Sa) ifdef $(B) $(info PASS) else $(error FAIL) endif testcase/err_include.mk0100644 0000000 0000000 00000000014 13654546140 014247 0ustar000000000 0000000 include foo testcase/err_invalid_else.mk0100644 0000000 0000000 00000000033 13654546140 015263 0ustar000000000 0000000 ifdef UNDEF else foo endif testcase/err_invalid_ifeq.mk0100644 0000000 0000000 00000000007 13654546140 015260 0ustar000000000 0000000 ifeq X testcase/err_invalid_ifeq2.mk0100644 0000000 0000000 00000000007 13654546140 015342 0ustar000000000 0000000 ifeq ( testcase/err_invalid_ifeq3.mk0100644 0000000 0000000 00000000053 13654546140 015344 0ustar000000000 0000000 ifeq "foo" "bar" "baz" else endif test: testcase/err_invalid_ifeq4.mk0100644 0000000 0000000 00000000013 13654546140 015341 0ustar000000000 0000000 ifeq endif testcase/err_invalid_ifeq5.mk0100644 0000000 0000000 00000000104 13654546140 015343 0ustar000000000 0000000 # TODO(c) fix error message ifeq (foo, bar) XXX else endif test: testcase/err_keyword_in_rule.mk0100644 0000000 0000000 00000000013 13654546140 016024 0ustar000000000 0000000 ifeq : foo testcase/err_missing_endef.mk0100644 0000000 0000000 00000000036 
13654546140 015442 0ustar000000000 0000000 all: echo FAIL define foo xx testcase/err_missing_endif.mk0100644 0000000 0000000 00000000032 13654546140 015442 0ustar000000000 0000000 all: echo FAIL ifdef foo testcase/err_missing_sep.mk0100644 0000000 0000000 00000000020 13654546140 015141 0ustar000000000 0000000 test: foo bar testcase/err_no_rule.mk0100644 0000000 0000000 00000000016 13654546140 014271 0ustar000000000 0000000 test: missing testcase/err_no_target_commands.mk0100644 0000000 0000000 00000000006 13654546140 016470 0ustar000000000 0000000 all: testcase/err_no_target_commands2.mk0100644 0000000 0000000 00000000017 13654546140 016554 0ustar000000000 0000000 $(empty) all: testcase/err_obsolete_export.mk0100644 0000000 0000000 00000000225 13654546140 016045 0ustar000000000 0000000 # TODO(go): not implemented export A := ok $(KATI_obsolete_export Message) export B := fail $(or $(KATI),$(error B: export is obsolete. Message)) testcase/err_obsolete_var.mk0100644 0000000 0000000 00000000312 13654546140 015311 0ustar000000000 0000000 # TODO(go): not implemented # # We go into a lot more cases in deprecated_var.mk, and hope that if deprecated works, obsolete does too. 
$(KATI_obsolete_var A) $(A) $(or $(KATI),$(error A is obsolete)) testcase/err_obsolete_var_already_deprecated.mk0100644 0000000 0000000 00000000243 13654546140 021175 0ustar000000000 0000000 # TODO(go): not implemented $(KATI_deprecated_var A) $(KATI_obsolete_var A)$(or $(KATI),$(error Cannot call KATI_obsolete_var on already deprecated variable: A)) testcase/err_obsolete_var_already_obsolete.mk0100644 0000000 0000000 00000000237 13654546140 020714 0ustar000000000 0000000 # TODO(go): not implemented $(KATI_obsolete_var A) $(KATI_obsolete_var A)$(or $(KATI),$(error Cannot call KATI_obsolete_var on already obsolete variable: A)) testcase/err_obsolete_var_assign.mk0100644 0000000 0000000 00000000136 13654546140 016661 0ustar000000000 0000000 # TODO(go): not implemented $(KATI_obsolete_var A) A := $(or $(KATI),$(error A is obsolete)) testcase/err_obsolete_var_msg.mk0100644 0000000 0000000 00000000173 13654546140 016164 0ustar000000000 0000000 # TODO(go): not implemented $(KATI_obsolete_var A,Use Y instead) $(A) $(or $(KATI),$(error A is obsolete. Use Y instead)) testcase/err_obsolete_var_varref.mk0100644 0000000 0000000 00000000150 13654546140 016656 0ustar000000000 0000000 # TODO(go): not implemented $(KATI_obsolete_var A) B := A $($(B)) $(or $(KATI),$(error A is obsolete)) testcase/err_obsolete_var_varsubst.mk0100644 0000000 0000000 00000000146 13654546140 017247 0ustar000000000 0000000 # TODO(go): not implemented $(KATI_obsolete_var A) $(A:%.o=%.c) $(or $(KATI),$(error A is obsolete)) testcase/err_override.mk0100644 0000000 0000000 00000000054 13654546140 014447 0ustar000000000 0000000 test: foo foo: echo FAIL foo: echo PASS testcase/err_override_export.mk0100644 0000000 0000000 00000000507 13654546140 016053 0ustar000000000 0000000 # TODO(c): Fix - "override export define A" is invalid "override" directive. # GNU make 4 accepts this syntax. Note kati doesn't agree with make 4 # either. 
MAKEVER:=$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]') ifeq ($(MAKE)$(MAKEVER),make4) $(error test skipped) endif override export define A PASS_A endef testcase/err_pattern_rule_only.mk0100644 0000000 0000000 00000000046 13654546140 016376 0ustar000000000 0000000 .c.o: echo FAIL %.o: %.c echo FAIL testcase/err_semicolon.mk0100644 0000000 0000000 00000000054 13654546140 014620 0ustar000000000 0000000 # This is an error. See also semicolon.mk ; testcase/err_semicolon2.mk0100644 0000000 0000000 00000000004 13654546140 014675 0ustar000000000 0000000 a; testcase/err_semicolon3.mk0100644 0000000 0000000 00000000024 13654546140 014700 0ustar000000000 0000000 # TODO(go): Fix ; testcase/err_semicolon4.mk0100644 0000000 0000000 00000000003 13654546140 014676 0ustar000000000 0000000 ; testcase/err_semicolon_in_output.mk0100644 0000000 0000000 00000000010 13654546140 016716 0ustar000000000 0000000 foo ; : testcase/err_suffixes.mk0100644 0000000 0000000 00000000143 13654546140 014463 0ustar000000000 0000000 # TODO: Fix test1: touch a.src test2: a.out # This isn't in .SUFFIXES. 
.src.out: echo $< > $@ testcase/err_suffixes2.mk0100644 0000000 0000000 00000000073 13654546140 014547 0ustar000000000 0000000 # TODO(go): Fix test1: touch a.c test2: a.o .SUFFIXES: testcase/err_two_else.mk0100644 0000000 0000000 00000000032 13654546140 014445 0ustar000000000 0000000 ifdef VAR else else endif testcase/err_unmatched_endef.mk0100644 0000000 0000000 00000000135 13654546140 015741 0ustar000000000 0000000 define test1 # Typo below, endif instead of endef endif define test2 endef foo: echo FAIL testcase/err_unterminated_func.mk0100644 0000000 0000000 00000000012 13654546140 016334 0ustar000000000 0000000 $(info () testcase/err_unterminated_var.mk0100644 0000000 0000000 00000000004 13654546140 016172 0ustar000000000 0000000 $(i testcase/err_word_non_numeric.mk0100644 0000000 0000000 00000000040 13654546140 016172 0ustar000000000 0000000 test: echo $(word -1, foo bar) testcase/err_word_zero.mk0100644 0000000 0000000 00000000037 13654546140 014643 0ustar000000000 0000000 test: echo $(word 0, foo bar) testcase/escape_for_shell_in_recipe.mk0100644 0000000 0000000 00000000320 13654546140 017266 0ustar000000000 0000000 # TODO(ninja): The first testcase fails due to an extra escape. We # should be careful not to break the second case when we fix the first # case. 
test: echo $(shell echo \"" # "\") echo $$(echo \"" # "\") testcase/escaped_backslash.mk0100644 0000000 0000000 00000000324 13654546140 015377 0ustar000000000 0000000 no_comment:=\\ # FAIL two_backslash:=\\ foo test: echo $(no_comment) echo $(two_backslash) echo \\ echo \\ foo $(info echo $(no_comment)) $(info echo $(two_backslash)) $(info echo \\) $(info echo \\ foo) testcase/escaped_comment.mk0100644 0000000 0000000 00000000144 13654546140 015106 0ustar000000000 0000000 PASS:=\#PASS test1: echo $(PASS) test2: echo \# # define pass \#PASS endef test3: echo $(pass) testcase/escaped_wildcard.mk0100644 0000000 0000000 00000000063 13654546140 015235 0ustar000000000 0000000 test1: touch foo test2: echo $(wildcard \f\o\o) testcase/eval.mk0100644 0000000 0000000 00000001013 13654546140 012703 0ustar000000000 0000000 test1: touch server.c server_priv.c server_access.c touch client.c client_api.c client_mem.c test2: all PROGRAMS = server client server_OBJS = server.o server_priv.o server_access.o client_OBJS = client.o client_api.o client_mem.o # Everything after this is generic .PHONY: all all: $(PROGRAMS) define PROGRAM_template $(1): $$($(1)_OBJS) ALL_OBJS += $$($(1)_OBJS) endef $(foreach prog,$(PROGRAMS),$(eval $(call PROGRAM_template,$(prog)))) $(PROGRAMS): echo $^ -o $@ clean: rm -f $(ALL_OBJS) $(PROGRAMS) testcase/eval_assign.mk0100644 0000000 0000000 00000001064 13654546140 014255 0ustar000000000 0000000 bar := FAIL pf := prog: baz $(pf) := PASS moge := PASS hoge := $$(moge) a := FAIL b := c c := PASS d := FAIL e := $$f f := PASS empty:=# g := FAIL h := $(empty) PASS define evaltest $(eval foo := PASS) $(eval bar := $$(foo)) echo $(bar) $(eval prog: baz := FAIL) echo $($(pf)) $(eval fuga := $(hoge)) echo $(fuga) $(eval a := $($(b))) echo $(a) $(eval d := $(e)) echo $(d) $(eval g := $(h)) echo _$(g)_ endef a.x=X y:=Y $(foreach tag,x,$(eval a.$(tag)+=$(y))) y:=Z $(foreach tag,x,$(eval a.$(tag)+=$(y))) $(info $(a.x)) test: $(call evaltest) 
testcase/eval_starts_with_comment.mk0100644 0000000 0000000 00000000116 13654546140 017063 0ustar000000000 0000000 .PHONY: test define _rule # comment test: echo PASS endef $(eval $(_rule)) testcase/excl_in_shell.mk0100644 0000000 0000000 00000000067 13654546140 014574 0ustar000000000 0000000 test: @if ! false; then echo PASS; else echo FAIL; fi testcase/expanded_nl.mk0100644 0000000 0000000 00000000052 13654546140 014237 0ustar000000000 0000000 # TODO(go): Fix define nl endef $(nl) testcase/explicit_pattern_rule.mk0100644 0000000 0000000 00000000207 13654546140 016365 0ustar000000000 0000000 # Preparation: create foo.c test1: touch foo.c # foo.o should match the pattern rule below. test2: foo.o foo.o: %.o: %.c echo PASS testcase/export.mk0100644 0000000 0000000 00000001025 13654546140 013300 0ustar000000000 0000000 export FOO = PASS_FOO export FOO2=PASS_FOO2 BAR := PASS_BAR export BAR export X Y Z X := PASS_X Y := PASS_Y Z := PASS_Z NOT_EXPORTED := FAIL export BAZ = NOT_EXPORTED unexport BAZ unexport Y export Y X # GNU make 3 and 4 behave differently for this, but it must not mess # up FOO, BAR, X, Y, and Z. export FOO BAR X Y Z := FAIL VARREF:=VAR1 VAR2 export $(VARREF) VAR1:=PASS_VAR1 VAR2:=PASS_VAR2 test: echo $$FOO echo $$FOO2 echo $$BAR echo $$BAZ echo $$X echo $$Y echo $$Z echo $$VAR1 echo $$VAR2 echo $$NOT_EXPORTED testcase/export_export.mk0100644 0000000 0000000 00000000101 13654546140 014673 0ustar000000000 0000000 # TODO(c): Fix export=PASS export export test: echo $${export} testcase/fail_ignore_error.mk0100644 0000000 0000000 00000000067 13654546140 015453 0ustar000000000 0000000 # TODO(go-ninja): Fix test: false -false echo FAIL testcase/fail_subshell_in_recipe.mk0100644 0000000 0000000 00000000070 13654546140 016607 0ustar000000000 0000000 # TODO(go-ninja): Fix test: false (true) ; echo FAIL testcase/file_func.sh0100755 0000000 0000000 00000002152 13654546140 013721 0ustar000000000 0000000 #!/bin/sh # # Copyright 2016 Google Inc. 
All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e mk="$@" echo "PASS" >testfile cat < Makefile ifdef KATI SUPPORTS_FILE := 1 endif ifneq (,\$(filter 4.2%,\$(MAKE_VERSION))) SUPPORTS_FILE := 1 endif ifdef SUPPORTS_FILE \$(file >testwrite,PASS) \$(info Read not found: \$(if \$(file &1 testcase/filter-out.mk0100644 0000000 0000000 00000000254 13654546140 014054 0ustar000000000 0000000 objects=main1.o foo.o main2.o bar.o mains=main1.o main2.o # expect a list which contains all the object files not in `mains' test: echo $(filter-out $(mains),$(objects)) testcase/filter.mk0100644 0000000 0000000 00000000127 13654546140 013246 0ustar000000000 0000000 sources := foo.c bar.c baz.s ugh.h test: echo cc $(filter %.c %.s,$(sources)) -o foo testcase/final_global.sh0100644 0000000 0000000 00000002060 13654546140 014373 0ustar000000000 0000000 #!/bin/bash # # Copyright 2018 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
set -u mk="$@" function build() { cat < Makefile FOO $1$= bar FOO $2 baz all: EOF echo "Testcase: $1 $2" if echo "${mk}" | grep -q "^make"; then # Make doesn't support final assignment echo "Makefile:2: *** cannot assign to readonly variable: FOO" else ${mk} 2>&1 && echo "Clean exit" fi } build "=" "=" build "=" ":=" build "=" "+=" build ":=" ":=" build ":=" "+=" build ":=" "=" build "+=" ":=" build "+=" "+=" build "+=" "=" testcase/final_rule.sh0100644 0000000 0000000 00000001602 13654546140 014103 0ustar000000000 0000000 #!/bin/bash # # Copyright 2018 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" if echo "${mk}" | grep -q "^make"; then # Make doesn't support final assignment echo "Makefile:3: *** cannot assign to readonly variable: FOO" else cat < Makefile all: FOO :=$= bar FOO +=$= foo all: FOO +=$= baz all: EOF ${mk} 2>&1 && echo "Clean exit" fi testcase/final_rule2.sh0100644 0000000 0000000 00000001602 13654546140 014165 0ustar000000000 0000000 #!/bin/bash # # Copyright 2018 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" if echo "${mk}" | grep -q "^make"; then # Make doesn't support final assignment echo "Makefile:3: *** cannot assign to readonly variable: FOO" else cat < Makefile all: FOO +=$= bar FOO +=$= foo all: FOO +=$= baz all: EOF ${mk} 2>&1 && echo "Clean exit" fi testcase/find_command.mk0100644 0000000 0000000 00000011372 13654546140 014403 0ustar000000000 0000000 # TODO(go|ninja): This test is only for ckati. ninja: multiple problems # go: symlink support isn't enough. # ninja: find . finds ninja temporary files # ninja: escaping ! doesn't seem to be working # ninja: stderr gets reordered ifeq ($(shell uname),Darwin) USE_GNU_FIND:= else USE_GNU_FIND:=1 endif define run_find @echo $$ '$(strip $(1))' @echo $(shell $(1)) endef test1: mkdir testdir touch testdir/file1 touch testdir/file2 mkdir testdir/dir1 touch testdir/dir1/file1 touch testdir/dir1/file2 touch testdir/dir1/file3 mkdir testdir/dir2 touch testdir/dir2/file1 touch testdir/dir2/file2 touch testdir/dir2/file3 ln -s ../dir1/file1 testdir/dir2/link1 ln -s ../../testdir/dir1 testdir/dir2/link2 ln -s broken testdir/dir2/link3 mkdir -p build/tools cp ../../testcase/tools/findleaves.py build/tools mkdir -p testdir3/b/c/d ln -s b testdir3/a touch testdir3/b/c/d/e mkdir -p testdir4/a/b ln -s self testdir4/self ln -s .. testdir4/a/b/c ln -s b testdir4/a/l mkdir -p testdir5 ln -s a testdir5/a ln -s b testdir5/c ln -s c testdir5/b test2: @echo no options $(call run_find, find testdir) $(call run_find, find .) 
ifeq ($(USE_GNU_FIND),1) $(call run_find, find ./) $(call run_find, find .///) $(call run_find, find ) $(call run_find, find ./.) $(call run_find, find ././) endif $(call run_find, find testdir/../testdir) @echo print $(call run_find, find testdir -print) @echo conditiions $(call run_find, find testdir -name foo) $(call run_find, find testdir -name file1) $(call run_find, find testdir -name "file1") $(call run_find, find testdir -name "file1") $(call run_find, find testdir -name "*1") $(call run_find, find testdir -name "*1" -name "file*") $(call run_find, find testdir -name "*1" -and -name "file*") $(call run_find, find testdir -name "*1" -or -name "file*") $(call run_find, find testdir -name "*1" -or -type f) $(call run_find, find testdir -name "*1" -or -not -type f) $(call run_find, find testdir -name "*1" -or \! -type f) $(call run_find, find testdir -name "*1" -or -type d) $(call run_find, find testdir -name "*1" -or -type l) $(call run_find, find testdir -name "*1" -a -type l -o -name "dir*") $(call run_find, find testdir -name "dir*" -o -name "*1" -a -type l) $(call run_find, find testdir \( -name "dir*" -o -name "*1" \) -a -type f) @echo cd $(call run_find, cd testdir && find) $(call run_find, cd testdir/// && find .) 
$(call run_find, cd testdir///dir1// && find .///) $(call run_find, cd testdir && find ../testdir) @echo test $(call run_find, test -d testdir && find testdir) $(call run_find, if [ -d testdir ] ; then find testdir ; fi) $(call run_find, if [ -d testdir ]; then find testdir; fi) $(call run_find, if [ -d testdir ]; then cd testdir && find .; fi) $(call run_find, test -d testdir//dir1/// && find testdir///dir1///) $(call run_find, test -d testdir//.///dir1/// && find testdir//.///dir1///) @echo prune $(call run_find, find testdir -name dir2 -prune -o -name file1) @echo multi $(call run_find, find testdir testdir) @echo symlink $(call run_find, find -L testdir -type f) $(call run_find, find -L testdir -type d) $(call run_find, find -L testdir -type l) $(call run_find, cd testdir; find -L . -type f) $(call run_find, cd testdir; find -L . -type d) $(call run_find, cd testdir; find -L . -type l) @echo maxdepth $(call run_find, find testdir -maxdepth 1) $(call run_find, find testdir -maxdepth 2) $(call run_find, find testdir -maxdepth 0) $(call run_find, find testdir -maxdepth hoge) $(call run_find, find testdir -maxdepth 1hoge) $(call run_find, find testdir -maxdepth -1) @echo findleaves $(call run_find, build/tools/findleaves.py testdir file1) $(call run_find, build/tools/findleaves.py testdir file3) $(call run_find, build/tools/findleaves.py --prune=dir1 testdir file3) $(call run_find, build/tools/findleaves.py --prune=dir1 --prune=dir2 testdir file3) $(call run_find, build/tools/findleaves.py --mindepth=1 testdir file1) $(call run_find, build/tools/findleaves.py --mindepth=2 testdir file1) $(call run_find, build/tools/findleaves.py --mindepth=3 testdir file1) $(call run_find, build/tools/findleaves.py --mindepth=2 testdir file1) $(call run_find, build/tools/findleaves.py --prune=dir1 --dir=testdir file1) $(call run_find, build/tools/findleaves.py --prune=dir1 --dir=testdir file3 link3) @echo missing chdir / testdir $(call run_find, cd xxx && find .) 
$(call run_find, if [ -d xxx ]; then find .; fi) test3: $(call run_find, find testdir3/a/c) $(call run_find, if [ -d testdir3/a/c ]; then find testdir3/a/c; fi) $(call run_find, cd testdir3/a/c && find .) $(call run_find, build/tools/findleaves.py testdir3 e) test4: $(call run_find, find -L testdir4) test5: $(call run_find, find -L testdir5) $(call run_find, build/tools/findleaves.py testdir5 x) testcase/find_command_sorted.mk0100644 0000000 0000000 00000000430 13654546140 015754 0ustar000000000 0000000 # TODO(ninja): This test is only for ckati. ninja: fix $(sort $(shell $(1))) # go: implement generic builtin find # ninja: $(sort $(shell "find .")) becomes "$( .) find" define run_find @echo $$ '$(strip $(1))' @echo $(sort $(shell $(1))) endef test1: $(call run_find, find .) testcase/findstring.mk0100644 0000000 0000000 00000000230 13654546140 014123 0ustar000000000 0000000 test: echo $(findstring a, a b c) echo $(findstring b, a b c) echo $(findstring b c, a b c) echo $(findstring a, b c) echo $(findstring a, b c, a) testcase/first_rule.mk0100644 0000000 0000000 00000000035 13654546140 014135 0ustar000000000 0000000 %: echo FAIL a b: echo $@ testcase/firstword.mk0100644 0000000 0000000 00000000072 13654546140 014003 0ustar000000000 0000000 test: echo $(firstword foo bar baz) echo $(firstword ) testcase/flavor.mk0100644 0000000 0000000 00000000573 13654546140 013257 0ustar000000000 0000000 # https://android.googlesource.com/platform/external/compiler-rt/+/master/make/util.mk#44 A=a B:=b C+=c D?=d AREF:=A all: echo $(flavor A) $(flavor B) $(flavor C) $(flavor D) $(flavor E) echo $(flavor PATH) echo $(flavor MAKEFILE_LIST) echo $(flavor $(AREF)) echo $(flavor CC) # For some reason, $(flavor MAKECMDGOALS) should be "undefined" # echo $(flavor MAKECMDGOALS) testcase/foreach.mk0100644 0000000 0000000 00000000222 13654546140 013364 0ustar000000000 0000000 base := base dirs := a b c d dir := FAIL files := $(foreach dir,$(dirs),$(foreach 
subdir,$(dirs),$(dir)/$(subdir)/$(base))) test: echo $(files) testcase/func_backslash.mk0100644 0000000 0000000 00000000026 13654546140 014725 0ustar000000000 0000000 $(info\ PASS) test: testcase/func_nop.mk0100644 0000000 0000000 00000000070 13654546140 013565 0ustar000000000 0000000 define V $(eval ## comment)PASS endef test: echo $(V) testcase/func_with_backslash.mk0100644 0000000 0000000 00000000060 13654546140 015756 0ustar000000000 0000000 $(info $(if \ $(X),FAIL,PASS)) test: echo OK testcase/gen_testcase_parse_benchmark.go0100644 0000000 0000000 00000004072 13654546140 017632 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // gen_testcase_parse_benchmark is a program to generate benchmark tests // for parsing testcases. 
// package main import ( "fmt" "io" "os" "path/filepath" "strings" "text/template" ) const preamble = `package kati import ( "io/ioutil" "testing" ) ` var tmpl = template.Must(template.New("benchmarktest").Parse(` func BenchmarkTestcaseParse{{.Name}}(b *testing.B) { data, err := ioutil.ReadFile({{.Filename | printf "%q"}}) if err != nil { b.Fatal(err) } mk := string(data) b.ResetTimer() for i := 0; i < b.N; i++ { parseMakefileString(mk, srcpos{ filename: {{.Filename | printf "%q"}}, lineno: 0, }) } } `)) func testName(fname string) string { base := filepath.Base(fname) i := strings.Index(base, ".") if i >= 0 { base = base[:i] } base = strings.Replace(base, "-", "", -1) tn := strings.Title(base) return tn } func writeBenchmarkTest(w io.Writer, fname string) { name := testName(fname) if strings.HasPrefix(name, "Err") { return } err := tmpl.Execute(w, struct { Name string Filename string }{ Name: testName(fname), Filename: fname, }) if err != nil { panic(err) } } func main() { f, err := os.Create("testcase_parse_benchmark_test.go") if err != nil { panic(err) } defer func() { err := f.Close() if err != nil { panic(err) } }() fmt.Fprint(f, preamble) matches, err := filepath.Glob("testcase/*.mk") if err != nil { panic(err) } for _, tc := range matches { writeBenchmarkTest(f, tc) } } testcase/hash_in_var.mk0100644 0000000 0000000 00000000172 13654546140 014242 0ustar000000000 0000000 $(shell mkdir -p tmp) file = $(shell echo tmp/test\#.ext) all: test1 test1: $(file) echo PASS $(file): touch $(file) testcase/if.mk0100644 0000000 0000000 00000000503 13654546140 012355 0ustar000000000 0000000 TRUE:=foo FALSE:= $(if ${TRUE}, $(info PASS1)) $(if ${FALSE}, $(info FAIL1)) $(if ${TRUE}, $(info PASS2), $(info FAIL2)) $(if ${FALSE}, $(info FAIL3), $(info PASS3)) $(info $(if ${TRUE}, PASS4, FAIL4)) # Too many arguments $(info $(if ${FALSE}, FAIL5, PASS5, PASS6)) $(info $(if ${FALSE} , FAIL6, PASS7)) test: echo OK testcase/if_recipe.mk0100644 0000000 0000000 00000000327 13654546140 
013710 0ustar000000000 0000000 test1: echo TEST ifdef UNDEFINED echo FAIL else echo PASS endif echo DONE test2: ifdef UNDEFINED echo FAIL else echo PASS endif echo DONE test3: ifndef UNDEFINED echo PASS else echo FAIL endif echo DONE testcase/ifdef_rec_var.mk0100644 0000000 0000000 00000000120 13654546140 014530 0ustar000000000 0000000 empty=$(info FAIL) rec=$(empty) ifdef rec $(info PASS) else $(info FAIL) endif testcase/ifdef_ret_in_arg.mk0100644 0000000 0000000 00000000077 13654546140 015233 0ustar000000000 0000000 define x a b endef $(x):=PASS ifdef $(x) $(info $($(x))) endif testcase/ifdef_with_comments.mk0100644 0000000 0000000 00000000617 13654546140 016002 0ustar000000000 0000000 VAR:=OK ifdef VAR PASS1:=PASS1 endif # foo ifdef VAR # hoge PASS2:=PASS2 endif # foo ifeq ($(VAR),OK) # hoge PASS3:=PASS3 else # bar $(error fail) endif # foo ifeq ($(VAR),NOK) # hoge $(error fail) else # bar PASS4:=PASS4 endif # foo ifeq ($(VAR),NOK)# hoge $(error fail) else# bar PASS5:=PASS5 endif# foo test: echo $(PASS1) echo $(PASS2) echo $(PASS3) echo $(PASS4) echo $(PASS5) testcase/ifdef_with_trailing_space.mk0100644 0000000 0000000 00000000306 13654546140 017134 0ustar000000000 0000000 # TODO(go): Fix A := a # comment ifdef $(A) $(error FAIL) else $(info PASS) endif a := b ifdef $(A) $(info PASS) else $(error FAIL) endif ifdef a # comment $(info PASS) else $(error FAIL) endif testcase/ifeq_without_parens.mk0100644 0000000 0000000 00000001022 13654546140 016033 0ustar000000000 0000000 VAR=var VARREF=VAR EMPTY= UNDEFREF=UNDEFINED RESULT= ifeq "$(VAR)" "var" RESULT += PASS else RESULT += FAIL endif ifneq "$(VAR)" "var" RESULT += FAIL else RESULT += PASS endif ifeq '$(VAR)' "var" RESULT += PASS else RESULT += FAIL endif ifeq "$(VAR)" 'var' RESULT += PASS else RESULT += FAIL endif ifeq "$(UNDEFINED)" "" RESULT += PASS else RESULT += FAIL endif ifeq "" "$(UNDEFINED)" RESULT += PASS else RESULT += FAIL endif ifeq "var var" "$(VAR) $(VAR)" RESULT += PASS else RESULT += FAIL endif 
test: echo $(RESULT) testcase/ignore_error.mk0100644 0000000 0000000 00000000016 13654546140 014452 0ustar000000000 0000000 test: -false testcase/implicit_pattern_rule.mk0100644 0000000 0000000 00000000377 13654546140 016366 0ustar000000000 0000000 # Preparation: create foo.c test1: touch foo.c # foo.o should match the pattern rule below. test2: foo.o %.o: %.c echo FAIL # This passes with GNU make 4.0 but fails with 3.81. #%o: %c # echo FAIL2 # The last one should be used. %.o: %.c echo PASS testcase/implicit_pattern_rule_chain.mk0100644 0000000 0000000 00000000176 13654546140 017525 0ustar000000000 0000000 test: foo foo: foo.o echo link foo %.o: %.c echo compile from $< to $@ foo.c: genc echo generate $@ .PHONY: genc genc: testcase/implicit_pattern_rule_chain2.mk0100644 0000000 0000000 00000000244 13654546140 017603 0ustar000000000 0000000 # TODO: Fix. We probably need to assume foo.y exists as there's a rule # to generate it. test1: touch foo.x test2: foo.z %.z: %.y cp $< $@ %.y: %.x cp $< $@ testcase/implicit_pattern_rule_for_no_commands.mk0100644 0000000 0000000 00000000257 13654546140 021606 0ustar000000000 0000000 test: foo.c foo.h foo.c foo.h: touch $@ test2: foo CFLAGS=-O foo: foo.o echo cc $(CFLAGS) -o $@ $< foo.o: CFLAGS=-g foo.o: foo.h %.o: %.c echo cc $(CFLAGS) -o $@ -c $< testcase/implicit_pattern_rule_phony.mk0100644 0000000 0000000 00000000201 13654546140 017565 0ustar000000000 0000000 test: foo.x test2: all .PHONY: FORCE FORCE: all: foo.y echo $@ from $< %.y: %.x FORCE echo $@ from $< foo.x: touch foo.x testcase/implicit_pattern_rule_prefix.mk0100644 0000000 0000000 00000000345 13654546140 017736 0ustar000000000 0000000 MAKEVER:=$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]') test: abcd abcd: # GNU make 3 does not prioritize the rule with a shortest stem. 
ifeq ($(MAKEVER),4) a%: echo FAIL endif abc%: echo PASS ab%: echo FAIL testcase/implicit_pattern_rule_warn.sh0100644 0000000 0000000 00000002361 13654546140 017413 0ustar000000000 0000000 #!/bin/bash # # Copyright 2018 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" cat < Makefile test: @echo "PASS" # Static pattern rules are still supported a.foo b.foo: %.foo: %.bar cp $< $@ %.foo: %.bar cp $< $@ EOF if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. echo 'Makefile:6: warning: implicit rules are deprecated: %.foo' echo 'PASS' else ${mk} --no_builtin_rules --warn_implicit_rules 2>&1 fi if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. echo 'Makefile:6: *** implicit rules are obsolete: %.foo' else ${mk} --no_builtin_rules --werror_implicit_rules 2>&1 fi testcase/include.mk0100644 0000000 0000000 00000000126 13654546140 013403 0ustar000000000 0000000 test1: echo "foo: bar" > foo.d test2: foo bar: echo OK -include foo.d missing.d testcase/include_glob.mk0100644 0000000 0000000 00000000112 13654546140 014401 0ustar000000000 0000000 test1: echo "foo: bar" > foo.d test2: foo bar: echo OK -include *.d testcase/include_glob_order.mk0100644 0000000 0000000 00000000425 13654546140 015603 0ustar000000000 0000000 MAKEVER:=$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]') # GNU make 4 doesn't sort glob results. 
ifeq ($(MAKEVER,4)) $(info test skipped) else test1: echo '$$(info foo)' > foo.d echo '$$(info bar)' > bar.d test2: echo $(wildcard *.d) -include *.d endif testcase/include_var.mk0100644 0000000 0000000 00000000171 13654546140 014253 0ustar000000000 0000000 foo=foo.d missing.d empty= test1: echo "foo: bar" > foo.d test2: foo bar: echo OK -include $(foo) include $(empty) testcase/info.mk0100644 0000000 0000000 00000000064 13654546140 012714 0ustar000000000 0000000 test: echo $(info "%s:%s" foo bar)xxx $(info baz) testcase/join.mk0100644 0000000 0000000 00000000163 13654546140 012720 0ustar000000000 0000000 foo:=$(join a b,.c .o) # produces `a.c b.o'. test: echo $(foo) echo $(join a b c, 0 1) echo $(join a b, 0 1 2) testcase/kati_cache.sh0100755 0000000 0000000 00000002253 13654546140 014044 0ustar000000000 0000000 #!/bin/sh # # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e mk="$@" cat < Makefile all: foo foo: echo foo EOF # Pretend to be a very old Makefile. touch -t 197101010000 Makefile "$@" if [ -e .kati_cache.Makefile ]; then if ! grep -q 'Cache not found' kati.INFO; then echo 'Cache unexpectedly found' fi fi "$@" if [ -e .kati_cache.Makefile ]; then if ! grep -q 'Cache found' kati.INFO; then echo 'Cache unexpectedly not found' fi fi cat <> Makefile echo bar EOF "$@" if [ -e .kati_cache.Makefile ]; then if ! 
grep -q 'Cache expired' kati.INFO; then echo 'Cache unexpectedly not expired' fi fi testcase/last_resort.mk0100644 0000000 0000000 00000000034 13654546140 014317 0ustar000000000 0000000 test: foo %: echo PASS_$@ testcase/lastword.mk0100644 0000000 0000000 00000000070 13654546140 013615 0ustar000000000 0000000 test: echo $(lastword foo bar baz) echo $(lastword ) testcase/lineno_in_call.mk0100644 0000000 0000000 00000000135 13654546140 014725 0ustar000000000 0000000 define foo $(eval X:=) \ $(eval X:=) \ $(warning foo) endef $(call foo) test: echo FOO testcase/makecmdgoals.mk0100644 0000000 0000000 00000000034 13654546140 014405 0ustar000000000 0000000 test: echo $(MAKECMDGOALS) testcase/makefile_list.mk0100644 0000000 0000000 00000000377 13654546140 014600 0ustar000000000 0000000 test1: echo $(MAKEFILE_LIST) touch foo.mk test2: echo $(MAKEFILE_LIST) touch bar.mk test3: echo $(MAKEFILE_LIST) test4: MAKEFILE_LIST=PASS test4: echo $(MAKEFILE_LIST) -include foo.mk bar.mk -include bar.mk -include foo.mk -include ./././foo.mk testcase/merge_inputs.mk0100644 0000000 0000000 00000000106 13654546140 014457 0ustar000000000 0000000 test1: touch bar baz test2: foo foo: bar foo: baz foo: echo $< $^ testcase/merge_output_pattern.mk0100644 0000000 0000000 00000000126 13654546140 016234 0ustar000000000 0000000 test: foo.o foo.o: %.o: %.c foo.o: foo.h echo $^ cp $< $@ foo.h foo.c: touch $@ testcase/merge_target_specific_vars.mk0100644 0000000 0000000 00000000065 13654546140 017327 0ustar000000000 0000000 test: foo foo: A:=FAIL foo: A:=PASS foo: echo $(A) testcase/multi_explicit_output_patterns.mk0100644 0000000 0000000 00000000100 13654546140 020343 0ustar000000000 0000000 test: zzz zzz: %z: %a zzz: z%: a% echo $@ azz zza: echo $@ testcase/multi_explicit_output_patterns_double_colon.mk0100644 0000000 0000000 00000000136 13654546140 023100 0ustar000000000 0000000 # TODO: Fix test: xyz xyz:: %z: %a echo 1 $* xyz:: x%: a% echo 2 $* ayz xya: echo 3 $@ 
testcase/multi_implicit_output_patterns.mk0100644 0000000 0000000 00000000517 13654546140 020350 0ustar000000000 0000000 # TODO(go): Fix MAKEVER:=$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]') all: a.h.x a.c.x a.h.z a.c.z b.h.x b.c.x b.h.z b.c.z a.h.%: echo twice $@ a.c.%: echo twice $@ b.h.% b.c.%: echo once $@ b.h.z: pass # GNU make 4 invokes this rule. ifeq ($(MAKEVER,3)) b.c.z: fail endif pass: echo PASS fail: echo FAIL testcase/multi_outputs.mk0100644 0000000 0000000 00000000052 13654546140 014713 0ustar000000000 0000000 test: foo bar foo bar baz: echo PASS_$@ testcase/multi_pattern_rule.mk0100644 0000000 0000000 00000000350 13654546140 015675 0ustar000000000 0000000 # Preparation: create foo.c test1: touch foo.c exist # foo.o should match the suffix rule below. test2: foo.o %.o: %.c not_exist echo FAIL %.o: %.c exist echo PASS $@ $< $^ %.o: %.c not_exist echo FAIL %.o: %.cc echo FAIL testcase/multi_rule.mk0100644 0000000 0000000 00000000325 13654546140 014142 0ustar000000000 0000000 test: foo.c # simplified case for gyp-generated action targets # with 'process_outputs_as_sources': 1 # and 'hard_dependency': 1 foo.c: CFLAGS:=-g foo.c: echo generating foo.c outputs := foo.c $(outputs): | testcase/multi_rule_order_only.mk0100644 0000000 0000000 00000000367 13654546140 016404 0ustar000000000 0000000 test: foo.c foo.c: touch foo.c OBJS := foo.o $(OBJS): | bar.a $(OBJS): CFLAGS:=-g %.o: %.c FORCE_DO_CMD echo compile $@ from $< .PHONY: FORCE_DO_CMD FORCE_DO_CMD: bar.a: echo archive $@ foo.a: $(OBJS) echo archive $@ test2: foo.a testcase/multi_suffix_rule.mk0100644 0000000 0000000 00000000230 13654546140 015521 0ustar000000000 0000000 # Preparation: create foo.c test1: touch foo.c # foo.o should match the suffix rule below. 
test2: foo.o .c.o: echo PASS $@ $< $^ .cc.o: echo FAIL testcase/multiline_and_leading_space.mk0100644 0000000 0000000 00000000125 13654546140 017441 0ustar000000000 0000000 X:=foo \ bar Y:=foo \ \ bar $(info foo \ bar) test: echo PASS $(X) $(Y) testcase/multiline_and_lineno.mk0100644 0000000 0000000 00000000072 13654546140 016150 0ustar000000000 0000000 $(warning foo) # foo \ bar $(warning bar) test: echo foo testcase/multiline_arg.mk0100644 0000000 0000000 00000000224 13654546140 014612 0ustar000000000 0000000 SHELL:=/bin/bash define func $(info INFO: $(1)) echo $(1) endef $(info INFO2: $(call func, \ foo)) test: $(call func, \ foo) $(call func, \) testcase/multiline_define.mk0100644 0000000 0000000 00000000525 13654546140 015277 0ustar000000000 0000000 define or1 $(or , \ ,,) endef define or2 $(or ,,, \ ) endef define or3 $(or , , ,) endef define var A\ B endef define var2 A\ \ B endef define var3 A\ B endef test: echo $(if $(call or1),FAIL,PASS)_or1 echo $(if $(call or2),FAIL,PASS)_or2 echo $(if $(call or3),FAIL,PASS)_or3 $(info $(var)) $(info $(var2)) $(info $(var3)) testcase/multiline_recipe.mk0100644 0000000 0000000 00000001211 13654546140 015305 0ustar000000000 0000000 # TODO(c): fix test6 - \t$${empty} should be empty. 
MAKEVER:=$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]') ifeq ($(MAKEVER),4) AT=@ endif # http://www.gnu.org/software/make/manual/make.html#Splitting-Recipe-Lines test1: $(AT) echo no\ space $(AT) # echo no\ # space $(AT) echo one \ space $(AT) echo one\ space test2: $(AT) for d in foo bar; do \ echo $$d ; done define cmd3 echo foo echo bar endef test3: $(cmd3) define cmd4 echo foo ; \ echo bar endef test4: $(cmd4) test5: $(AT) echo foo \ $$empty bar test6: echo foo\ $${empty}bar define cmd7 @echo first @echo second endef test7: $(cmd7) \ third testcase/multiple_output_patterns.mk0100644 0000000 0000000 00000000150 13654546140 017150 0ustar000000000 0000000 test: foo.o foo.o: %.o: %.c foo.o: foo.%: bar.% foo.o: foo.h cp $< $@ foo.h foo.c bar.o: touch $@ testcase/nested_call.mk0100644 0000000 0000000 00000000374 13654546140 014242 0ustar000000000 0000000 define inner {$(1)|$(origin 1),$(2)|$(origin 2)} endef define macro $(call inner,$(1)) \ $(call inner,test2) \ $(call inner,test3,) \ $(call inner,test4,macro) \ $(call inner) endef 2=global test: @echo "$(call macro,test1)" @echo "$(call macro)" testcase/nested_define.mk0100644 0000000 0000000 00000000423 13654546140 014554 0ustar000000000 0000000 define outer define inner PASS endef define inner_fail FAIL endef endef # Prefixed defines don't increase the nest level. define outer_override override define inner2 export define inner3 endef A := $(inner_fail) $(eval $(outer)) foo: echo $(A) echo $(inner) testcase/ninja_implicit_dependent.sh0100755 0000000 0000000 00000002217 13654546140 017010 0ustar000000000 0000000 #!/bin/bash # # Copyright 2018 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e mk="$@" cat <Makefile all: secondary_dep secondary_dep: secondary @touch \$@ @echo Made \$@ primary: .KATI_IMPLICIT_OUTPUTS := secondary primary: @touch primary secondary @echo Made primary+secondary EOF if [[ "${mk}" =~ ^make ]]; then echo Made primary+secondary echo Made secondary_dep echo Made secondary_dep echo Nothing to do else ${mk} -j1 ./ninja.sh -j1 -w dupbuild=err; sleep 1 touch secondary ./ninja.sh -j1 -w dupbuild=err; sleep 1 echo Nothing to do touch primary ./ninja.sh -j1 -w dupbuild=err; fi testcase/ninja_implicit_output_var.sh0100644 0000000 0000000 00000001510 13654546140 017242 0ustar000000000 0000000 #!/bin/bash # # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e mk="$@" cat <Makefile all: a a: if ! 
[ -z "\$(VAR)" ]; then echo \$(VAR); fi a: .KATI_IMPLICIT_OUTPUTS := b b: VAR := OK EOF ${mk} -j1 if [ -e ninja.sh ]; then ./ninja.sh -j1 -w dupbuild=err; else echo OK fi testcase/ninja_implicit_outputs.sh0100644 0000000 0000000 00000001551 13654546140 016562 0ustar000000000 0000000 #!/bin/bash # # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e mk="$@" cat <Makefile all: a b a b: touch A echo 1 >>A d: a c: .KATI_IMPLICIT_OUTPUTS := d c: touch C echo 1 >>C c d: b EOF ${mk} -j1 all d c if [ -e ninja.sh ]; then ./ninja.sh -j1 -w dupbuild=err all d; fi echo "A:" cat A echo "C": cat C testcase/ninja_mkdir.sh0100644 0000000 0000000 00000001756 13654546140 014262 0ustar000000000 0000000 #!/bin/sh # # Copyright 2016 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e log=/tmp/log mk="$@" cat < Makefile test: a/b a/b: @mkdir -p \$(dir \$@) touch \$@ EOF ${mk} 2> ${log} if [ -e ninja.sh ]; then ./ninja.sh fi if [[ ! 
-d a ]]; then echo "Created 'a'" fi if [ -e ninja.sh ]; then if grep -q "mkdir -p" build.ninja; then echo "Should not include 'mkdir -p' in build.ninja" echo "Ninja will automatically create this directory" fi fi testcase/ninja_normalized_path.mk0100644 0000000 0000000 00000000305 13654546140 016316 0ustar000000000 0000000 # TODO(ninja): This is tough to fix with ninja. Ninja normalizes # target names while make does not. test1: mkdir a b test2: a/b a/../a/b a/./b b/a b/../b/a b/./a a/%: touch $@ b/%: echo $@ testcase/ninja_phony_targets.sh0100644 0000000 0000000 00000001637 13654546140 016040 0ustar000000000 0000000 #!/bin/bash # # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e mk="$@" function build() { # Remove once regen works with different args rm -f build.ninja .kati_stamp ninja.sh ${mk} $@ if [ -e ninja.sh ]; then ./ninja.sh -j1 $@; fi } cat < Makefile .PHONY: a b b: echo \$@ a: echo \$@ EOF echo "===" build echo "---" build a echo "---" build b echo "===" testcase/ninja_pool.sh0100644 0000000 0000000 00000001757 13654546140 014126 0ustar000000000 0000000 #!/bin/sh # # Copyright 2016 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e log=/tmp/log mk="$@" cat < Makefile test: .KATI_NINJA_POOL := test_pool test: echo "PASS" EOF ${mk} 2>${log} if [ -e ninja.sh ]; then mv build.ninja kati.ninja cat < build.ninja pool test_pool depth = 1 include kati.ninja EOF ./ninja.sh fi if [ -e ninja.sh ]; then if ! grep -q "pool = test_pool" kati.ninja; then echo "Pool not present in build.ninja" fi fi testcase/ninja_regen.sh0100755 0000000 0000000 00000005753 13654546140 014260 0ustar000000000 0000000 #!/bin/sh # # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e log=/tmp/log mk="$@" sleep_if_necessary() { if [ x$(uname) != x"Linux" -o x"${TRAVIS}" != x"" ]; then sleep "$@" fi } export VAR=hoge cat < Makefile all: echo foo EOF ${mk} 2> ${log} if [ -e ninja.sh ]; then ./ninja.sh fi sleep_if_necessary 1 cat < Makefile \$(KATI_deprecated_var VAR4) \$(KATI_obsolete_var VAR5) VAR3 := unused all: echo bar echo VAR=\$(VAR) echo VAR2=\$(VAR2) echo VAR3=\$(VAR3) echo wildcard=\$(wildcard *.mk) other: echo foo EOF ${mk} 2> ${log} if [ -e ninja.sh ]; then if ! 
grep regenerating ${log} > /dev/null; then echo 'Should be regenerated (Makefile)' fi ./ninja.sh fi export VAR=fuga ${mk} 2> ${log} if [ -e ninja.sh ]; then if ! grep regenerating ${log} > /dev/null; then echo 'Should be regenerated (env changed)' fi ./ninja.sh fi export VAR2=OK ${mk} 2> ${log} if [ -e ninja.sh ]; then if ! grep regenerating ${log} > /dev/null; then echo 'Should be regenerated (env added)' fi ./ninja.sh fi export VAR3=testing ${mk} 2> ${log} if [ -e ninja.sh ]; then if grep regenerating ${log} >/dev/null; then echo 'Should not regenerate (unused env added)' fi ./ninja.sh fi export VAR3=test2 ${mk} 2> ${log} if [ -e ninja.sh ]; then if grep regenerating ${log} >/dev/null; then echo 'Should not regenerate (unused env changed)' fi ./ninja.sh fi export VAR4=foo ${mk} 2> ${log} if [ -e ninja.sh ]; then if grep regenerating ${log} >/dev/null; then echo 'Should not regenerate (deprecated env added)' fi ./ninja.sh fi export VAR5=foo ${mk} 2> ${log} if [ -e ninja.sh ]; then if grep regenerating ${log} >/dev/null; then echo 'Should not regenerate (obsolete env added)' fi ./ninja.sh fi export PATH=/random_path:$PATH ${mk} 2> ${log} if [ -e ninja.sh ]; then if ! grep regenerating ${log} > /dev/null; then echo 'Should be regenerated (PATH changed)' fi ./ninja.sh fi sleep_if_necessary 1 touch PASS.mk ${mk} 2> ${log} if [ -e ninja.sh ]; then if ! grep regenerating ${log} > /dev/null; then echo 'Should be regenerated (wildcard)' fi ./ninja.sh fi sleep_if_necessary 1 touch XXX ${mk} 2> ${log} if [ -e ninja.sh ]; then if grep regenerating ${log}; then echo 'Should not be regenerated' fi ./ninja.sh fi ${mk} other 2> ${log} if [ -e ninja.sh ]; then if ! grep regenerating ${log} >/dev/null; then echo 'Should be regenerated (argument)' fi ./ninja.sh other fi testcase/ninja_regen_filefunc_read.sh0100755 0000000 0000000 00000002720 13654546140 017115 0ustar000000000 0000000 #!/bin/sh # # Copyright 2016 Google Inc. 
All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e log=/tmp/log mk="$@" sleep_if_necessary() { if [ x$(uname) != x"Linux" -o x"${TRAVIS}" != x"" ]; then sleep "$@" fi } cat < Makefile A := \$(file ${log} if [ -e ninja.sh ]; then ./ninja.sh fi ${mk} 2> ${log} if [ -e ninja.sh ]; then if grep regenerating ${log}; then echo 'Should not be regenerated' fi ./ninja.sh fi echo regen >file_a ${mk} 2> ${log} if [ -e ninja.sh ]; then if ! grep regenerating ${log} >/dev/null; then echo 'Should be regenerated' fi ./ninja.sh fi ${mk} 2> ${log} if [ -e ninja.sh ]; then if grep regenerating ${log}; then echo 'Should not be regenerated' fi ./ninja.sh fi sleep_if_necessary 1 echo regen >>file_a ${mk} 2> ${log} if [ -e ninja.sh ]; then if ! grep regenerating ${log} >/dev/null; then echo 'Should be regenerated' fi ./ninja.sh fi testcase/ninja_regen_filefunc_write.sh0100755 0000000 0000000 00000002014 13654546140 017330 0ustar000000000 0000000 #!/bin/sh # # Copyright 2016 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. set -e log=/tmp/log mk="$@" cat < Makefile \$(file >file_a,test) all: echo foo EOF ${mk} 2> ${log} if [ -e ninja.sh ]; then if [ ! -f file_a ]; then echo 'file_a does not exist' fi ./ninja.sh rm file_a fi ${mk} 2> ${log} if [ -e ninja.sh ]; then if grep regenerating ${log}; then echo 'Should not be regenerated' fi if [ ! -f file_a ]; then echo 'file_a does not exist' fi ./ninja.sh fi testcase/ninja_regen_find_link.sh0100755 0000000 0000000 00000002215 13654546140 016263 0ustar000000000 0000000 #!/bin/bash # # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e sleep_if_necessary() { if [ x"${TRAVIS}" != x"" ]; then sleep "$@" fi } mk="$@" if echo "${mk}" | grep kati > /dev/null; then mk="${mk} --use_find_emulator" fi function build() { ${mk} $@ 2> /dev/null if [ -e ninja.sh ]; then ./ninja.sh -j1 $@; fi } cat < Makefile V := \$(shell find -L linkdir/d/link) all: @echo \$(V) EOF mkdir -p dir1 dir2 linkdir/d touch dir1/file1 dir2/file2 ln -s ../../dir1 linkdir/d/link build sleep_if_necessary 1 touch dir1/file1_2 build rm linkdir/d/link ln -s ../../dir2 linkdir/d/link build testcase/ninja_regen_glob.sh0100755 0000000 0000000 00000002004 13654546140 015245 0ustar000000000 0000000 #!/bin/sh # # Copyright 2016 Google Inc. 
All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e log=/tmp/log mk="$@" sleep_if_necessary() { if [ x$(uname) != x"Linux" -o x"${TRAVIS}" != x"" ]; then sleep "$@" fi } touch xe.mk yc.mk xa.mk yb.mk xd.mk cat < Makefile include *.mk all: echo foo EOF ${mk} 2> ${log} if [ -e ninja.sh ]; then ./ninja.sh fi ${mk} 2> ${log} if [ -e ninja.sh ]; then if grep regenerating ${log}; then echo 'Should not be regenerated' fi ./ninja.sh fi testcase/no_last_newline.mk0100644 0000000 0000000 00000000020 13654546140 015131 0ustar000000000 0000000 test: echo PASStestcase/not_command_with_tab.mk0100644 0000000 0000000 00000000215 13654546140 016136 0ustar000000000 0000000 test: foo bar empty:= $(empty) export A=PASS_A\ with_space foo: echo $$A rule:=bar: $(rule) export B=PASS_B; echo $${B}\ without_space testcase/notdir.mk0100644 0000000 0000000 00000000343 13654546140 013260 0ustar000000000 0000000 test: echo $(notdir foo) echo $(notdir foo,bar) echo $(notdir foo bar) echo $(notdir .) 
echo $(notdir /) echo $(notdir ) echo $(notdir src/foo.c hacks) echo $(notdir hacks src/foo.c) echo $(notdir hacks / src/foo.c) testcase/nothing_to_do.mk0100644 0000000 0000000 00000000012 13654546140 014604 0ustar000000000 0000000 Makefile: testcase/or.mk0100644 0000000 0000000 00000000510 13654546140 012375 0ustar000000000 0000000 TRUE:=foo FALSE:= XY:=x y X:=$(subst y, ,$(XY)) Y:=$(subst x, ,$(XY)) $(or ${FALSE}, $(info PASS_1)) # expect "foo" $(info $(or ${TRUE}, $(info FAIL_2))) # Too many arguments. $(info $(or ${FALSE}, PASS, PASS)) $(info $(or ${FALSE}, $(X) )) $(info $(or ${FALSE}, $(Y) )) $(info $(or ${FALSE} , PASS, PASS)) test: echo OK testcase/order_only.mk0100644 0000000 0000000 00000000326 13654546140 014136 0ustar000000000 0000000 test1: touch -t 197101010000 foo touch bar # Note order-only dependency will not appear in $^ test2: foo | bar echo PASS_$^ # bar is newer than foo but we should not rebuild it. foo: | bar baz baz: touch $@ testcase/order_only2.mk0100644 0000000 0000000 00000000370 13654546140 014217 0ustar000000000 0000000 # TODO(ninja): Ninja does not believe the timestamp so this test is invalid. test1: touch -t 197101010000 old1 touch -t 197101010000 old2 touch new test2: old1 old2 echo DONE old1: | new echo FAIL old2: new echo PASS new: echo FAIL_new testcase/origin.mk0100644 0000000 0000000 00000000660 13654546140 013252 0ustar000000000 0000000 FOO = foo FOO_SPACE_BAR:=foo bar FOO_COMMA_BAR:=foo,bar $(FOO_SPACE_BAR):=foo $(FOO_COMMA_BAR):=foo FOOREF := FOO test: echo $(origin FOO) echo $(origin FOO BAR) echo $(origin FOO,BAR) echo $(origin UNDEFINED) echo $(origin PATH) echo $(origin MAKEFILE_LIST) echo $(origin CC) echo $(origin $(FOOREF)) # TODO: support environment override, command line, and override. 
# TODO: Also add more tests especially for += and ?= testcase/override.mk0100644 0000000 0000000 00000000157 13654546140 013603 0ustar000000000 0000000 test: foo echo FAIL test: bar echo PASS_test foo: echo FAIL_foo foo: echo PASS_foo bar: echo PASS_bar testcase/override_define.mk0100644 0000000 0000000 00000001040 13654546140 015105 0ustar000000000 0000000 # http://www.gnu.org/software/make/manual/make.html#Multi_002dLine # see also define.mk override define two-lines echo foo echo $(bar) endef bar = xxx override CC := gcc override AS = as override define three-lines echo 1 echo 2 echo 3 endef override define four-lines echo I echo II echo III echo IV endef test: echo CC=$(CC) $(flavor CC) echo AS=$(AS) $(flavor AS) echo two BEGIN $(two-lines) END $(flavor two-lines) echo three BEGIN $(three-lines) END $(flavor three-lines) echo four BEGIN $(four-lines) END $(flavor four-lines) testcase/override_export.mk0100644 0000000 0000000 00000000454 13654546140 015204 0ustar000000000 0000000 # TODO(c|ninja): it overrides "export A" and exports(?) "override B" # ninja: can't export variable with space in name (by bash). override export A:=override_A export override B:=export_B A:=make_A B:=make_B test: echo $$A echo $$B echo $(export A) echo $(override B) env | grep 'override B' testcase/override_override.mk0100644 0000000 0000000 00000000335 13654546140 015500 0ustar000000000 0000000 override A:=PASS_A A:=FAIL_A override define B PASS_B endef B:=FAIL_B override C := FAIL_C override C := PASS_C C := FAIL_C2 test: echo $(A) echo $(origin A) echo $(B) echo $(origin B) echo $(C) echo $(origin C) testcase/override_rule.mk0100644 0000000 0000000 00000000254 13654546140 014630 0ustar000000000 0000000 test: override A=PASS # The behavior for this depends on the version of GNU make. It looks # like old GNU make has a bug here. 
# override : A=PASS_2 override : echo $(A) testcase/param.mk0100644 0000000 0000000 00000000314 13654546140 013057 0ustar000000000 0000000 # TODO(c): Fix 1:=foo $(info $(1) is foo) define param $(eval 1:=bar) param1-1=$(1) $(call param2,$(1)) endef define param2 param2-1=$(1) endef test: @echo call param $(call param,baz) @echo 1=$(1) testcase/parse_benchcmp.go0100644 0000000 0000000 00000005431 13654546140 014733 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /* Program parse_benchcmp runs testcase_parse_benchmark and displays performance changes. */ package main import ( "fmt" "os" "os/exec" "strings" ) func run(prog string, args ...string) { cmd := exec.Command(prog, args...) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr err := cmd.Run() if err != nil { panic(err) } } func output(prog string, args ...string) string { cmd := exec.Command(prog, args...) 
out, err := cmd.CombinedOutput() if err != nil { panic(err) } return strings.TrimSpace(string(out)) } func runBenchtest(fname string) { run("go", "generate") f, err := os.Create(fname) if err != nil { panic(err) } defer func() { err = f.Close() if err != nil { panic(err) } }() cmd := exec.Command("go", "test", "-run", "NONE", "-bench", ".") cmd.Stdout = f err = cmd.Run() if err != nil { panic(err) } } func main() { _, err := exec.LookPath("benchcmp") if err != nil { fmt.Fprintln(os.Stderr, "benchcmp not found:", err) fmt.Fprintln(os.Stderr, "install it by:") fmt.Fprintln(os.Stderr, " export GOPATH=$HOME # if not set") fmt.Fprintln(os.Stderr, " PATH=$PATH:$GOPATH/bin") fmt.Fprintln(os.Stderr, " go get -u golang.org/x/tools/cmd/benchcmp") os.Exit(1) } status := output("git", "status", "-s") if status != "" { fmt.Fprintln(os.Stderr, "workspace is dirty. please commit.") fmt.Fprintln(os.Stderr, status) os.Exit(1) } curBranch := output("git", "symbolic-ref", "--short", "HEAD") if curBranch == "master" { fmt.Fprintln(os.Stderr, "current branch is master.") fmt.Fprintln(os.Stderr, "run in branch to compare with master.") os.Exit(1) } fmt.Println("Run benchmark on master and ", curBranch) fmt.Println("git checkout master") run("git", "checkout", "master") run("git", "clean", "-f") commit := output("git", "log", "--oneline", "-1") fmt.Println(commit) fmt.Println("running benchmark tests...") runBenchtest("bench-old.out") fmt.Println("git checkout", curBranch) run("git", "checkout", curBranch) run("git", "clean", "-f") commit = output("git", "log", "--oneline", "-1") fmt.Println(commit) fmt.Println("running benchmark tests...") runBenchtest("bench-new.out") run("benchcmp", "bench-old.out", "bench-new.out") } testcase/patsubst.mk0100644 0000000 0000000 00000000066 13654546140 013630 0ustar000000000 0000000 test: echo $(patsubst %.c, %.o , x.c.c bar.c ) testcase/pattern_rules_priority.mk0100644 0000000 0000000 00000000514 13654546140 016611 0ustar000000000 0000000 # 
Preparation: create foo.c bar.c baz.cc test1: touch foo.c bar.c baz.cc test2: foo.o bar.o baz.o # The right choice for foo.o foo.o: %.o: %.c echo PASS_foo # The right choice for bar.o %.o: %.c echo PASS_bar # This rule should be dominated by other rules .c.o: echo FAIL # The right choice for baz.o .cc.o: echo PASS_baz testcase/phony.mk0100644 0000000 0000000 00000001027 13654546140 013116 0ustar000000000 0000000 .PHONY: foo echo PASS phony foo .PHONY: bar .PHONY: test4 # if no foo target, but foo is .PHONY, don't warn # "Circular baz <- foo dependency dropped.". baz: foo echo baz test1: foo bar baz echo PASS test1 from foo bar baz test3: touch test4 test4: echo PASS test4 # test5 is similar with test1, but foo2 has command. # foo2 runs once to build test5 even if it appears twice # test5 <- foo2, test5 <- baz2 <- foo2. .PHONY: foo2 foo2: echo foo2 baz2: foo2 echo baz2 test5: foo2 bar baz2 echo PASS test5 from foo bar baz testcase/phony_looks_real.sh0100644 0000000 0000000 00000002523 13654546140 015335 0ustar000000000 0000000 #!/bin/bash # # Copyright 2018 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" cat < Makefile test: foo/bar foo/baz foo/bar: .KATI_IMPLICIT_OUTPUTS := foo/baz foo/bar: @echo "END" .PHONY: test foo/bar EOF if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. 
echo 'Makefile:4: warning: PHONY target "foo/bar" looks like a real file (contains a "/")' echo 'Makefile:4: warning: PHONY target "foo/baz" looks like a real file (contains a "/")' echo 'END' else ${mk} --warn_phony_looks_real 2>&1 fi if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. echo 'Makefile:4: *** PHONY target "foo/bar" looks like a real file (contains a "/")' else ${mk} --werror_phony_looks_real 2>&1 fi testcase/phony_targets.sh0100644 0000000 0000000 00000001365 13654546140 014657 0ustar000000000 0000000 #!/bin/bash # # Copyright 2015 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e mk="$@" cat < Makefile .PHONY: a b b: echo \$@ a: echo \$@ EOF echo "===" ${mk} echo "---" ${mk} a echo "---" ${mk} b echo "===" testcase/posix_var.mk0100644 0000000 0000000 00000000471 13654546140 013775 0ustar000000000 0000000 # TODO(go): Fix MAKEVER:=$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]') # GNU make 3.82 has this feature though. ifeq ($(MAKEVER),3) test: echo test skipped else $(info $(shell echo foo)) override SHELL := echo $(info $(shell echo bar)) .POSIX: $(info $(shell echo baz)) test: foobar endif testcase/preserve_single_dot.mk0100644 0000000 0000000 00000000113 13654546140 016016 0ustar000000000 0000000 test: a/./b ./x a/./b: echo $@ mkdir -p a # for ninja. 
././x: echo $@ testcase/quine.mk0100644 0000000 0000000 00000000172 13654546140 013102 0ustar000000000 0000000 define q $$(info define q) $$(info $$(subst $$$$,$$$$$$$$,$$q)) $$(info endef) $$(info $$$$(eval $$$$q)) endef $(eval $q) testcase/readonly_global.sh0100644 0000000 0000000 00000002131 13654546140 015116 0ustar000000000 0000000 #!/bin/bash # # Copyright 2016 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" function build() { cat < Makefile FOO $1 bar .KATI_READONLY $2 FOO FOO $3 baz all: EOF echo "Testcase: $1 $2 $3" if echo "${mk}" | grep -q "^make"; then # Make doesn't support .KATI_READONLY echo "Makefile:3: *** cannot assign to readonly variable: FOO" else ${mk} 2>&1 && echo "Clean exit" fi } build "=" "=" "=" build "=" "+=" "=" build "=" ":=" "=" build "=" ":=" ":=" build "=" ":=" "+=" build ":=" ":=" ":=" build ":=" ":=" "+=" build ":=" ":=" "=" testcase/readonly_global_missing.sh0100644 0000000 0000000 00000001537 13654546140 016660 0ustar000000000 0000000 #!/bin/bash # # Copyright 2016 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" cat < Makefile all: FOO = bar .KATI_READONLY = FOO all: EOF if echo "${mk}" | grep -q "^make"; then # Make doesn't support .KATI_READONLY echo "Makefile:2: *** unknown variable: FOO" else ${mk} 2>&1 && echo "Clean exit" fi testcase/readonly_rule.sh0100644 0000000 0000000 00000002174 13654546140 014634 0ustar000000000 0000000 #!/bin/bash # # Copyright 2016 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
set -u mk="$@" function build() { cat < Makefile all: FOO $1 bar all: .KATI_READONLY $2 FOO FOO $3 foo all: FOO $3 baz all: EOF echo "Testcase: $1 $2 $3" if echo "${mk}" | grep -q "^make"; then # Make doesn't support .KATI_READONLY echo "Makefile:4: *** cannot assign to readonly variable: FOO" else ${mk} 2>&1 && echo "Clean exit" fi } #build "=" "=" "=" #build "=" "+=" "=" #build "=" ":=" "=" # #build "=" ":=" ":=" #build "=" ":=" "+=" # #build ":=" ":=" ":=" build ":=" ":=" "+=" #build ":=" ":=" "=" testcase/readonly_rule_missing.sh0100644 0000000 0000000 00000001537 13654546140 016367 0ustar000000000 0000000 #!/bin/bash # # Copyright 2016 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" cat < Makefile FOO = bar all: .KATI_READONLY = FOO all: EOF if echo "${mk}" | grep -q "^make"; then # Make doesn't support .KATI_READONLY echo "Makefile:2: *** unknown variable: FOO" else ${mk} 2>&1 && echo "Clean exit" fi testcase/real_to_phony.sh0100644 0000000 0000000 00000002234 13654546140 014627 0ustar000000000 0000000 #!/bin/bash # # Copyright 2018 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" cat < Makefile test: foo foo: bar @echo "END" bar: @exit 0 .PHONY: bar EOF if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. echo 'Makefile:3: warning: real file "foo" depends on PHONY target "bar"' echo 'END' else ${mk} --warn_real_to_phony 2>&1 fi if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. echo 'Makefile:3: *** real file "foo" depends on PHONY target "bar"' else ${mk} --werror_real_to_phony 2>&1 fi testcase/realpath.mk0100644 0000000 0000000 00000000310 13654546140 013553 0ustar000000000 0000000 foo = $(realpath ./foo) bar = $(realpath ./bar) foofoo = $(realpath ./foo ./foo) foobar = $(realpath ./foo ./bar) test: foo echo $(foo) echo $(bar) echo $(foofoo) echo $(foobar) foo: touch foo testcase/recipe_in_rule.mk0100644 0000000 0000000 00000000036 13654546140 014744 0ustar000000000 0000000 all: ; echo PASS1 echo PASS2 testcase/recipe_var.mk0100644 0000000 0000000 00000000031 13654546140 014072 0ustar000000000 0000000 foo=FAIL $$: echo "$@" testcase/recursive_command_expansion.mk0100644 0000000 0000000 00000000057 13654546140 017554 0ustar000000000 0000000 unexport A A="$${A}" B=$(A) test: echo $(B) testcase/recursive_marker.mk0100644 0000000 0000000 00000000023 13654546140 015324 0ustar000000000 0000000 test: +echo PASS testcase/rule_in_var.mk0100644 0000000 0000000 00000000047 13654546140 014267 0ustar000000000 0000000 RULE=foo: test: foo $(RULE) echo OK testcase/rule_with_extra_ws.mk0100644 0000000 0000000 00000000075 
13654546140 015701 0ustar000000000 0000000 test: foo bar foo: echo PASS_foo bar: echo PASS_foo testcase/segfault_stack_overflow.sh0100644 0000000 0000000 00000001765 13654546140 016717 0ustar000000000 0000000 #!/bin/bash # # Copyright 2017 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" cat < Makefile a = \$(a) \$(a) \$(a) \$(a) \$(a) EOF if echo "${mk}" | grep -qv "kati"; then # Make detects this differently echo 'Segmentation fault, last evaluated line was Makefile:2' else # runtest.rb strips *kati*: lines, so strip that prefix to test # Grab only *kati* lines, since asan may print a backtrace ${mk} 2>&1 | grep "*kati*" | sed "s/^\*kati\*: //" fi testcase/semi_in_var.mk0100644 0000000 0000000 00000000221 13654546140 014247 0ustar000000000 0000000 ECHO=@echo $@ SEMI=; RULE=bar: ; $(EHCO) all: foo bar baz foo: ; $(ECHO)_1 $(ECHO)_2 $(RULE) $(ECHO)_2 baz: $(SEMI) @echo $@_1 @echo $@_2 testcase/semicolon.mk0100644 0000000 0000000 00000000262 13654546140 013751 0ustar000000000 0000000 # When a line only has semicolons after variables are expanded, they # are silently ignored, for some reason. 
SEMI:=; $(SEMI) $(SEMI) $(SEMI) $(foreach v,x,;) test: echo PASS testcase/semicolon_in_var.mk0100644 0000000 0000000 00000000202 13654546140 015301 0ustar000000000 0000000 test: foo bar baz bazz A:=foo: ; echo PASS $(A) B:=bar: ; echo PA $(B)\ SS baz: ; echo PA\ SS SEMI=; bazz: $(SEMI) echo PA\ SS testcase/shell.mk0100644 0000000 0000000 00000000644 13654546140 013074 0ustar000000000 0000000 test: echo $(shell pwd) echo $(shell false) echo $(shell /bin/echo -e "\na \n b \n " ) echo $(shell /bin/echo -e "\na \n b \n " )X echo X$(shell /bin/echo -e "\n\n" )Y echo X$(shell /bin/echo -e "a\n\n" )Y echo X$(shell /bin/echo -e "\n\nb" )Y echo X$(shell /bin/echo -e "\n\nb" )Y echo X$(shell /bin/echo -e "\n\n\nb" )Y echo X$(shell /bin/echo -e " b" )Y echo X$(shell /bin/echo -e "b " )Y testcase/shell_arith_in_recipe.mk0100644 0000000 0000000 00000000060 13654546140 016270 0ustar000000000 0000000 test: echo $(filter 0,$(shell echo $$((1-1)))) testcase/shell_stderr.mk0100644 0000000 0000000 00000000104 13654546140 014446 0ustar000000000 0000000 FOO=$(shell echo SHOULD_NOT_BE_AFTER_ECHO 1>&2) test: echo $(FOO) testcase/shell_var.mk0100644 0000000 0000000 00000000213 13654546140 013734 0ustar000000000 0000000 $(info $(SHELL)) override SHELL:=/bin/echo $(info $(shell foo)) echo=/bin/echo override SHELL=$(echo) $(info $(shell bar)) test: baz testcase/shell_var_with_args.mk0100644 0000000 0000000 00000000437 13654546140 016013 0ustar000000000 0000000 # TODO(go): Fix MAKEVER:=$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]') ifeq ($(MAKEVER),4) # GNU make 4 escapes $(SHELL). 
test: echo test skipped else export FOO=-x override SHELL := PS4="cmd: " /bin/bash $${FOO} $(info $(shell echo foo)) test: @echo baz endif testcase/silent.mk0100644 0000000 0000000 00000000022 13654546140 013251 0ustar000000000 0000000 test: @echo foo testcase/silent_ignore_error.mk0100644 0000000 0000000 00000000030 13654546140 016024 0ustar000000000 0000000 test: -@false @-false testcase/silent_multiline.mk0100644 0000000 0000000 00000000213 13654546140 015335 0ustar000000000 0000000 define cmd echo foo echo bar endef define cmd2 echo baz @$(call cmd) endef test: $(call cmd) @$(call cmd) $(call cmd2) @$(call cmd2) testcase/sort.mk0100644 0000000 0000000 00000002236 13654546140 012753 0ustar000000000 0000000 sp := $(subst S, ,S) test: echo $(sort foo bar lose) echo $(sort foo bar aaaa) echo $(sort foo bar lose lose foo bar bar) echo $(sort baz bar) echo $(sort single) echo $(sort $(sp)foo$(sp)) echo $(sort ) echo $(sort device/sample/products/AndroidProducts.mk device/moto/shamu/AndroidProducts.mk device/asus/fugu/AndroidProducts.mk device/asus/deb/AndroidProducts.mk device/asus/flo/AndroidProducts.mk device/generic/arm64/AndroidProducts.mk device/generic/qemu/AndroidProducts.mk device/generic/mini-emulator-x86_64/AndroidProducts.mk device/generic/x86/AndroidProducts.mk device/generic/mips/AndroidProducts.mk device/generic/mini-emulator-x86/AndroidProducts.mk device/generic/mini-emulator-mips/AndroidProducts.mk device/generic/mini-emulator-arm64/AndroidProducts.mk device/generic/mini-emulator-armv7-a-neon/AndroidProducts.mk device/generic/x86_64/AndroidProducts.mk device/generic/armv7-a-neon/AndroidProducts.mk device/htc/flounder/AndroidProducts.mk device/lge/bullhead/AndroidProducts.mk device/lge/hammerhead/AndroidProducts.mk device/huawei/angler/AndroidProducts.mk) echo $(sort cpplint-art-phony libart libartd libgabi++ libopenjdkjvm libopenjdkjvmd libart) testcase/static_pattern.mk0100644 0000000 0000000 00000000332 13654546140 015003 0ustar000000000 0000000 
srcs := a.cc b.cc c.cc srcs := $(addprefix ./,$(srcs)) objs := $(patsubst ./%.cc,./%.o,$(srcs)) test: out out: $(objs) $(objs): ./%.o: ./%.cc echo $@: $<: $^ a.o: a.cc a.h b.o: b.cc a.h b.h c.o: b.cc a.h b.h c.h testcase/stem.mk0100644 0000000 0000000 00000000216 13654546140 012730 0ustar000000000 0000000 # TODO(go): Fix test: PASS_FAIL PASS2_FAIL2 FAIL3.FAIL4 %_FAIL: echo $* PASS2_FAIL2: %_FAIL2: echo $* FAIL3.FAIL4: echo $(or $*,PASS3) testcase/strip.mk0100644 0000000 0000000 00000000403 13654546140 013117 0ustar000000000 0000000 XY:=x y X:=$(subst y, ,$(XY)) Y:=$(subst x, ,$(XY)) define func foo bar endef test: echo $(X) echo $(Y) echo $(strip $(X)) echo $(strip $(Y)) echo $(strip $(Y),$(X)) echo $(strip $(XY)) $(info $(strip $(call func))) test2: echo $(strip $(X),$(Y)) testcase/strip_and_shell.mk0100644 0000000 0000000 00000000152 13654546140 015131 0ustar000000000 0000000 # TODO(c-ninja): $(shell) in another make expression is not supported. test: echo $(strip $(shell pwd)) testcase/submake/0040755 0000000 0000000 00000000000 13654546140 013062 5ustar000000000 0000000 testcase/submake/basic.mk0100644 0000000 0000000 00000000020 13654546140 014461 0ustar000000000 0000000 all: echo PASS testcase/submake_basic.mk0100644 0000000 0000000 00000000074 13654546140 014552 0ustar000000000 0000000 # TODO(go|c-ninja): Fix test: $(MAKE) -f submake/basic.mk testcase/subshell_in_recipe.mk0100644 0000000 0000000 00000000031 13654546140 015611 0ustar000000000 0000000 test: true (echo PASS) testcase/subst.mk0100644 0000000 0000000 00000000352 13654546140 013121 0ustar000000000 0000000 # http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions comma:= , empty:= space:= $(empty) $(empty) foo:= a b c bar:= $(subst $(space),$(comma),$(foo)) # bar is now `a,b,c' test: echo $(bar) echo $(subst ,repl,str) testcase/subst2.mk0100644 0000000 0000000 00000000402 13654546140 013177 0ustar000000000 0000000 # http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions 
,:= , empty:= space:= $(empty) $(empty) foo:= a b c bar:= $(subst $(space),$,,$(foo)) # bar is now `,abc' # space in `,$(foo)' replaced with `$', which will be empty test: echo $(bar) testcase/suffix.mk0100644 0000000 0000000 00000000064 13654546140 013265 0ustar000000000 0000000 test: echo $(suffix src/foo.c src-1.0/bar.c hacks) testcase/suffix_rule.mk0100644 0000000 0000000 00000000205 13654546140 014311 0ustar000000000 0000000 # Preparation: create foo.c test1: touch foo.c # foo.o should match the suffix rule below. test2: foo.o .c.o: echo PASS $@ $< $^ testcase/suffix_rule_warn.sh0100644 0000000 0000000 00000002221 13654546140 015343 0ustar000000000 0000000 #!/bin/bash # # Copyright 2018 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" cat < Makefile test: @echo "PASS" .c.o: cp $< $@ EOF if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. echo 'Makefile:3: warning: suffix rules are deprecated: .c.o' echo 'PASS' else ${mk} --no_builtin_rules --warn_suffix_rules 2>&1 fi if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. 
echo 'Makefile:3: *** suffix rules are obsolete: .c.o' else ${mk} --no_builtin_rules --werror_suffix_rules 2>&1 fi testcase/suffix_subst.mk0100644 0000000 0000000 00000000056 13654546140 014506 0ustar000000000 0000000 foo:= hoge.c mgoe.c test: echo $(foo:.c=.o) testcase/suffix_subst_pat.mk0100644 0000000 0000000 00000000060 13654546140 015345 0ustar000000000 0000000 foo:= hoge.c mgoe.c test: echo $(foo:%.c=%.o) testcase/tab_comment.mk0100644 0000000 0000000 00000000072 13654546140 014250 0ustar000000000 0000000 # GNU make is OK with them. # # test: echo PASS testcase/tab_only_line.mk0100644 0000000 0000000 00000000026 13654546140 014575 0ustar000000000 0000000 test: echo PASS testcase/target_specific_var.mk0100644 0000000 0000000 00000000356 13654546140 015770 0ustar000000000 0000000 # https://www.gnu.org/software/make/manual/html_node/Target_002dspecific.html CFLAGS = -O test: prog prog: CFLAGS = -g prog : prog.o echo prog $(CFLAGS) prog.o : prog.c echo cc $(CFLAGS) -o prog.o -c prog.c prog.c: touch prog.c testcase/target_specific_var_append.mk0100644 0000000 0000000 00000000762 13654546140 017320 0ustar000000000 0000000 all: a b c d e f g h a: A:=PASS_A a: A+=A a: echo A=$(A) # Note: for some reason, make does not insert a whitespace before OK. 
B:=FAIL_B b: B+=OK b: echo B=$(B) B:= C:=PASS_C c: C?=FAIL_CC c: echo C=$(C) d: D?=PASS_D d: echo D=$(D) PASS_E:=PASS e: E:= e: E+=$(PASS_E) e: echo E=$(E) PASS_E:=FAIL PASS_F:=FAIL f: F= f: F+=$(PASS_F) f: echo F=$(F) PASS_F:=PASS PASS_G:=FAIL G:=X g: G+=$(PASS_G) g: echo G=$(G) PASS_G:=PASS PASS_H:=FAIL H=X h: H+=$(PASS_H) h: echo H=$(H) PASS_H:=PASS testcase/target_specific_var_in_var.mk0100644 0000000 0000000 00000000160 13654546140 017317 0ustar000000000 0000000 A:= x:a:=foo; B:=foo BAR:=bar BAZ:=baz $(A) echo $(BAR) ; echo $(BAZ) BAR:=FAIL_bar BAZ:=FAIL_baz x: echo $(a) testcase/target_specific_var_ref.mk0100644 0000000 0000000 00000000147 13654546140 016622 0ustar000000000 0000000 test: foo bar X:=FAIL foo: X:=PASS foo: A:=$(X) foo: echo $(A) Y:=PASS bar: B:=$(Y) bar: echo $(B) testcase/target_specific_var_simple.mk0100644 0000000 0000000 00000000037 13654546140 017335 0ustar000000000 0000000 test: X:=PASS test: echo $(X) testcase/target_specific_var_timing.mk0100644 0000000 0000000 00000000211 13654546140 017325 0ustar000000000 0000000 PASS:=PASS FAIL:=FAIL PASS2:=PASS test: foo foo: X := $(PASS) foo: Y=$(FAIL) foo: Z=$(PASS2) foo: echo $(X) $(Y) $(Z) PASS:= FAIL:= testcase/target_specific_var_var_name.mk0100644 0000000 0000000 00000000062 13654546140 017632 0ustar000000000 0000000 FOO:=BAR test: $$(FOO) := FAIL test: echo $(BAR) testcase/target_specific_var_with_pattern.mk0100644 0000000 0000000 00000000252 13654546140 020553 0ustar000000000 0000000 # TODO(go): Fix test: foo.x bar.z Z:=FAIL foo.x: X:=PASS %.x: X+=FAIL %.x: Y:=PASS %.x: Z:=PASS %.x: echo X=$(X) Y=$(Y) Z=$(Z) X:=FAIL %.z: X:=PASS %.z: echo $(X) testcase/target_specific_var_with_semi.mk0100644 0000000 0000000 00000000047 13654546140 020035 0ustar000000000 0000000 test: X = foo ; bar test: echo '$(X)' testcase/terms_in_parens.mk0100644 0000000 0000000 00000000140 13654546140 015144 0ustar000000000 0000000 # TODO(go): Fix define func $1 endef $(info $(call func,(PA,SS))) $(info ${call 
func,{PA,SS}}) testcase/tools/0040755 0000000 0000000 00000000000 13654546140 012573 5ustar000000000 0000000 testcase/tools/findleaves.py0100755 0000000 0000000 00000006355 13654546140 015276 0ustar000000000 0000000 #!/usr/bin/env python # # Copyright (C) 2009 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # # Finds files with the specified name under a particular directory, stopping # the search in a given subdirectory when the file is found. # import os import sys def perform_find(mindepth, prune, dirlist, filenames): result = [] pruneleaves = set(map(lambda x: os.path.split(x)[1], prune)) for rootdir in dirlist: rootdepth = rootdir.count("/") for root, dirs, files in os.walk(rootdir, followlinks=True): # prune check_prune = False for d in dirs: if d in pruneleaves: check_prune = True break if check_prune: i = 0 while i < len(dirs): if dirs[i] in prune: del dirs[i] else: i += 1 # mindepth if mindepth > 0: depth = 1 + root.count("/") - rootdepth if depth < mindepth: continue # match for filename in filenames: if filename in files: result.append(os.path.join(root, filename)) del dirs[:] return result def usage(): sys.stderr.write("""Usage: %(progName)s [] [--dir=] Options: --mindepth= Both behave in the same way as their find(1) equivalents. --prune= Avoids returning results from inside any directory called (e.g., "*/out/*"). May be used multiple times. --dir= Add a directory to search. May be repeated multiple times. 
For backwards compatibility, if no --dir argument is provided then all but the last entry in are treated as directories. """ % { "progName": os.path.split(sys.argv[0])[1], }) sys.exit(1) def main(argv): mindepth = -1 prune = [] dirlist = [] i=1 while i2 and argv[i][0:2] == "--": arg = argv[i] if arg.startswith("--mindepth="): try: mindepth = int(arg[len("--mindepth="):]) except ValueError: usage() elif arg.startswith("--prune="): p = arg[len("--prune="):] if len(p) == 0: usage() prune.append(p) elif arg.startswith("--dir="): d = arg[len("--dir="):] if len(p) == 0: usage() dirlist.append(d) else: usage() i += 1 if len(dirlist) == 0: # backwards compatibility if len(argv)-i < 2: # need both and usage() dirlist = argv[i:-1] filenames = [argv[-1]] else: if len(argv)-i < 1: # need usage() filenames = argv[i:] results = list(set(perform_find(mindepth, prune, dirlist, filenames))) results.sort() for r in results: print r if __name__ == "__main__": main(sys.argv) testcase/top_level_phony.sh0100644 0000000 0000000 00000002316 13654546140 015174 0ustar000000000 0000000 #!/bin/bash # # Copyright 2018 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" cat < Makefile .PHONY: test test: out/foo out/foo: bar @echo "END" EOF touch bar if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. 
echo 'Makefile:4: warning: real file "out/foo" depends on PHONY target "bar"' echo 'END' else ${mk} --warn_real_to_phony --top_level_phony 2>&1 fi if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. echo 'Makefile:4: *** real file "out/foo" depends on PHONY target "bar"' else ${mk} --werror_real_to_phony --top_level_phony 2>&1 fi testcase/trim_leading_curdir.mk0100644 0000000 0000000 00000000112 13654546140 015761 0ustar000000000 0000000 all: foo.bar ./foo.bar: ./%.bar: ./%.baz cp $< $@ ./foo.baz: touch $@ testcase/unmatched_paren.mk0100644 0000000 0000000 00000000105 13654546140 015112 0ustar000000000 0000000 PAREN:=( $(PAREN):=PASS X:=O( Y:=$(X:(=K) test: echo $Y echo $(() testcase/unmatched_paren2.mk0100644 0000000 0000000 00000000404 13654546140 015176 0ustar000000000 0000000 foo = FOO bar = BAR dp := $$( $(dp)foo := PASS_UNMATCHED FOO1BAR := PASS_MATCHED baz = 0$($(foo)1$(bar)2 # baz will be 0PASS_UNMATCHED, 1$(bar)2 will be discarded?? baz2 = 0$($(foo)1$(bar))2 # baz2 will be 0PASS_MATCHED2. test: echo "$(baz)" echo "$(baz2)" testcase/value.mk0100644 0000000 0000000 00000000555 13654546140 013102 0ustar000000000 0000000 FOO = $PATH FOO_SPACE_BAR:=foo bar FOO_COMMA_BAR:=foo,bar $(FOO_SPACE_BAR):=$PATH $(FOO_COMMA_BAR):=$PATH FOOREF := FOO X=$(X) test: echo $(FOO) echo $(value FOO) echo $(value FOO BAR) echo $(value FOO,BAR) echo $(value $(FOOREF)) # TODO(c): Fix. 
ifneq ($(KATI_VARIANT),c) echo $(value @) echo '$(value @D)' echo '$(value @F)' endif $(info $(value X)) testcase/var_append.mk0100644 0000000 0000000 00000000476 13654546140 014107 0ustar000000000 0000000 S:=simple R=recursive SE:= RE= foo=FOO bar= S+=$(foo) $(bar) R+=$(foo) $(bar) SE+=$(foo) $(bar) RE+=$(foo) $(bar) U+=$(foo) $(bar) bar=BAR test: echo "$(S)" echo "$(R)" echo "$(SE)" echo "$(RE)" echo "$(U)" echo "$(flavor S)" echo "$(flavor R)" echo "$(flavor SE)" echo "$(flavor RE)" echo "$(flavor U)" testcase/var_cond_assign.mk0100644 0000000 0000000 00000000071 13654546140 015116 0ustar000000000 0000000 foo=FOO C ?= $(foo) $(bar) test: echo "$(C)" bar=BAR testcase/var_eval.mk0100644 0000000 0000000 00000000360 13654546140 013557 0ustar000000000 0000000 var1 = $($(bar)) var2 = $$(bar) var3 := $($(bar)) var4 := $$(bar) D=$$ O=( C=) # expects # foo # $(bar) # # $(bar) # $(bar) test: echo '$(var1)' echo '$(var2)' echo '$(var3)' echo '$(var4)' echo '$D$Obar$C' bar = foo foo = foo testcase/var_target.mk0100644 0000000 0000000 00000000047 13654546140 014120 0ustar000000000 0000000 FOO=BAR $(FOO)=BAZ test: echo $(BAR) testcase/var_with_space.mk0100644 0000000 0000000 00000000443 13654546140 014760 0ustar000000000 0000000 MAKEVER:=$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]') ifeq ($(MAKEVER),4) # A variable name with space is invalid on GNU make 4. all: echo PASS else varname_with_ws:=hello, world! $(varname_with_ws):=PASS foo bar = PASS2 all: echo $(hello, world!) echo $(foo bar) endif testcase/vardef_in_call.mk0100644 0000000 0000000 00000000101 13654546140 014701 0ustar000000000 0000000 vardef=$(eval $(1):=$(2)) $(call vardef,x,PASS) test: echo $(x) testcase/vpath.mk0100644 0000000 0000000 00000000214 13654546140 013100 0ustar000000000 0000000 # TODO(c): bar is built even if foo doesn't exist. 
VPATH=dir test: bar test1: mkdir dir touch dir/foo test2: bar bar: foo echo PASS testcase/vpath_directive.mk0100644 0000000 0000000 00000000175 13654546140 015144 0ustar000000000 0000000 # TODO(c): Implement vpath. vpath %.c dir test: bar test1: mkdir dir touch dir/foo.c test2: bar bar: foo.c echo PASS testcase/warn_extra_trailings.mk0100644 0000000 0000000 00000000114 13654546140 016203 0ustar000000000 0000000 # TODO(c): should fail with "*** No targets." ifdef foo else foo endif foo testcase/warn_output_pattern_mismatch.mk0100644 0000000 0000000 00000000076 13654546140 017775 0ustar000000000 0000000 # TODO(go): Fix test: foo foo: x%x: echo PASS $(info foo) testcase/warning.mk0100644 0000000 0000000 00000000140 13654546140 013421 0ustar000000000 0000000 $(warning foo) define baz b a z endef test: $(warning bar'""') $(warning $(baz)) echo PASS testcase/warning_in_eval.mk0100644 0000000 0000000 00000000127 13654546140 015123 0ustar000000000 0000000 warn=$(warning foo) $(eval $(warn)) $(eval $$(warn)) $(warning bar) test: echo done testcase/werror_find_emulator.sh0100644 0000000 0000000 00000002277 13654546140 016224 0ustar000000000 0000000 #!/bin/bash # # Copyright 2017 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" cat < Makefile FOO := \$(shell find does/not/exist -name '*.txt') all: EOF if echo "${mk}" | grep -qv "kati"; then # Make doesn't use find emulator, or support --werror_find_emulator, so write # expected output. 
echo 'find: "does/not/exist": No such file or directory' echo 'Nothing to be done for "all".' echo 'Clean exit' else ${mk} --use_find_emulator 2>&1 && echo "Clean exit" fi if echo "${mk}" | grep -qv "kati"; then echo 'find: "does/not/exist": No such file or directory' else ${mk} --use_find_emulator --werror_find_emulator 2>&1 && echo "Clean exit" fi testcase/werror_overriding_commands.sh0100644 0000000 0000000 00000002356 13654546140 017423 0ustar000000000 0000000 #!/bin/bash # # Copyright 2017 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" cat < Makefile test: foo foo: @echo "FAIL" foo: @echo "PASS" EOF if echo "${mk}" | grep -qv "kati"; then # Make doesn't use find emulator, or support --werror_find_emulator, so write # expected output. 
echo 'Makefile:5: warning: overriding commands for target "foo"' echo 'Makefile:3: warning: ignoring old commands for target "foo"' echo 'PASS' echo 'Clean exit' else ${mk} 2>&1 && echo "Clean exit" fi if echo "${mk}" | grep -qv "kati"; then echo 'Makefile:5: *** overriding commands for target "foo", previously defined at Makefile:3' else ${mk} --werror_overriding_commands 2>&1 && echo "Clean exit" fi testcase/whitespace_in_cmd.mk0100644 0000000 0000000 00000000043 13654546140 015423 0ustar000000000 0000000 test: echo foo echo bar testcase/wildcard.mk0100644 0000000 0000000 00000000640 13654546140 013552 0ustar000000000 0000000 # TODO(go): Fix MAKEVER:=$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]') files = $(wildcard M*) $(shell mkdir -p tmp) files += $(wildcard tmp/../M*) files += $(wildcard not_exist/../M*) files += $(wildcard tmp/../M* not_exist/../M* tmp/../M*) # GNU make 4 does not sort the result of $(wildcard) ifeq ($(MAKEVER),3) files += $(wildcard [ABC] C B A) endif test1: touch A C B test2: echo $(files) testcase/wildcard_cache.mk0100644 0000000 0000000 00000001240 13654546140 014672 0ustar000000000 0000000 # TODO(c): Fix this. Maybe $(wildcard) always runs at eval-phase. # GNU make 4 agrees with ckati. MAKEVER:=$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]') ifeq ($(MAKE)$(MAKEVER),make4) $(error test skipped) endif files = $(wildcard *,*) # if make starts without foo,bar, it will be empty, although expect foo,bar. test: foo,bar echo $(files) echo $(wildcard foo*) # first $(files) will be empty since no foo,bar exists. # second $(files) expects foo, but empty. foo,bar: echo $(files) touch foo,bar echo $(files) $(shell mkdir dir) $(info $(wildcard dir/not_exist)) $(shell touch dir/file) # This should show nothing. 
$(info $(wildcard dir/file)) testcase/wildcard_multi.mk0100644 0000000 0000000 00000000105 13654546140 014760 0ustar000000000 0000000 files = $(wildcard P* M*) test1: touch PASS test2: echo $(files) testcase/wildcard_target.mk0100644 0000000 0000000 00000000133 13654546140 015115 0ustar000000000 0000000 # TODO(c): Implement wildcard expansion in prerequisites. test1: touch foo.x test2: *.x testcase/wildcard_with_commas.mk0100644 0000000 0000000 00000000377 13654546140 016153 0ustar000000000 0000000 files = $(wildcard *,*) # test expectes empty, since no *,* found. test: echo $(files) touch foo,bar # when foo,bar doesn't exit, "make test2" report empty. # next "make test2" reports "foo,bar". test2: foo,bar echo $(files) foo,bar: touch foo,bar testcase/wildcard_with_var.mk0100644 0000000 0000000 00000000232 13654546140 015452 0ustar000000000 0000000 prefix = M pattern = ${prefix}* files = $(wildcard $(pattern)) # expect Makefile, since runtest.rb put this as Makefile in new dir. test: echo $(files) testcase/word.mk0100644 0000000 0000000 00000000252 13654546140 012733 0ustar000000000 0000000 test: echo $(word 2, foo bar baz) echo $(word 2, ) echo $(word 4, foo bar baz) echo $(word 1, foo,bar baz) echo $(word 2, foo,bar baz) echo $(word 2, foo, bar baz) testcase/wordlist.mk0100644 0000000 0000000 00000000272 13654546140 013631 0ustar000000000 0000000 test: echo $(wordlist 2, 3, foo bar baz) echo $(wordlist 2, 4, foo bar baz) echo $(wordlist 4, 7, foo bar baz) echo $(wordlist 3, 2, foo bar baz) echo $(wordlist 3, 0, foo bar baz) testcase/words.mk0100644 0000000 0000000 00000000061 13654546140 013114 0ustar000000000 0000000 test: echo $(words foo bar baz) echo $(words ) testcase/writable.sh0100644 0000000 0000000 00000002460 13654546140 013577 0ustar000000000 0000000 #!/bin/bash # # Copyright 2018 Google Inc. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -u mk="$@" cat < Makefile test: out/foo.o test2: out/foo.o: foo.c foo.h test2 @echo "END" foo.c: @exit 0 foo.h: foo.c .PHONY: test test2 EOF # TODO: test implicit outputs if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. echo 'Makefile:6: warning: writing to readonly directory: "foo.c"' echo 'Makefile:7: warning: writing to readonly directory: "foo.h"' echo 'END' else ${mk} --writable=out/ 2>&1 fi if echo "${mk}" | grep -qv "kati"; then # Make doesn't support these warnings, so write the expected output. echo 'Makefile:6: *** writing to readonly directory: "foo.c"' else ${mk} --writable=out/ --werror_writable 2>&1 fi testutil.h0100644 0000000 0000000 00000002703 13654546140 011645 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
#include #include "string_piece.h" bool g_failed; #define ASSERT_EQ(a, b) \ do { \ if ((a) != (b)) { \ fprintf(stderr, \ "Assertion failure at %s:%d: %s (which is \"%.*s\") vs %s\n", \ __FILE__, __LINE__, #a, SPF(GetStringPiece(a)), #b); \ g_failed = true; \ } \ } while (0) StringPiece GetStringPiece(StringPiece s) { return s; } StringPiece GetStringPiece(size_t v) { static char buf[64]; sprintf(buf, "%zd", v); return buf; } thread_local.h0100644 0000000 0000000 00000005574 13654546140 012422 0ustar000000000 0000000 // Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // // A simple cross platform thread local storage implementation. // // This is a drop-in replacement of __thread keyword. If your compiler // toolchain supports __thread keyword, the user of this code should // be as fast as the code which uses __thread. Chrome's // base::ThreadLocalPointer and base::ThreadLocalStorage cannot be as // fast as __thread. // TODO(crbug.com/249345): If pthread_getspecific is slow for our use, // expose bionic's internal TLS and stop using pthread_getspecific // based implementation. // // Usage: // // Before (linux): // // __thread Foo* foo; // foo = new Foo(); // foo->func(); // // // After: // // DEFINE_THREAD_LOCAL(Foo*, foo); // foo.Ref() = new Foo(); // foo.Ref()->func(); // // Thread local PODs are zero-initialized. // Thread local non-PODs are initialized with the default constructor. #ifndef THREAD_LOCAL_H_ #define THREAD_LOCAL_H_ #include #include #include "log.h" #ifdef __linux__ #define DEFINE_THREAD_LOCAL(Type, name) thread_local Type name #define TLS_REF(x) x #else // Thread local storage implementation which uses pthread. // Note that DEFINE_THREAD_LOCAL creates a global variable just like // thread local storage based on __thread keyword. So we should not use // constructor in ThreadLocal class to avoid static initializator. 
template void ThreadLocalDestructor(void* ptr) { delete reinterpret_cast(ptr); } template void ThreadLocalInit() { if (pthread_key_create(key, ThreadLocalDestructor)) ERROR("Failed to create a pthread key for TLS errno=%d", errno); } template class ThreadLocal { public: Type& Ref() { return *GetPointer(); } Type Get() { return Ref(); } void Set(const Type& value) { Ref() = value; } Type* GetPointer() { pthread_once(once, ThreadLocalInit); Type* value = reinterpret_cast(pthread_getspecific(*key)); if (value) return value; // new Type() for PODs means zero initialization. value = new Type(); int error = pthread_setspecific(*key, value); if (error != 0) ERROR("Failed to set a TLS: error=%d", error); return value; } }; // We need a namespace for name##_key and name##_once since template parameters // do not accept unnamed values such as static global variables. #define DEFINE_THREAD_LOCAL(Type, name) \ namespace { \ pthread_once_t name##_once = PTHREAD_ONCE_INIT; \ pthread_key_t name##_key; \ } \ ThreadLocal name; #define TLS_REF(x) x.Ref() #endif #endif // THREAD_LOCAL_H_ thread_pool.cc0100644 0000000 0000000 00000004045 13654546140 012427 0ustar000000000 0000000 // Copyright 2016 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "thread_pool.h" #include #include #include #include #include #include "affinity.h" class ThreadPoolImpl : public ThreadPool { public: explicit ThreadPoolImpl(int num_threads) : is_waiting_(false) { SetAffinityForMultiThread(); threads_.reserve(num_threads); for (int i = 0; i < num_threads; i++) { threads_.push_back(thread([this]() { Loop(); })); } } virtual ~ThreadPoolImpl() override {} virtual void Submit(function task) override { unique_lock lock(mu_); tasks_.push(task); cond_.notify_one(); } virtual void Wait() override { { unique_lock lock(mu_); is_waiting_ = true; cond_.notify_all(); } for (thread& th : threads_) { th.join(); } SetAffinityForSingleThread(); } private: void Loop() { while (true) { function task; { unique_lock lock(mu_); if (tasks_.empty()) { if (is_waiting_) return; cond_.wait(lock); } if (tasks_.empty()) continue; task = tasks_.top(); tasks_.pop(); } task(); } } vector threads_; mutex mu_; condition_variable cond_; stack> tasks_; bool is_waiting_; }; ThreadPool* NewThreadPool(int num_threads) { return new ThreadPoolImpl(num_threads); } thread_pool.h0100644 0000000 0000000 00000001701 13654546140 012265 0ustar000000000 0000000 // Copyright 2016 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
#ifndef THREAD_POOL_H_ #define THREAD_POOL_H_ #include using namespace std; class ThreadPool { public: virtual ~ThreadPool() = default; virtual void Submit(function task) = 0; virtual void Wait() = 0; protected: ThreadPool() = default; }; ThreadPool* NewThreadPool(int num_threads); #endif // THREAD_POOL_H_ timeutil.cc0100644 0000000 0000000 00000002327 13654546140 011764 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build ignore #include "timeutil.h" #include #include #include "log.h" double GetTime() { #if defined(__linux__) struct timespec ts; clock_gettime(CLOCK_REALTIME, &ts); return ts.tv_sec + ts.tv_nsec * 0.001 * 0.001 * 0.001; #else struct timeval tv; if (gettimeofday(&tv, NULL) < 0) PERROR("gettimeofday"); return tv.tv_sec + tv.tv_usec * 0.001 * 0.001; #endif } ScopedTimeReporter::ScopedTimeReporter(const char* name) : name_(name), start_(GetTime()) {} ScopedTimeReporter::~ScopedTimeReporter() { double elapsed = GetTime() - start_; LOG_STAT("%s: %f", name_, elapsed); } timeutil.h0100644 0000000 0000000 00000001525 13654546140 011625 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef TIMEUTIL_H_ #define TIMEUTIL_H_ double GetTime(); struct ScopedTimeReporter { public: explicit ScopedTimeReporter(const char* name); ~ScopedTimeReporter(); private: const char* name_; double start_; }; #endif // TIME_H_ var.cc0100644 0000000 0000000 00000012640 13654546140 010717 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
// +build ignore #include "var.h" #include "eval.h" #include "expr.h" #include "log.h" unordered_map Var::diagnostic_messages_; const char* GetOriginStr(VarOrigin origin) { switch (origin) { case VarOrigin::UNDEFINED: return "undefined"; case VarOrigin::DEFAULT: return "default"; case VarOrigin::ENVIRONMENT: return "environment"; case VarOrigin::ENVIRONMENT_OVERRIDE: return "environment override"; case VarOrigin::FILE: return "file"; case VarOrigin::COMMAND_LINE: return "command line"; case VarOrigin::OVERRIDE: return "override"; case VarOrigin::AUTOMATIC: return "automatic"; } CHECK(false); return "*** broken origin ***"; } Var::Var() : Var(VarOrigin::UNDEFINED) {} Var::Var(VarOrigin origin) : origin_(origin), readonly_(false), deprecated_(false), obsolete_(false) {} Var::~Var() { diagnostic_messages_.erase(this); } void Var::AppendVar(Evaluator*, Value*) { CHECK(false); } void Var::SetDeprecated(const StringPiece& msg) { deprecated_ = true; diagnostic_messages_[this] = msg.as_string(); } void Var::SetObsolete(const StringPiece& msg) { obsolete_ = true; diagnostic_messages_[this] = msg.as_string(); } void Var::Used(Evaluator* ev, const Symbol& sym) const { if (obsolete_) { ev->Error(StringPrintf("*** %s is obsolete%s.", sym.c_str(), diagnostic_message_text())); } else if (deprecated_) { WARN_LOC(ev->loc(), "%s has been deprecated%s.", sym.c_str(), diagnostic_message_text()); } } const char* Var::diagnostic_message_text() const { auto it = diagnostic_messages_.find(this); return it == diagnostic_messages_.end() ? "" : it->second.c_str(); } const string& Var::DeprecatedMessage() const { static const string empty_string; auto it = diagnostic_messages_.find(this); return it == diagnostic_messages_.end() ? 
empty_string : it->second; } Var* Var::Undefined() { static Var* undefined_var; if (!undefined_var) { undefined_var = new UndefinedVar(); } return undefined_var; } SimpleVar::SimpleVar(VarOrigin origin) : Var(origin) {} SimpleVar::SimpleVar(const string& v, VarOrigin origin) : Var(origin), v_(v) {} SimpleVar::SimpleVar(VarOrigin origin, Evaluator* ev, Value* v) : Var(origin) { v->Eval(ev, &v_); } void SimpleVar::Eval(Evaluator* ev, string* s) const { ev->CheckStack(); *s += v_; } void SimpleVar::AppendVar(Evaluator* ev, Value* v) { string buf; v->Eval(ev, &buf); v_.push_back(' '); v_ += buf; } StringPiece SimpleVar::String() const { return v_; } string SimpleVar::DebugString() const { return v_; } RecursiveVar::RecursiveVar(Value* v, VarOrigin origin, StringPiece orig) : Var(origin), v_(v), orig_(orig) {} void RecursiveVar::Eval(Evaluator* ev, string* s) const { ev->CheckStack(); v_->Eval(ev, s); } void RecursiveVar::AppendVar(Evaluator* ev, Value* v) { ev->CheckStack(); v_ = Value::NewExpr(v_, Value::NewLiteral(" "), v); } StringPiece RecursiveVar::String() const { return orig_; } string RecursiveVar::DebugString() const { return Value::DebugString(v_); } UndefinedVar::UndefinedVar() {} void UndefinedVar::Eval(Evaluator*, string*) const { // Nothing to do. } StringPiece UndefinedVar::String() const { return StringPiece(""); } string UndefinedVar::DebugString() const { return "*undefined*"; } Vars::~Vars() { for (auto p : *this) { delete p.second; } } void Vars::add_used_env_vars(Symbol v) { used_env_vars_.insert(v); } Var* Vars::Lookup(Symbol name) const { auto found = find(name); if (found == end()) return Var::Undefined(); Var* v = found->second; if (v->Origin() == VarOrigin::ENVIRONMENT || v->Origin() == VarOrigin::ENVIRONMENT_OVERRIDE) { used_env_vars_.insert(name); } return v; } Var* Vars::Peek(Symbol name) const { auto found = find(name); return found == end() ? 
Var::Undefined() : found->second; } void Vars::Assign(Symbol name, Var* v, bool* readonly) { *readonly = false; auto p = emplace(name, v); if (!p.second) { Var* orig = p.first->second; if (orig->ReadOnly()) { *readonly = true; return; } if (orig->Origin() == VarOrigin::OVERRIDE || orig->Origin() == VarOrigin::ENVIRONMENT_OVERRIDE) { return; } if (orig->Origin() == VarOrigin::AUTOMATIC) { ERROR("overriding automatic variable is not implemented yet"); } if (orig->IsDefined()) delete p.first->second; p.first->second = v; } } SymbolSet Vars::used_env_vars_; ScopedVar::ScopedVar(Vars* vars, Symbol name, Var* var) : vars_(vars), orig_(NULL) { auto p = vars->emplace(name, var); iter_ = p.first; if (!p.second) { orig_ = iter_->second; iter_->second = var; } } ScopedVar::~ScopedVar() { if (orig_) { iter_->second = orig_; } else { vars_->erase(iter_); } } var.go0100644 0000000 0000000 00000020612 13654546140 010735 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kati import ( "bytes" "fmt" "io" "strings" ) // Var is an interface of make variable. 
type Var interface { Value Append(*Evaluator, string) (Var, error) AppendVar(*Evaluator, Value) (Var, error) Flavor() string Origin() string IsDefined() bool } type targetSpecificVar struct { v Var op string } func (v *targetSpecificVar) Append(ev *Evaluator, s string) (Var, error) { nv, err := v.v.Append(ev, s) if err != nil { return nil, err } return &targetSpecificVar{ v: nv, op: v.op, }, nil } func (v *targetSpecificVar) AppendVar(ev *Evaluator, v2 Value) (Var, error) { nv, err := v.v.AppendVar(ev, v2) if err != nil { return nil, err } return &targetSpecificVar{ v: nv, op: v.op, }, nil } func (v *targetSpecificVar) Flavor() string { return v.v.Flavor() } func (v *targetSpecificVar) Origin() string { return v.v.Origin() } func (v *targetSpecificVar) IsDefined() bool { return v.v.IsDefined() } func (v *targetSpecificVar) String() string { // TODO: If we add the info of |op| a test starts // failing. Shouldn't we use this only for debugging? return v.v.String() // return v.v.String() + " (op=" + v.op + ")" } func (v *targetSpecificVar) Eval(w evalWriter, ev *Evaluator) error { return v.v.Eval(w, ev) } func (v *targetSpecificVar) serialize() serializableVar { return serializableVar{ Type: v.op, Children: []serializableVar{v.v.serialize()}, } } func (v *targetSpecificVar) dump(d *dumpbuf) { d.Byte(valueTypeTSV) d.Str(v.op) v.v.dump(d) } type simpleVar struct { // space separated. note that each string may contain spaces, so // it is not word list. 
value []string origin string } func (v *simpleVar) Flavor() string { return "simple" } func (v *simpleVar) Origin() string { return v.origin } func (v *simpleVar) IsDefined() bool { return true } func (v *simpleVar) String() string { return strings.Join(v.value, " ") } func (v *simpleVar) Eval(w evalWriter, ev *Evaluator) error { space := false for _, v := range v.value { if space { writeByte(w, ' ') } io.WriteString(w, v) space = true } return nil } func (v *simpleVar) serialize() serializableVar { return serializableVar{ Type: "simple", V: v.String(), Origin: v.origin, } } func (v *simpleVar) dump(d *dumpbuf) { d.Byte(valueTypeSimple) d.Int(len(v.value)) for _, v := range v.value { d.Str(v) } d.Str(v.origin) } func (v *simpleVar) Append(ev *Evaluator, s string) (Var, error) { val, _, err := parseExpr([]byte(s), nil, parseOp{}) if err != nil { return nil, err } abuf := newEbuf() err = val.Eval(abuf, ev) if err != nil { return nil, err } v.value = append(v.value, abuf.String()) abuf.release() return v, nil } func (v *simpleVar) AppendVar(ev *Evaluator, val Value) (Var, error) { abuf := newEbuf() err := val.Eval(abuf, ev) if err != nil { return nil, err } v.value = append(v.value, abuf.String()) abuf.release() return v, nil } type automaticVar struct { value []byte } func (v *automaticVar) Flavor() string { return "simple" } func (v *automaticVar) Origin() string { return "automatic" } func (v *automaticVar) IsDefined() bool { return true } func (v *automaticVar) String() string { return string(v.value) } func (v *automaticVar) Eval(w evalWriter, ev *Evaluator) error { w.Write(v.value) return nil } func (v *automaticVar) serialize() serializableVar { return serializableVar{Type: ""} } func (v *automaticVar) dump(d *dumpbuf) { d.err = fmt.Errorf("cannnot dump automatic var:%s", v.value) } func (v *automaticVar) Append(ev *Evaluator, s string) (Var, error) { val, _, err := parseExpr([]byte(s), nil, parseOp{}) if err != nil { return nil, err } abuf := newEbuf() err = 
val.Eval(abuf, ev) if err != nil { return nil, err } value := []string{string(v.value), abuf.String()} abuf.release() return &simpleVar{ value: value, origin: "file", }, nil } func (v *automaticVar) AppendVar(ev *Evaluator, val Value) (Var, error) { abuf := newEbuf() err := val.Eval(abuf, ev) if err != nil { return nil, err } value := []string{string(v.value), abuf.String()} abuf.release() return &simpleVar{ value: value, origin: "file", }, nil } type recursiveVar struct { expr Value origin string } func (v *recursiveVar) Flavor() string { return "recursive" } func (v *recursiveVar) Origin() string { return v.origin } func (v *recursiveVar) IsDefined() bool { return true } func (v *recursiveVar) String() string { return v.expr.String() } func (v *recursiveVar) Eval(w evalWriter, ev *Evaluator) error { v.expr.Eval(w, ev) return nil } func (v *recursiveVar) serialize() serializableVar { return serializableVar{ Type: "recursive", Children: []serializableVar{v.expr.serialize()}, Origin: v.origin, } } func (v *recursiveVar) dump(d *dumpbuf) { d.Byte(valueTypeRecursive) v.expr.dump(d) d.Str(v.origin) } func (v *recursiveVar) Append(_ *Evaluator, s string) (Var, error) { var exp expr if e, ok := v.expr.(expr); ok { exp = append(e, literal(" ")) } else { exp = expr{v.expr, literal(" ")} } sv, _, err := parseExpr([]byte(s), nil, parseOp{alloc: true}) if err != nil { return nil, err } if aexpr, ok := sv.(expr); ok { exp = append(exp, aexpr...) 
} else { exp = append(exp, sv) } v.expr = exp return v, nil } func (v *recursiveVar) AppendVar(ev *Evaluator, val Value) (Var, error) { var buf bytes.Buffer buf.WriteString(v.expr.String()) buf.WriteByte(' ') buf.WriteString(val.String()) e, _, err := parseExpr(buf.Bytes(), nil, parseOp{alloc: true}) if err != nil { return nil, err } v.expr = e return v, nil } type undefinedVar struct{} func (undefinedVar) Flavor() string { return "undefined" } func (undefinedVar) Origin() string { return "undefined" } func (undefinedVar) IsDefined() bool { return false } func (undefinedVar) String() string { return "" } func (undefinedVar) Eval(_ evalWriter, _ *Evaluator) error { return nil } func (undefinedVar) serialize() serializableVar { return serializableVar{Type: "undefined"} } func (undefinedVar) dump(d *dumpbuf) { d.Byte(valueTypeUndefined) } func (undefinedVar) Append(*Evaluator, string) (Var, error) { return undefinedVar{}, nil } func (undefinedVar) AppendVar(_ *Evaluator, val Value) (Var, error) { return undefinedVar{}, nil } // Vars is a map for make variables. type Vars map[string]Var // usedEnvs tracks what environment variables are used. var usedEnvs = map[string]bool{} // Lookup looks up named make variable. func (vt Vars) Lookup(name string) Var { if v, ok := vt[name]; ok { if strings.HasPrefix(v.Origin(), "environment") { usedEnvs[name] = true } return v } return undefinedVar{} } // origin precedence // override / environment override // command line // file // environment // default // TODO(ukai): is this correct order? var originPrecedence = map[string]int{ "override": 4, "environment override": 4, "command line": 3, "file": 2, "environment": 2, "default": 1, "undefined": 0, "automatic": 0, } // Assign assigns v to name. func (vt Vars) Assign(name string, v Var) { vo := v.Origin() // assign automatic always win. // assign new value to automatic always win. 
if vo != "automatic" { vp := originPrecedence[v.Origin()] var op int if ov, ok := vt[name]; ok { op = originPrecedence[ov.Origin()] } if op > vp { return } } vt[name] = v } // NewVars creates new Vars. func NewVars(vt Vars) Vars { r := make(Vars) r.Merge(vt) return r } // Merge merges vt2 into vt. func (vt Vars) Merge(vt2 Vars) { for k, v := range vt2 { vt[k] = v } } // save saves value of the variable named name. // calling returned value will restore to the old value at the time // when save called. func (vt Vars) save(name string) func() { if v, ok := vt[name]; ok { return func() { vt[name] = v } } return func() { delete(vt, name) } } var.h0100644 0000000 0000000 00000010071 13654546140 010555 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
#ifndef VAR_H_
#define VAR_H_

// NOTE(review): the four #include directives below have lost their
// targets — the <...> header names appear to have been stripped during
// extraction. Restore them from upstream (likely standard headers such
// as <memory>, <string>, <unordered_map>, <vector> — TODO confirm).
#include
#include
#include
#include

#include "eval.h"
#include "expr.h"
#include "log.h"
#include "stmt.h"
#include "string_piece.h"
#include "symtab.h"

using namespace std;

class Evaluator;
class Value;

// Origin of a make variable's value (see GetOriginStr for the names).
enum struct VarOrigin : char {
  UNDEFINED,
  DEFAULT,
  ENVIRONMENT,
  ENVIRONMENT_OVERRIDE,
  FILE,
  COMMAND_LINE,
  OVERRIDE,
  AUTOMATIC,
};

const char* GetOriginStr(VarOrigin origin);

// Base class for all make variables.
class Var : public Evaluable {
 public:
  virtual ~Var();
  virtual const char* Flavor() const = 0;
  VarOrigin Origin() { return origin_; }
  virtual bool IsDefined() const { return true; }
  virtual void AppendVar(Evaluator* ev, Value* v);
  virtual StringPiece String() const = 0;
  virtual string DebugString() const = 0;
  bool ReadOnly() const { return readonly_; }
  void SetReadOnly() { readonly_ = true; }
  bool Deprecated() const { return deprecated_; }
  void SetDeprecated(const StringPiece& msg);
  bool Obsolete() const { return obsolete_; }
  void SetObsolete(const StringPiece& msg);
  const string& DeprecatedMessage() const;
  // This variable was used (either written or read from)
  void Used(Evaluator* ev, const Symbol& sym) const;
  AssignOp op() const { return assign_op_; }
  void SetAssignOp(AssignOp op) { assign_op_ = op; }
  static Var* Undefined();

 protected:
  Var();
  explicit Var(VarOrigin origin);

 private:
  const VarOrigin origin_;
  AssignOp assign_op_;
  bool readonly_ : 1;
  bool deprecated_ : 1;
  bool obsolete_ : 1;
  const char* diagnostic_message_text() const;
  // NOTE(review): template arguments stripped here too — TODO restore.
  static unordered_map diagnostic_messages_;
};

// Variable whose value was evaluated eagerly at assignment (flavor "simple").
class SimpleVar : public Var {
 public:
  explicit SimpleVar(VarOrigin origin);
  SimpleVar(const string& v, VarOrigin origin);
  SimpleVar(VarOrigin, Evaluator* ev, Value* v);
  virtual const char* Flavor() const override { return "simple"; }
  virtual void Eval(Evaluator* ev, string* s) const override;
  virtual void AppendVar(Evaluator* ev, Value* v) override;
  virtual StringPiece String() const override;
  virtual string DebugString() const override;

 private:
  string v_;
};

// Variable that keeps its unevaluated expression (flavor "recursive").
class RecursiveVar : public Var {
 public:
  RecursiveVar(Value* v, VarOrigin origin, StringPiece orig);
  virtual const char* Flavor() const override { return "recursive"; }
  virtual void Eval(Evaluator* ev, string* s) const override;
  virtual void AppendVar(Evaluator* ev, Value* v) override;
  virtual StringPiece String() const override;
  virtual string DebugString() const override;

 private:
  Value* v_;
  StringPiece orig_;
};

// Placeholder for a variable that is not defined (IsDefined() == false).
class UndefinedVar : public Var {
 public:
  UndefinedVar();
  virtual const char* Flavor() const override { return "undefined"; }
  virtual bool IsDefined() const override { return false; }
  virtual void Eval(Evaluator* ev, string* s) const override;
  virtual StringPiece String() const override;
  virtual string DebugString() const override;
};

// NOTE(review): base-class template arguments stripped — TODO restore
// (presumably unordered_map keyed by Symbol mapping to Var*).
class Vars : public unordered_map {
 public:
  ~Vars();
  Var* Lookup(Symbol name) const;
  Var* Peek(Symbol name) const;
  void Assign(Symbol name, Var* v, bool* readonly);
  static void add_used_env_vars(Symbol v);
  static const SymbolSet used_env_vars() { return used_env_vars_; }

 private:
  static SymbolSet used_env_vars_;
};

// Temporarily installs var under name in vars; the destructor restores
// the previous binding.
class ScopedVar {
 public:
  // Does not take ownerships of arguments.
  ScopedVar(Vars* vars, Symbol name, Var* var);
  ~ScopedVar();

 private:
  Vars* vars_;
  Var* orig_;
  Vars::iterator iter_;
};

#endif  // VAR_H_
version.go0100644 0000000 0000000 00000001206 13654546140 011630 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package kati

// gitVersion holds the git revision reported for this build.
// NOTE(review): nothing in this chunk assigns it — presumably stamped by
// the build (linker -X flag or generated file); verify against the
// Makefile.
var gitVersion string
version.h0100644 0000000 0000000 00000001300 13654546140 011455 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef VERSION_H_
#define VERSION_H_

// Git revision string; defined in version_unknown.cc below (or in a
// build-generated version.cc — see .gitignore).
extern const char* kGitVersion;

#endif  // VERSION_H_
version_unknown.cc0100644 0000000 0000000 00000001231 13654546140 013365 0ustar000000000 0000000 // Copyright 2016 Google Inc. All rights reserved
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// +build ignore

// Fallback definition used when no git revision was generated at build
// time.
const char* kGitVersion = "unknown";
worker.go0100644 0000000 0000000 00000017035 13654546140 011463 0ustar000000000 0000000 // Copyright 2015 Google Inc. All rights reserved
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package kati

import (
	"container/heap"
	"errors"
	"fmt"
	"os"
	"os/exec"
	"syscall"
	"time"

	"github.com/golang/glog"
)

var (
	// errNothingDone signals that a job needed no work (target up to date).
	errNothingDone = errors.New("nothing done")
)

// job is one dependency-graph node scheduled for execution.
type job struct {
	n        *DepNode
	ex       *Executor
	parents  []*job // jobs that depend on this one.
	outputTs int64  // output file timestamp; -2 when missing or phony.
	numDeps  int    // remaining unfinished dependencies.
	depsTs   int64  // newest output timestamp among dependencies.
	id       int    // arrival order; used for FIFO scheduling.

	runners []runner
}

// jobResult carries a finished job (and the worker that ran it) back to
// the manager.
type jobResult struct {
	j   *job
	w   *worker
	err error
}

// newDep records that job j is a newly discovered dependency of neededBy.
type newDep struct {
	j        *job
	neededBy *job
}

// worker runs jobs one at a time on its own goroutine.
type worker struct {
	wm       *workerManager
	jobChan  chan *job
	waitChan chan bool
	doneChan chan bool
}

// jobQueue is a container/heap priority queue ordered by job id.
type jobQueue []*job

func (jq jobQueue) Len() int { return len(jq) }
func (jq jobQueue) Swap(i, j int) { jq[i], jq[j] = jq[j], jq[i] }

func (jq jobQueue) Less(i, j int) bool {
	// First come, first serve, for GNU make compatibility.
	return jq[i].id < jq[j].id
}

func (jq *jobQueue) Push(x interface{}) {
	item := x.(*job)
	*jq = append(*jq, item)
}

func (jq *jobQueue) Pop() interface{} {
	old := *jq
	n := len(old)
	item := old[n-1]
	*jq = old[0 : n-1]
	return item
}

// newWorker creates a worker bound to wm; start it with go w.Run().
func newWorker(wm *workerManager) *worker {
	w := &worker{
		wm:       wm,
		jobChan:  make(chan *job),
		waitChan: make(chan bool),
		doneChan: make(chan bool),
	}
	return w
}

// Run executes jobs until told to stop via waitChan, then acknowledges
// shutdown on doneChan.
func (w *worker) Run() {
	done := false
	for !done {
		select {
		case j := <-w.jobChan:
			err := j.build()
			w.wm.ReportResult(w, j, err)
		case done = <-w.waitChan:
		}
	}
	w.doneChan <- true
}

// PostJob hands a job to this worker (blocks until the worker accepts it).
func (w *worker) PostJob(j *job) {
	w.jobChan <- j
}

// Wait stops the worker and blocks until its goroutine has exited.
func (w *worker) Wait() {
	w.waitChan <- true
	<-w.doneChan
}

// createRunners builds the command runners for this job's rule.
func (j *job) createRunners() ([]runner, error) {
	runners, _, err := createRunners(j.ex.ctx, j.n)
	return runners, err
}

// TODO(ukai): use time.Time?
func getTimestamp(filename string) int64 { st, err := os.Stat(filename) if err != nil { return -2 } return st.ModTime().Unix() } func (j *job) build() error { if j.n.IsPhony { j.outputTs = -2 // trigger cmd even if all inputs don't exist. } else { j.outputTs = getTimestamp(j.n.Output) } if !j.n.HasRule { if j.outputTs >= 0 || j.n.IsPhony { return errNothingDone } if len(j.parents) == 0 { return fmt.Errorf("*** No rule to make target %q.", j.n.Output) } return fmt.Errorf("*** No rule to make target %q, needed by %q.", j.n.Output, j.parents[0].n.Output) } if j.outputTs >= j.depsTs { // TODO: stats. return errNothingDone } rr, err := j.createRunners() if err != nil { return err } if len(rr) == 0 { return errNothingDone } for _, r := range rr { err := r.run(j.n.Output) glog.Warningf("cmd result for %q: %v", j.n.Output, err) if err != nil { exit := exitStatus(err) return fmt.Errorf("*** [%s] Error %d", j.n.Output, exit) } } if j.n.IsPhony { j.outputTs = time.Now().Unix() } else { j.outputTs = getTimestamp(j.n.Output) if j.outputTs < 0 { j.outputTs = time.Now().Unix() } } return nil } func (wm *workerManager) handleJobs() error { for { if len(wm.freeWorkers) == 0 { return nil } if wm.readyQueue.Len() == 0 { return nil } j := heap.Pop(&wm.readyQueue).(*job) glog.V(1).Infof("run: %s", j.n.Output) j.numDeps = -1 // Do not let other workers pick this. 
w := wm.freeWorkers[0] wm.freeWorkers = wm.freeWorkers[1:] wm.busyWorkers[w] = true w.jobChan <- j } } func (wm *workerManager) updateParents(j *job) { for _, p := range j.parents { p.numDeps-- glog.V(1).Infof("child: %s (%d)", p.n.Output, p.numDeps) if p.depsTs < j.outputTs { p.depsTs = j.outputTs } wm.maybePushToReadyQueue(p) } } type workerManager struct { maxJobs int jobs []*job readyQueue jobQueue jobChan chan *job resultChan chan jobResult newDepChan chan newDep stopChan chan bool waitChan chan bool doneChan chan error freeWorkers []*worker busyWorkers map[*worker]bool ex *Executor runnings map[string]*job finishCnt int skipCnt int } func newWorkerManager(numJobs int) (*workerManager, error) { wm := &workerManager{ maxJobs: numJobs, jobChan: make(chan *job), resultChan: make(chan jobResult), newDepChan: make(chan newDep), stopChan: make(chan bool), waitChan: make(chan bool), doneChan: make(chan error), busyWorkers: make(map[*worker]bool), } wm.busyWorkers = make(map[*worker]bool) for i := 0; i < numJobs; i++ { w := newWorker(wm) wm.freeWorkers = append(wm.freeWorkers, w) go w.Run() } heap.Init(&wm.readyQueue) go wm.Run() return wm, nil } func exitStatus(err error) int { if err == nil { return 0 } exit := 1 if err, ok := err.(*exec.ExitError); ok { if w, ok := err.ProcessState.Sys().(syscall.WaitStatus); ok { return w.ExitStatus() } } return exit } func (wm *workerManager) hasTodo() bool { return wm.finishCnt != len(wm.jobs) } func (wm *workerManager) maybePushToReadyQueue(j *job) { if j.numDeps != 0 { return } heap.Push(&wm.readyQueue, j) glog.V(1).Infof("ready: %s", j.n.Output) } func (wm *workerManager) handleNewDep(j *job, neededBy *job) { if j.numDeps < 0 { neededBy.numDeps-- if neededBy.id > 0 { panic("FIXME: already in WM... 
can this happen?") } } else { j.parents = append(j.parents, neededBy) } } func (wm *workerManager) Run() { done := false var err error Loop: for wm.hasTodo() || len(wm.busyWorkers) > 0 || len(wm.runnings) > 0 || !done { select { case j := <-wm.jobChan: glog.V(1).Infof("wait: %s (%d)", j.n.Output, j.numDeps) j.id = len(wm.jobs) + 1 wm.jobs = append(wm.jobs, j) wm.maybePushToReadyQueue(j) case jr := <-wm.resultChan: glog.V(1).Infof("done: %s", jr.j.n.Output) delete(wm.busyWorkers, jr.w) wm.freeWorkers = append(wm.freeWorkers, jr.w) wm.updateParents(jr.j) wm.finishCnt++ if jr.err == errNothingDone { wm.skipCnt++ jr.err = nil } if jr.err != nil { err = jr.err close(wm.stopChan) break Loop } case af := <-wm.newDepChan: wm.handleNewDep(af.j, af.neededBy) glog.V(1).Infof("dep: %s (%d) %s", af.neededBy.n.Output, af.neededBy.numDeps, af.j.n.Output) case done = <-wm.waitChan: } err = wm.handleJobs() if err != nil { break Loop } glog.V(1).Infof("job=%d ready=%d free=%d busy=%d", len(wm.jobs)-wm.finishCnt, wm.readyQueue.Len(), len(wm.freeWorkers), len(wm.busyWorkers)) } if !done { <-wm.waitChan } for _, w := range wm.freeWorkers { w.Wait() } for w := range wm.busyWorkers { w.Wait() } wm.doneChan <- err } func (wm *workerManager) PostJob(j *job) error { select { case wm.jobChan <- j: return nil case <-wm.stopChan: return errors.New("worker manager stopped") } } func (wm *workerManager) ReportResult(w *worker, j *job, err error) { select { case wm.resultChan <- jobResult{w: w, j: j, err: err}: case <-wm.stopChan: } } func (wm *workerManager) ReportNewDep(j *job, neededBy *job) { select { case wm.newDepChan <- newDep{j: j, neededBy: neededBy}: case <-wm.stopChan: } } func (wm *workerManager) Wait() (int, error) { wm.waitChan <- true err := <-wm.doneChan glog.V(2).Infof("finish %d skip %d", wm.finishCnt, wm.skipCnt) return wm.finishCnt - wm.skipCnt, err }