pax_global_header00006660000000000000000000000064143133412570014515gustar00rootroot0000000000000052 comment=197ef76f30e7205a2a39b8bfbe53359234afcb7d hcl-2.14.1/000077500000000000000000000000001431334125700123505ustar00rootroot00000000000000hcl-2.14.1/.github/000077500000000000000000000000001431334125700137105ustar00rootroot00000000000000hcl-2.14.1/.github/workflows/000077500000000000000000000000001431334125700157455ustar00rootroot00000000000000hcl-2.14.1/.github/workflows/push.yml000066400000000000000000000033151431334125700174510ustar00rootroot00000000000000name: Per-commit Checks on: push: workflow_dispatch: pull_request: jobs: unit_tests: strategy: matrix: include: - runs-on: ubuntu-latest target: linux_amd64 - runs-on: windows-latest target: windows_amd64 fail-fast: false name: "Unit Tests on ${{ matrix.target }}" runs-on: "${{ matrix.runs-on }}" steps: - name: "Disable git crlf conversions" if: ${{ runner.os == 'Windows' }} # HCL preserves the input line endings when processing a heredoc, # and our tests for heredocs are written to expect the result for # the source code as checked in to the repository, so git's normal # tampering with the line endings would invalidate those tests. run: | git config --global core.autocrlf false - name: "Fetch source code" uses: actions/checkout@v2 - name: Install Go uses: actions/setup-go@v2 with: go-version: 1.18 - name: Go test run: | go test ./... fmt_and_vet: name: "fmt and lint" runs-on: ubuntu-latest steps: - name: "Fetch source code" uses: actions/checkout@v2 - name: Install Go uses: actions/setup-go@v2 with: go-version: 1.18 - name: "Check vet" run: | go vet ./... - name: "Check fmt" run: | go fmt ./... if [[ -z "$(git status --porcelain)" ]]; then echo "Formatting is consistent with 'go fmt'." else echo "Run 'go fmt ./...' to automatically apply standard Go style to all packages." git status --porcelain exit 1 fi hcl-2.14.1/CHANGELOG.md000066400000000000000000000415421431334125700141670ustar00rootroot00000000000000# HCL Changelog ## v2.14.1 (September 23, 2022) ### Bugs Fixed * ext/typeexpr: Type convert defaults for optional object attributes when applying them. This prevents crashes in certain cases when the objects in question are part of a collection. ([#555](https://github.com/hashicorp/hcl/pull/555)) ## v2.14.0 (September 1, 2022) ### Enhancements * ext/typeexpr: Added support for optional object attributes to `TypeConstraint`. Attributes can be wrapped in the special `optional(…)` modifier, allowing the attribute to be omitted while still meeting the type constraint. For more information, [cty's documentation on conversion between object types](https://github.com/zclconf/go-cty/blob/main/docs/convert.md#conversion-between-object-types). ([#549](https://github.com/hashicorp/hcl/pull/549)) * ext/typeexpr: New function: `TypeConstraintWithDefaults`. In this mode, the `optional(…)` modifier accepts a second argument which can be used as the default value for omitted object attributes. The function returns both a `cty.Type` and associated `Defaults`, the latter of which has an `Apply` method to apply defaults to a given value. ([#549](https://github.com/hashicorp/hcl/pull/549)) ## v2.13.0 (June 22, 2022) ### Enhancements * hcl: `hcl.Diagnostic` now has an additional field `Extra` which is intended for carrying arbitrary supporting data ("extra information") related to the diagnostic message, intended to allow diagnostic renderers to optionally tailor the presentation of messages for particular situations. 
([#539](https://github.com/hashicorp/hcl/pull/539)) * hclsyntax: When an error occurs during a function call, the returned diagnostics will include _extra information_ (as described in the previous point) about which function was being called and, if the message is about an error returned by the function itself, that raw `error` value without any post-processing. ([#539](https://github.com/hashicorp/hcl/pull/539)) ### Bugs Fixed * hclwrite: Fixed a potential data race for any situation where `hclwrite.Format` runs concurrently with itself. ([#534](https://github.com/hashicorp/hcl/pull/534)) ## v2.12.0 (April 22, 2022) ### Enhancements * hclsyntax: Evaluation of conditional expressions will now produce more precise error messages about inconsistencies between the types of the true and false result expressions, particularly in cases where both are of the same structural type kind but differ in their nested elements. ([#530](https://github.com/hashicorp/hcl/pull/530)) * hclsyntax: The lexer will no longer allocate a small object on the heap for each token. Instead, in that situation it will allocate only when needed to return a diagnostic message with source location information. ([#490](https://github.com/hashicorp/hcl/pull/490)) * hclwrite: New functions `TokensForTuple`, `TokensForObject`, and `TokensForFunctionCall` allow for more easily constructing the three constructs which are supported for static analysis and which HCL-based languages typically use in contexts where an expression is used only for its syntax, and not evaluated to produce a real value. For example, these new functions together are sufficient to construct all valid type constraint expressions from [the Type Expressions Extension](./ext/typeexpr/), which is the basis of variable type constraints in the Terraform language at the time of writing. ([#502](https://github.com/hashicorp/hcl/pull/502)) * json: New functions `IsJSONExpression` and `IsJSONBody` to determine if a given expression or body was created by the JSON syntax parser. In normal situations it's better not to worry about what syntax a particular expression/body originated in, but this can be useful in some trickier cases where an application needs to shim for backwards-compatibility or for static analysis that needs to have special handling of the JSON syntax's embedded expression/template conventions. ([#524](https://github.com/hashicorp/hcl/pull/524)) ### Bugs Fixed * gohcl: Fix docs about supported types for blocks. ([#507](https://github.com/hashicorp/hcl/pull/507)) ## v2.11.1 (December 1, 2021) ### Bugs Fixed * hclsyntax: The type for an upgraded unknown value with a splat expression cannot be known ([#495](https://github.com/hashicorp/hcl/pull/495)) ## v2.11.0 (December 1, 2021) ### Enhancements * hclsyntax: Various error messages related to unexpectedly reaching end of file while parsing a delimited subtree will now return specialized messages describing the opening tokens as "unclosed", instead of returning a generic diagnostic that just happens to refer to the empty source range at the end of the file. This gives better feedback when error messages are being presented alongside a source code snippet, as is common in HCL-based applications, because it shows which innermost container the parser was working on when it encountered the error. 
([#492](https://github.com/hashicorp/hcl/pull/492)) ### Bugs Fixed * hclsyntax: Upgrading an unknown single value to a list using a splat expression must return unknown ([#493](https://github.com/hashicorp/hcl/pull/493)) ## v2.10.1 (July 21, 2021) * dynblock: Decode unknown dynamic blocks in order to obtain any diagnostics even though the decoded value is not used ([#476](https://github.com/hashicorp/hcl/pull/476)) * hclsyntax: Calling functions is now more robust in the face of an incorrectly-implemented function which returns a `function.ArgError` whose argument index is out of range for the length of the arguments. Previously this would often lead to a panic, but now it'll return a less-precice error message instead. Functions that return out-of-bounds argument indices still ought to be fixed so that the resulting error diagnostics can be as precise as possible. ([#472](https://github.com/hashicorp/hcl/pull/472)) * hclsyntax: Ensure marks on unknown values are maintained when processing string templates. ([#478](https://github.com/hashicorp/hcl/pull/478)) * hcl: Improved error messages for various common error situtions in `hcl.Index` and `hcl.GetAttr`. These are part of the implementation of indexing and attribute lookup in the native syntax expression language too, so the new error messages will apply to problems using those operators. ([#474](https://github.com/hashicorp/hcl/pull/474)) ## v2.10.0 (April 20, 2021) ### Enhancements * dynblock,hcldec: Using dynblock in conjunction with hcldec can now decode blocks with unknown dynamic for_each arguments as entirely unknown values ([#461](https://github.com/hashicorp/hcl/pull/461)) * hclsyntax: Some syntax errors during parsing of the inside of `${` ... `}` template interpolation sequences will now produce an extra hint message about the need to escape as `$${` when trying to include interpolation syntax for other languages like shell scripting, AWS IAM policies, etc. ([#462](https://github.com/hashicorp/hcl/pull/462)) ## v2.9.1 (March 10, 2021) ### Bugs Fixed * hclsyntax: Fix panic for marked index value. ([#451](https://github.com/hashicorp/hcl/pull/451)) ## v2.9.0 (February 23, 2021) ### Enhancements * HCL's native syntax and JSON scanners -- and thus all of the other parsing components that build on top of them -- are now using Unicode 13 rules for text segmentation when counting text characters for the purpose of reporting source location columns. Previously HCL was using Unicode 12. Unicode 13 still uses the same algorithm but includes some additions to the character tables the algorithm is defined in terms of, to properly categorize new characters defined in Unicode 13. ## v2.8.2 (January 6, 2021) ### Bugs Fixed * hclsyntax: Fix panic for marked collection splat. ([#436](https://github.com/hashicorp/hcl/pull/436)) * hclsyntax: Fix panic for marked template loops. ([#437](https://github.com/hashicorp/hcl/pull/437)) * hclsyntax: Fix `for` expression marked conditional. ([#438](https://github.com/hashicorp/hcl/pull/438)) * hclsyntax: Mark objects with keys that are sensitive. ([#440](https://github.com/hashicorp/hcl/pull/440)) ## v2.8.1 (December 17, 2020) ### Bugs Fixed * hclsyntax: Fix panic when expanding marked function arguments. ([#429](https://github.com/hashicorp/hcl/pull/429)) * hclsyntax: Error when attempting to use a marked value as an object key. ([#434](https://github.com/hashicorp/hcl/pull/434)) * hclsyntax: Error when attempting to use a marked value as an object key in expressions. 
([#433](https://github.com/hashicorp/hcl/pull/433)) ## v2.8.0 (December 7, 2020) ### Enhancements * hclsyntax: Expression grouping parentheses will now be reflected by an explicit node in the AST, whereas before they were only considered during parsing. ([#426](https://github.com/hashicorp/hcl/pull/426)) ### Bugs Fixed * hclwrite: The parser will now correctly include the `(` and `)` tokens when an expression is surrounded by parentheses. Previously it would incorrectly recognize those tokens as being extraneous tokens outside of the expression. ([#426](https://github.com/hashicorp/hcl/pull/426)) * hclwrite: The formatter will now remove (rather than insert) spaces between the `!` (unary boolean "not") operator and its subsequent operand. ([#403](https://github.com/hashicorp/hcl/pull/403)) * hclsyntax: Unmark conditional values in expressions before checking their truthfulness ([#427](https://github.com/hashicorp/hcl/pull/427)) ## v2.7.2 (November 30, 2020) ### Bugs Fixed * gohcl: Fix panic when decoding into type containing value slices. ([#335](https://github.com/hashicorp/hcl/pull/335)) * hclsyntax: The unusual expression `null[*]` was previously always returning an unknown value, even though the rules for `[*]` normally call for it to return an empty tuple when applied to a null. As well as being a surprising result, it was particularly problematic because it violated the rule that a calling application may assume that an expression result will always be known unless the application itself introduces unknown values via the evaluation context. `null[*]` will now produce an empty tuple. ([#416](https://github.com/hashicorp/hcl/pull/416)) * hclsyntax: Fix panic when traversing a list, tuple, or map with cty "marks" ([#424](https://github.com/hashicorp/hcl/pull/424)) ## v2.7.1 (November 18, 2020) ### Bugs Fixed * hclwrite: Correctly handle blank quoted string block labels, instead of dropping them ([#422](https://github.com/hashicorp/hcl/pull/422)) ## v2.7.0 (October 14, 2020) ### Enhancements * json: There is a new function `ParseWithStartPos`, which allows overriding the starting position for parsing in case the given JSON bytes are a fragment of a larger document, such as might happen when decoding with `encoding/json` into a `json.RawMessage`. ([#389](https://github.com/hashicorp/hcl/pull/389)) * json: There is a new function `ParseExpression`, which allows parsing a JSON string directly in expression mode, whereas previously it was only possible to parse a JSON string in body mode. ([#381](https://github.com/hashicorp/hcl/pull/381)) * hclwrite: `Block` type now supports `SetType` and `SetLabels`, allowing surgical changes to the type and labels of an existing block without having to reconstruct the entire block. ([#340](https://github.com/hashicorp/hcl/pull/340)) ### Bugs Fixed * hclsyntax: Fix confusing error message for bitwise OR operator ([#380](https://github.com/hashicorp/hcl/pull/380)) * hclsyntax: Several bug fixes for using HCL with values containing cty "marks" ([#404](https://github.com/hashicorp/hcl/pull/404), [#406](https://github.com/hashicorp/hcl/pull/404), [#407](https://github.com/hashicorp/hcl/pull/404)) ## v2.6.0 (June 4, 2020) ### Enhancements * hcldec: Add a new `Spec`, `ValidateSpec`, which allows custom validation of values at decode-time. 
([#387](https://github.com/hashicorp/hcl/pull/387)) ### Bugs Fixed * hclsyntax: Fix panic with combination of sequences and null arguments ([#386](https://github.com/hashicorp/hcl/pull/386)) * hclsyntax: Fix handling of unknown values and sequences ([#386](https://github.com/hashicorp/hcl/pull/386)) ## v2.5.1 (May 14, 2020) ### Bugs Fixed * hclwrite: handle legacy dot access of numeric indexes. ([#369](https://github.com/hashicorp/hcl/pull/369)) * hclwrite: Fix panic for dotted full splat (`foo.*`) ([#374](https://github.com/hashicorp/hcl/pull/374)) ## v2.5.0 (May 6, 2020) ### Enhancements * hclwrite: Generate multi-line objects and maps. ([#372](https://github.com/hashicorp/hcl/pull/372)) ## v2.4.0 (Apr 13, 2020) ### Enhancements * The Unicode data tables that HCL uses to produce user-perceived "column" positions in diagnostics and other source ranges are now updated to Unicode 12.0.0, which will cause HCL to produce more accurate column numbers for combining characters introduced to Unicode since Unicode 9.0.0. ### Bugs Fixed * json: Fix panic when parsing malformed JSON. ([#358](https://github.com/hashicorp/hcl/pull/358)) ## v2.3.0 (Jan 3, 2020) ### Enhancements * ext/tryfunc: Optional functions `try` and `can` to include in your `hcl.EvalContext` when evaluating expressions, which allow users to make decisions based on the success of expressions. ([#330](https://github.com/hashicorp/hcl/pull/330)) * ext/typeexpr: Now has an optional function `convert` which you can include in your `hcl.EvalContext` when evaluating expressions, allowing users to convert values to specific type constraints using the type constraint expression syntax. ([#330](https://github.com/hashicorp/hcl/pull/330)) * ext/typeexpr: A new `cty` capsule type `typeexpr.TypeConstraintType` which, when used as either a type constraint for a function parameter or as a type constraint for a `hcldec` attribute specification will cause the given expression to be interpreted as a type constraint expression rather than a value expression. ([#330](https://github.com/hashicorp/hcl/pull/330)) * ext/customdecode: An optional extension that allows overriding the static decoding behavior for expressions either in function arguments or `hcldec` attribute specifications. ([#330](https://github.com/hashicorp/hcl/pull/330)) * ext/customdecode: New `cty` capsuletypes `customdecode.ExpressionType` and `customdecode.ExpressionClosureType` which, when used as either a type constraint for a function parameter or as a type constraint for a `hcldec` attribute specification will cause the given expression (and, for the closure type, also the `hcl.EvalContext` it was evaluated in) to be captured for later analysis, rather than immediately evaluated. ([#330](https://github.com/hashicorp/hcl/pull/330)) ## v2.2.0 (Dec 11, 2019) ### Enhancements * hcldec: Attribute evaluation (as part of `AttrSpec` or `BlockAttrsSpec`) now captures expression evaluation metadata in any errors it produces during type conversions, allowing for better feedback in calling applications that are able to make use of this metadata when printing diagnostic messages. ([#329](https://github.com/hashicorp/hcl/pull/329)) ### Bugs Fixed * hclsyntax: `IndexExpr`, `SplatExpr`, and `RelativeTraversalExpr` will now report a source range that covers all of their child expression nodes. Previously they would report only the operator part, such as `["foo"]`, `[*]`, or `.foo`, which was problematic for callers using source ranges for code analysis. 
([#328](https://github.com/hashicorp/hcl/pull/328)) * hclwrite: Parser will no longer panic when the input includes index, splat, or relative traversal syntax. ([#328](https://github.com/hashicorp/hcl/pull/328)) ## v2.1.0 (Nov 19, 2019) ### Enhancements * gohcl: When decoding into a struct value with some fields already populated, those values will be retained if not explicitly overwritten in the given HCL body, with similar overriding/merging behavior as `json.Unmarshal` in the Go standard library. * hclwrite: New interface to set the expression for an attribute to be a raw token sequence, with no special processing. This has some caveats, so if you intend to use it please refer to the godoc comments. ([#320](https://github.com/hashicorp/hcl/pull/320)) ### Bugs Fixed * hclwrite: The `Body.Blocks` method was returing the blocks in an indefined order, rather than preserving the order of declaration in the source input. ([#313](https://github.com/hashicorp/hcl/pull/313)) * hclwrite: The `TokensForTraversal` function (and thus in turn the `Body.SetAttributeTraversal` method) was not correctly handling index steps in traversals, and thus producing invalid results. ([#319](https://github.com/hashicorp/hcl/pull/319)) ## v2.0.0 (Oct 2, 2019) Initial release of HCL 2, which is a new implementating combining the HCL 1 language with the HIL expression language to produce a single language supporting both nested configuration structures and arbitrary expressions. HCL 2 has an entirely new Go library API and so is _not_ a drop-in upgrade relative to HCL 1. It's possible to import both versions of HCL into a single program using Go's _semantic import versioning_ mechanism: ``` import ( hcl1 "github.com/hashicorp/hcl" hcl2 "github.com/hashicorp/hcl/v2" ) ``` --- Prior to v2.0.0 there was not a curated changelog. Consult the git history from the latest v1.x.x tag for information on the changes to HCL 1. hcl-2.14.1/LICENSE000066400000000000000000000371501431334125700133630ustar00rootroot00000000000000Mozilla Public License, version 2.0 1. Definitions 1.1. “Contributor” means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. 1.2. “Contributor Version” means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor’s Contribution. 1.3. “Contribution” means Covered Software of a particular Contributor. 1.4. “Covered Software” means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. 1.5. “Incompatible With Secondary Licenses” means a. that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or b. that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. 1.6. “Executable Form” means any form of the work other than Source Code Form. 1.7. “Larger Work” means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. 1.8. “License” means this document. 1.9. “Licensable” means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. 1.10. “Modifications” means any of the following: a. 
any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or b. any new file in Source Code Form that contains any Covered Software. 1.11. “Patent Claims” of a Contributor means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. 1.12. “Secondary License” means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. 1.13. “Source Code Form” means the form of the work preferred for making modifications. 1.14. “You” (or “Your”) means an individual or a legal entity exercising rights under this License. For legal entities, “You” includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, “control” means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. 2. License Grants and Conditions 2.1. Grants Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: a. under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and b. under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. 2.2. Effective Date The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. 2.3. Limitations on Grant Scope The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: a. for any code that a Contributor has removed from Covered Software; or b. for infringements caused by: (i) Your and any other third party’s modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or c. under Patent Claims infringed by Covered Software in the absence of its Contributions. This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). 2.4. Subsequent Licenses No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). 2.5. 
Representation Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. 2.6. Fair Use This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. 2.7. Conditions Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. 3. Responsibilities 3.1. Distribution of Source Form All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients’ rights in the Source Code Form. 3.2. Distribution of Executable Form If You distribute Covered Software in Executable Form then: a. such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and b. You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients’ rights in the Source Code Form under this License. 3.3. Distribution of a Larger Work You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s). 3.4. Notices You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. 3.5. Application of Additional Terms You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, You may do so only on Your own behalf, and not on behalf of any Contributor. You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction. 4. 
Inability to Comply Due to Statute or Regulation If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. 5. Termination 5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. 5.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. 6. Disclaimer of Warranty Covered Software is provided under this License on an “as is” basis, without warranty of any kind, either expressed, implied, or statutory, including, without limitation, warranties that the Covered Software is free of defects, merchantable, fit for a particular purpose or non-infringing. The entire risk as to the quality and performance of the Covered Software is with You. Should any Covered Software prove defective in any respect, You (not any Contributor) assume the cost of any necessary servicing, repair, or correction. This disclaimer of warranty constitutes an essential part of this License. No use of any Covered Software is authorized under this License except under this disclaimer. 7. Limitation of Liability Under no circumstances and under no legal theory, whether tort (including negligence), contract, or otherwise, shall any Contributor, or anyone who distributes Covered Software as permitted above, be liable to You for any direct, indirect, special, incidental, or consequential damages of any character including, without limitation, damages for lost profits, loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses, even if such party shall have been informed of the possibility of such damages. 
This limitation of liability shall not apply to liability for death or personal injury resulting from such party’s negligence to the extent applicable law prohibits such limitation. Some jurisdictions do not allow the exclusion or limitation of incidental or consequential damages, so this exclusion and limitation may not apply to You. 8. Litigation Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. Nothing in this Section shall prevent a party’s ability to bring cross-claims or counter-claims. 9. Miscellaneous This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. 10. Versions of the License 10.1. New Versions Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. 10.2. Effect of New Versions You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. 10.3. Modified Versions If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. Exhibit A - Source Code Form License Notice This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. You may add additional accurate notices of copyright ownership. Exhibit B - “Incompatible With Secondary Licenses” Notice This Source Code Form is “Incompatible With Secondary Licenses”, as defined by the Mozilla Public License, v. 2.0. hcl-2.14.1/README.md000066400000000000000000000162721431334125700136370ustar00rootroot00000000000000# HCL HCL is a toolkit for creating structured configuration languages that are both human- and machine-friendly, for use with command-line tools. Although intended to be generally useful, it is primarily targeted towards devops tools, servers, etc. > **NOTE:** This is major version 2 of HCL, whose Go API is incompatible with > major version 1. Both versions are available for selection in Go Modules > projects. 
> HCL 2 _cannot_ be imported from Go projects that are not using Go Modules.
> For more information, see
> [our version selection guide](https://github.com/hashicorp/hcl/wiki/Version-Selection).

HCL has both a _native syntax_, intended to be pleasant to read and write for
humans, and a JSON-based variant that is easier for machines to generate and parse.

The HCL native syntax is inspired by [libucl](https://github.com/vstakhov/libucl),
[nginx configuration](http://nginx.org/en/docs/beginners_guide.html#conf_structure),
and others. It includes an expression syntax that allows basic inline computation
and, with support from the calling application, use of variables and functions for
more dynamic configuration languages.

HCL provides a set of constructs that can be used by a calling application to
construct a configuration language. The application defines which attribute names
and nested block types are expected, and HCL parses the configuration file,
verifies that it conforms to the expected structure, and returns high-level objects
that the application can use for further processing.

```go
package main

import (
	"log"

	"github.com/hashicorp/hcl/v2/hclsimple"
)

type Config struct {
	IOMode  string        `hcl:"io_mode"`
	Service ServiceConfig `hcl:"service,block"`
}

type ServiceConfig struct {
	Protocol   string          `hcl:"protocol,label"`
	Type       string          `hcl:"type,label"`
	ListenAddr string          `hcl:"listen_addr"`
	Processes  []ProcessConfig `hcl:"process,block"`
}

type ProcessConfig struct {
	Type    string   `hcl:"type,label"`
	Command []string `hcl:"command"`
}

func main() {
	var config Config
	err := hclsimple.DecodeFile("config.hcl", nil, &config)
	if err != nil {
		log.Fatalf("Failed to load configuration: %s", err)
	}
	log.Printf("Configuration is %#v", config)
}
```

A lower-level API is available for applications that need more control over the
parsing, decoding, and evaluation of configuration. For more information, see
[the package documentation](https://pkg.go.dev/github.com/hashicorp/hcl/v2).

## Why?

Newcomers to HCL often ask: why not JSON, YAML, etc?

Whereas JSON and YAML are formats for serializing data structures, HCL is a syntax
and API specifically designed for building structured configuration formats.

HCL attempts to strike a compromise between generic serialization formats such as
JSON and configuration formats built around full programming languages such as
Ruby. HCL syntax is designed to be easily read and written by humans, and allows
_declarative_ logic to permit its use in more complex applications.

HCL is intended as a base syntax for configuration formats built around key-value
pairs and hierarchical blocks whose structure is well-defined by the calling
application, and this definition of the configuration structure allows for better
error messages and more convenient definition within the calling application.

It can't be denied that JSON is very convenient as a _lingua franca_ for
interoperability between different pieces of software. Because of this, HCL defines
a common configuration model that can be parsed from either its native syntax or
from a well-defined equivalent JSON structure. This allows configuration to be
provided as a mixture of human-authored configuration files in the native syntax
and machine-generated files in JSON.

## Information Model and Syntax

HCL is built around two primary concepts: _attributes_ and _blocks_.
In native syntax, a configuration file for a hypothetical application might look something like this: ```hcl io_mode = "async" service "http" "web_proxy" { listen_addr = "127.0.0.1:8080" process "main" { command = ["/usr/local/bin/awesome-app", "server"] } process "mgmt" { command = ["/usr/local/bin/awesome-app", "mgmt"] } } ``` The JSON equivalent of this configuration is the following: ```json { "io_mode": "async", "service": { "http": { "web_proxy": { "listen_addr": "127.0.0.1:8080", "process": { "main": { "command": ["/usr/local/bin/awesome-app", "server"] }, "mgmt": { "command": ["/usr/local/bin/awesome-app", "mgmt"] }, } } } } } ``` Regardless of which syntax is used, the API within the calling application is the same. It can either work directly with the low-level attributes and blocks, for more advanced use-cases, or it can use one of the _decoder_ packages to declaratively extract into either Go structs or dynamic value structures. Attribute values can be expressions as well as just literal values: ```hcl # Arithmetic with literals and application-provided variables sum = 1 + addend # String interpolation and templates message = "Hello, ${name}!" # Application-provided functions shouty_message = upper(message) ``` Although JSON syntax doesn't permit direct use of expressions, the interpolation syntax allows use of arbitrary expressions within JSON strings: ```json { "sum": "${1 + addend}", "message": "Hello, ${name}!", "shouty_message": "${upper(message)}" } ``` For more information, see the detailed specifications: * [Syntax-agnostic Information Model](spec.md) * [HCL Native Syntax](hclsyntax/spec.md) * [JSON Representation](json/spec.md) ## Changes in 2.0 Version 2.0 of HCL combines the features of HCL 1.0 with those of the interpolation language HIL to produce a single configuration language that supports arbitrary expressions. This new version has a completely new parser and Go API, with no direct migration path. Although the syntax is similar, the implementation takes some very different approaches to improve on some "rough edges" that existed with the original implementation and to allow for more robust error handling. It's possible to import both HCL 1 and HCL 2 into the same program using Go's _semantic import versioning_ mechanism: ```go import ( hcl1 "github.com/hashicorp/hcl" hcl2 "github.com/hashicorp/hcl/v2" ) ``` ## Acknowledgements HCL was heavily inspired by [libucl](https://github.com/vstakhov/libucl), by [Vsevolod Stakhov](https://github.com/vstakhov). HCL and HIL originate in [HashiCorp Terraform](https://terraform.io/), with the original parsers for each written by [Mitchell Hashimoto](https://github.com/mitchellh). The original HCL parser was ported to pure Go (from yacc) by [Fatih Arslan](https://github.com/fatih). The structure-related portions of the new native syntax parser build on that work. The original HIL parser was ported to pure Go (from yacc) by [Martin Atkins](https://github.com/apparentlymart). The expression-related portions of the new native syntax parser build on that work. HCL 2, which merged the original HCL and HIL languages into this single new language, builds on design and prototyping work by [Martin Atkins](https://github.com/apparentlymart) in [zcl](https://github.com/zclconf/go-zcl). 
hcl-2.14.1/cmd/000077500000000000000000000000001431334125700131135ustar00rootroot00000000000000hcl-2.14.1/cmd/hcldec/000077500000000000000000000000001431334125700143355ustar00rootroot00000000000000hcl-2.14.1/cmd/hcldec/README.md000066400000000000000000000056571431334125700156310ustar00rootroot00000000000000# hcldec `hcldec` is a command line tool that transforms HCL input into JSON output using a decoding specification given by the user. This tool is intended as a "glue" tool, with use-cases like the following: * Define a HCL-based configuration format for a third-party tool that takes JSON as input, and then translate the HCL configuration into JSON before running the tool. (See [the `npm-package` example](examples/npm-package).) * Use HCL from languages where a HCL parser/decoder is not yet available. At the time of writing, that's any language other than Go. * In particular, define a HCL-based configuration format for a shell script and then use `jq` to load the result into environment variables for further processing. (See [the `sh-config-file` example](examples/sh-config-file).) ## Installation If you have a working Go development environment, you can install this tool with `go get` in the usual way: ``` $ go get -u github.com/hashicorp/hcl/v2/cmd/hcldec ``` This will install `hcldec` in `$GOPATH/bin`, which usually places it into your shell `PATH` so you can then run it as `hcldec`. ## Usage ``` usage: hcldec --spec= [options] [hcl-file ...] -o, --out string write to the given file, instead of stdout -s, --spec string path to spec file (required) -V, --vars json-or-file provide variables to the given configuration file(s) -v, --version show the version number and immediately exit ``` The most important step in using `hcldec` is to write the specification that defines how to interpret the given configuration files and translate them into JSON. The following is a simple specification that creates a JSON object from two top-level attributes in the input configuration: ```hcl object { attr "name" { type = string required = true } attr "is_member" { type = bool } } ``` Specification files are conventionally kept in files with a `.hcldec` extension. We'll call this one `example.hcldec`. With the above specification, the following input file `example.conf` is valid: ```hcl name = "Raul" ``` The spec and the input file can then be provided to `hcldec` to extract a JSON representation: ``` $ hcldec --spec=example.hcldec example.conf {"name": "Raul"} ``` The specification defines both how to map the input into a JSON data structure and what input is valid. The `required = true` specified for the `name` allows `hcldec` to detect and raise an error when an attribute of that name is not provided: ``` $ hcldec --spec=example.hcldec typo.conf Error: Unsupported attribute on example.conf line 1: 1: namme = "Juan" An attribute named "namme" is not expected here. Did you mean "name"? Error: Missing required attribute on example.conf line 2: The attribute "name" is required, but no definition was found. ``` ## Further Reading For more details on the `.hcldec` specification file format, see [the spec file documentation](spec-format.md). 
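## Providing Variables

The usage summary above also lists a `--vars` (`-V`) option, which supplies
variables to the evaluation context used when decoding the input files. As a rough
illustration only — the variable name `first_name` and the rewritten contents of
`example.conf` below are hypothetical, reusing the `example.hcldec` spec from the
Usage section — an input attribute can refer to such a variable as an expression:

```hcl
name = first_name
```

```
$ hcldec --spec=example.hcldec --vars='{"first_name": "Raul"}' example.conf
{"name": "Raul"}
```

The argument to `--vars` may be either an inline JSON object, as shown above, or a
path to a file containing variable definitions in HCL or JSON syntax.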
hcl-2.14.1/cmd/hcldec/diags_json.go000066400000000000000000000042511431334125700170060ustar00rootroot00000000000000package main import ( "encoding/json" "io" "github.com/hashicorp/hcl/v2" ) type jsonDiagWriter struct { w io.Writer diags hcl.Diagnostics } var _ hcl.DiagnosticWriter = &jsonDiagWriter{} func (wr *jsonDiagWriter) WriteDiagnostic(diag *hcl.Diagnostic) error { wr.diags = append(wr.diags, diag) return nil } func (wr *jsonDiagWriter) WriteDiagnostics(diags hcl.Diagnostics) error { wr.diags = append(wr.diags, diags...) return nil } func (wr *jsonDiagWriter) Flush() error { if len(wr.diags) == 0 { return nil } type PosJSON struct { Line int `json:"line"` Column int `json:"column"` Byte int `json:"byte"` } type RangeJSON struct { Filename string `json:"filename"` Start PosJSON `json:"start"` End PosJSON `json:"end"` } type DiagnosticJSON struct { Severity string `json:"severity"` Summary string `json:"summary"` Detail string `json:"detail,omitempty"` Subject *RangeJSON `json:"subject,omitempty"` } type DiagnosticsJSON struct { Diagnostics []DiagnosticJSON `json:"diagnostics"` } diagsJSON := make([]DiagnosticJSON, 0, len(wr.diags)) for _, diag := range wr.diags { var diagJSON DiagnosticJSON switch diag.Severity { case hcl.DiagError: diagJSON.Severity = "error" case hcl.DiagWarning: diagJSON.Severity = "warning" default: diagJSON.Severity = "(unknown)" // should never happen } diagJSON.Summary = diag.Summary diagJSON.Detail = diag.Detail if diag.Subject != nil { diagJSON.Subject = &RangeJSON{} sJSON := diagJSON.Subject rng := diag.Subject sJSON.Filename = rng.Filename sJSON.Start.Line = rng.Start.Line sJSON.Start.Column = rng.Start.Column sJSON.Start.Byte = rng.Start.Byte sJSON.End.Line = rng.End.Line sJSON.End.Column = rng.End.Column sJSON.End.Byte = rng.End.Byte } diagsJSON = append(diagsJSON, diagJSON) } src, err := json.MarshalIndent(DiagnosticsJSON{diagsJSON}, "", " ") if err != nil { return err } _, err = wr.w.Write(src) wr.w.Write([]byte{'\n'}) return err } type flusher interface { Flush() error } func flush(maybeFlusher interface{}) error { if f, ok := maybeFlusher.(flusher); ok { return f.Flush() } return nil } hcl-2.14.1/cmd/hcldec/examples/000077500000000000000000000000001431334125700161535ustar00rootroot00000000000000hcl-2.14.1/cmd/hcldec/examples/npm-package/000077500000000000000000000000001431334125700203365ustar00rootroot00000000000000hcl-2.14.1/cmd/hcldec/examples/npm-package/example.npmhcl000066400000000000000000000002761431334125700232010ustar00rootroot00000000000000name = "hello-world" version = "v0.0.1" author { name = "Иван Петрович Сидоров" } contributor { name = "Juan Pérez" } dependencies = { left-pad = "1.2.0" } hcl-2.14.1/cmd/hcldec/examples/npm-package/spec.hcldec000066400000000000000000000042631431334125700224410ustar00rootroot00000000000000object { attr "name" { type = string required = true } attr "version" { type = string required = true } attr "description" { type = string } attr "keywords" { type = list(string) } attr "homepage" { # "homepage_url" in input file is translated to "homepage" in output name = "homepage_url" } block "bugs" { object { attr "url" { type = string } attr "email" { type = string } } } attr "license" { type = string } block "author" { object { attr "name" { type = string } attr "email" { type = string } attr "url" { type = string } } } block_list "contributors" { block_type = "contributor" object { attr "name" { type = string } attr "email" { type = string } attr "url" { type = string } } } attr "files" { type = list(string) } 
attr "main" { type = string } attr "bin" { type = map(string) } attr "man" { type = list(string) } attr "directories" { type = map(string) } block "repository" { object { attr "type" { type = string required = true } attr "url" { type = string required = true } } } attr "scripts" { type = map(string) } attr "config" { type = map(string) } attr "dependencies" { type = map(string) } attr "devDependencies" { name = "dev_dependencies" type = map(string) } attr "peerDependencies" { name = "peer_dependencies" type = map(string) } attr "bundledDependencies" { name = "bundled_dependencies" type = map(string) } attr "optionalDependencies" { name = "optional_dependencies" type = map(string) } attr "engines" { type = map(string) } attr "os" { type = list(string) } attr "cpu" { type = list(string) } attr "prefer_global" { type = bool } default "private" { attr { name = "private" type = bool } literal { value = false } } attr "publishConfig" { type = map(any) } } hcl-2.14.1/cmd/hcldec/examples/sh-config-file/000077500000000000000000000000001431334125700207455ustar00rootroot00000000000000hcl-2.14.1/cmd/hcldec/examples/sh-config-file/example.conf000066400000000000000000000001451431334125700232470ustar00rootroot00000000000000name = "Juan" friend { name = "John" } friend { name = "Yann" } friend { name = "Ermintrude" } hcl-2.14.1/cmd/hcldec/examples/sh-config-file/example.sh000077500000000000000000000015461431334125700227450ustar00rootroot00000000000000#!/bin/bash set -euo pipefail # All paths from this point on are relative to the directory containing this # script, for simplicity's sake. cd "$( dirname "${BASH_SOURCE[0]}" )" # Read the config file using hcldec and then use jq to extract values in a # shell-friendly form. jq will ensure that the values are properly quoted and # escaped for consumption by the shell. CONFIG_VARS="$(hcldec --spec=spec.hcldec example.conf | jq -r '@sh "NAME=\(.name) GREETING=\(.greeting) FRIENDS=(\(.friends))"')" if [ $? != 0 ]; then # If hcldec or jq failed then it has already printed out some error messages # and so we can bail out. exit $? fi # Import our settings into our environment eval "$CONFIG_VARS" # ...and now, some contrived usage of the settings we loaded: echo "$GREETING $NAME!" for name in ${FRIENDS[@]}; do echo "$GREETING $name, too!" done hcl-2.14.1/cmd/hcldec/examples/sh-config-file/spec.hcldec000066400000000000000000000005311431334125700230420ustar00rootroot00000000000000object { attr "name" { type = string required = true } default "greeting" { attr { name = "greeting" type = string } literal { value = "Hello" } } block_list "friends" { block_type = "friend" attr { name = "name" type = string required = true } } } hcl-2.14.1/cmd/hcldec/main.go000066400000000000000000000225651431334125700156220ustar00rootroot00000000000000package main import ( "bytes" "encoding/json" "fmt" "io/ioutil" "os" "strings" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hcldec" "github.com/hashicorp/hcl/v2/hclparse" flag "github.com/spf13/pflag" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" ctyjson "github.com/zclconf/go-cty/cty/json" "golang.org/x/crypto/ssh/terminal" ) const versionStr = "0.0.1-dev" // vars is populated from --vars arguments on the command line, via a flag // registration in init() below. 
var vars = &varSpecs{} var ( specFile = flag.StringP("spec", "s", "", "path to spec file (required)") outputFile = flag.StringP("out", "o", "", "write to the given file, instead of stdout") diagsFormat = flag.StringP("diags", "", "", "format any returned diagnostics in the given format; currently only \"json\" is accepted") showVarRefs = flag.BoolP("var-refs", "", false, "rather than decoding input, produce a JSON description of the variables referenced by it") withType = flag.BoolP("with-type", "", false, "include an additional object level at the top describing the HCL-oriented type of the result value") showVersion = flag.BoolP("version", "v", false, "show the version number and immediately exit") keepNulls = flag.BoolP("keep-nulls", "", false, "retain object properties that have null as their value (they are removed by default)") ) var parser = hclparse.NewParser() var diagWr hcl.DiagnosticWriter // initialized in init func init() { flag.VarP(vars, "vars", "V", "provide variables to the given configuration file(s)") } func main() { flag.Usage = usage flag.Parse() if *showVersion { fmt.Println(versionStr) os.Exit(0) } args := flag.Args() switch *diagsFormat { case "": color := terminal.IsTerminal(int(os.Stderr.Fd())) w, _, err := terminal.GetSize(int(os.Stdout.Fd())) if err != nil { w = 80 } diagWr = hcl.NewDiagnosticTextWriter(os.Stderr, parser.Files(), uint(w), color) case "json": diagWr = &jsonDiagWriter{w: os.Stderr} default: fmt.Fprintf(os.Stderr, "Invalid diagnostics format %q: only \"json\" is supported.\n", *diagsFormat) os.Exit(2) } err := realmain(args) if err != nil { fmt.Fprintf(os.Stderr, "Error: %s\n\n", err.Error()) os.Exit(1) } } func realmain(args []string) error { if *specFile == "" { return fmt.Errorf("the --spec=... argument is required") } var diags hcl.Diagnostics specContent, specDiags := loadSpecFile(*specFile) diags = append(diags, specDiags...) if specDiags.HasErrors() { diagWr.WriteDiagnostics(diags) flush(diagWr) os.Exit(2) } spec := specContent.RootSpec ctx := &hcl.EvalContext{ Variables: map[string]cty.Value{}, Functions: map[string]function.Function{}, } for name, val := range specContent.Variables { ctx.Variables[name] = val } for name, f := range specContent.Functions { ctx.Functions[name] = f } if len(*vars) != 0 { for i, varsSpec := range *vars { var vals map[string]cty.Value var valsDiags hcl.Diagnostics if strings.HasPrefix(strings.TrimSpace(varsSpec), "{") { // literal JSON object on the command line vals, valsDiags = parseVarsArg(varsSpec, i) } else { // path to a file containing either HCL or JSON (by file extension) vals, valsDiags = parseVarsFile(varsSpec) } diags = append(diags, valsDiags...) for k, v := range vals { ctx.Variables[k] = v } } } // If we have empty context elements then we'll nil them out so that // we'll produce e.g. "variables are not allowed" errors instead of // "variable not found" errors. if len(ctx.Variables) == 0 { ctx.Variables = nil } if len(ctx.Functions) == 0 { ctx.Functions = nil } if ctx.Variables == nil && ctx.Functions == nil { ctx = nil } var bodies []hcl.Body if len(args) == 0 { src, err := ioutil.ReadAll(os.Stdin) if err != nil { return fmt.Errorf("failed to read stdin: %s", err) } f, fDiags := parser.ParseHCL(src, "") diags = append(diags, fDiags...) 
if !fDiags.HasErrors() { bodies = append(bodies, f.Body) } } else { for _, filename := range args { var f *hcl.File var fDiags hcl.Diagnostics if strings.HasSuffix(filename, ".json") { f, fDiags = parser.ParseJSONFile(filename) } else { f, fDiags = parser.ParseHCLFile(filename) } diags = append(diags, fDiags...) if !fDiags.HasErrors() { bodies = append(bodies, f.Body) } } } if diags.HasErrors() { diagWr.WriteDiagnostics(diags) flush(diagWr) os.Exit(2) } var body hcl.Body switch len(bodies) { case 0: // should never happen, but... okay? body = hcl.EmptyBody() case 1: body = bodies[0] default: body = hcl.MergeBodies(bodies) } if *showVarRefs { vars := hcldec.Variables(body, spec) return showVarRefsJSON(vars, ctx) } val, decDiags := hcldec.Decode(body, spec, ctx) diags = append(diags, decDiags...) if diags.HasErrors() { diagWr.WriteDiagnostics(diags) flush(diagWr) os.Exit(2) } wantType := val.Type() if *withType { // We'll instead ask to encode as dynamic, which will make the // marshaler include type information. wantType = cty.DynamicPseudoType } out, err := ctyjson.Marshal(val, wantType) if err != nil { return err } // hcldec will include explicit nulls where an ObjectSpec has a spec // that refers to a missing item, but that'll probably be annoying for // a consumer of our output to deal with so we'll just strip those // out and reduce to only the non-null values. if !*keepNulls { out = stripJSONNullProperties(out) } target := os.Stdout if *outputFile != "" { target, err = os.OpenFile(*outputFile, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, os.ModePerm) if err != nil { return fmt.Errorf("can't open %s for writing: %s", *outputFile, err) } } fmt.Fprintf(target, "%s\n", out) return nil } func usage() { fmt.Fprintf(os.Stderr, "usage: hcldec --spec= [options] [hcl-file ...]\n") flag.PrintDefaults() os.Exit(2) } func showVarRefsJSON(vars []hcl.Traversal, ctx *hcl.EvalContext) error { type PosJSON struct { Line int `json:"line"` Column int `json:"column"` Byte int `json:"byte"` } type RangeJSON struct { Filename string `json:"filename"` Start PosJSON `json:"start"` End PosJSON `json:"end"` } type StepJSON struct { Kind string `json:"kind"` Name string `json:"name,omitempty"` Key json.RawMessage `json:"key,omitempty"` Range RangeJSON `json:"range"` } type TraversalJSON struct { RootName string `json:"root_name"` Value json.RawMessage `json:"value,omitempty"` Steps []StepJSON `json:"steps"` Range RangeJSON `json:"range"` } ret := make([]TraversalJSON, 0, len(vars)) for _, traversal := range vars { tJSON := TraversalJSON{ Steps: make([]StepJSON, 0, len(traversal)), } for _, step := range traversal { var sJSON StepJSON rng := step.SourceRange() sJSON.Range.Filename = rng.Filename sJSON.Range.Start.Line = rng.Start.Line sJSON.Range.Start.Column = rng.Start.Column sJSON.Range.Start.Byte = rng.Start.Byte sJSON.Range.End.Line = rng.End.Line sJSON.Range.End.Column = rng.End.Column sJSON.Range.End.Byte = rng.End.Byte switch ts := step.(type) { case hcl.TraverseRoot: sJSON.Kind = "root" sJSON.Name = ts.Name tJSON.RootName = ts.Name case hcl.TraverseAttr: sJSON.Kind = "attr" sJSON.Name = ts.Name case hcl.TraverseIndex: sJSON.Kind = "index" src, err := ctyjson.Marshal(ts.Key, ts.Key.Type()) if err == nil { sJSON.Key = json.RawMessage(src) } default: // Should never get here, since the above should be exhaustive // for all possible traversal step types. sJSON.Kind = "(unknown)" } tJSON.Steps = append(tJSON.Steps, sJSON) } // Best effort, we'll try to include the current known value of this // traversal, if any. 
val, diags := traversal.TraverseAbs(ctx) if !diags.HasErrors() { enc, err := ctyjson.Marshal(val, val.Type()) if err == nil { tJSON.Value = json.RawMessage(enc) } } rng := traversal.SourceRange() tJSON.Range.Filename = rng.Filename tJSON.Range.Start.Line = rng.Start.Line tJSON.Range.Start.Column = rng.Start.Column tJSON.Range.Start.Byte = rng.Start.Byte tJSON.Range.End.Line = rng.End.Line tJSON.Range.End.Column = rng.End.Column tJSON.Range.End.Byte = rng.End.Byte ret = append(ret, tJSON) } out, err := json.MarshalIndent(ret, "", " ") if err != nil { return fmt.Errorf("failed to marshal variable references as JSON: %s", err) } target := os.Stdout if *outputFile != "" { target, err = os.OpenFile(*outputFile, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, os.ModePerm) if err != nil { return fmt.Errorf("can't open %s for writing: %s", *outputFile, err) } } fmt.Fprintf(target, "%s\n", out) return nil } func stripJSONNullProperties(src []byte) []byte { dec := json.NewDecoder(bytes.NewReader(src)) dec.UseNumber() var v interface{} err := dec.Decode(&v) if err != nil { // We expect valid JSON panic(err) } v = stripNullMapElements(v) new, err := json.Marshal(v) if err != nil { panic(err) } return new } func stripNullMapElements(v interface{}) interface{} { switch tv := v.(type) { case map[string]interface{}: for k, ev := range tv { if ev == nil { delete(tv, k) } else { tv[k] = stripNullMapElements(ev) } } return v case []interface{}: for i, ev := range tv { tv[i] = stripNullMapElements(ev) } return v default: return v } } hcl-2.14.1/cmd/hcldec/spec-format.md000066400000000000000000000403171431334125700171040ustar00rootroot00000000000000# `hcldec` spec format The `hcldec` spec format instructs [`hcldec`](README.md) on how to validate one or more configuration files given in the HCL syntax and how to translate the result into JSON format. The spec format is itself built from HCL syntax, with each HCL block serving as a _spec_ whose block type and contents together describe a single mapping action and, in most cases, a validation constraint. Each spec block produces one JSON value. A spec _file_ must have a single top-level spec block that describes the top-level JSON value `hcldec` will return, and that spec block may have other nested spec blocks (depending on its type) that produce nested structures and additional validation constraints. The most common usage of `hcldec` is to produce a JSON object whose properties are derived from the top-level content of the input file. In this case, the root of the given spec file will have an `object` spec block whose contents describe how each of the object's properties are to be populated using nested spec blocks. Each spec is evaluated in the context of an HCL _body_, which is the HCL terminology for one level of nesting in a configuration file. The top-level objects in a file all belong to the root body of that file, and then each nested block has its own body containing the elements within that block. Some spec types select a new body as the context for their nested specs, allowing nested HCL structures to be decoded. ## Spec Block Types The following sections describe the different block types that can be used to define specs within a spec file. ### `object` spec blocks The `object` spec type is the most commonly used at the root of a spec file. Its result is a JSON object whose properties are set based on any nested spec blocks: ```hcl object { attr "name" { type = string } block "address" { object { attr "street" { type = string } # ... 
    }
  }
}
```

Nested spec blocks inside `object` must always have an extra block label
(`"name"`, `"address"` and `"street"` in the above example) that specifies
the name of the property that should be created in the JSON object result.
This label also acts as a default name selector for the nested spec, allowing
the `attr` blocks in the above example to omit the usually-required `name`
argument in cases where the HCL input name and JSON output name are the same.

An `object` spec block creates no validation constraints, but it passes on
any validation constraints created by the nested specs.

### `array` spec blocks

The `array` spec type produces a JSON array whose elements are set based on
any nested spec blocks:

```hcl
array {
  attr {
    name = "first_element"
    type = string
  }
  attr {
    name = "second_element"
    type = string
  }
}
```

An `array` spec block creates no validation constraints, but it passes on
any validation constraints created by the nested specs.

### `attr` spec blocks

The `attr` spec type reads the value of an attribute in the current body and
returns that value as its result. It also creates validation constraints for
the given attribute name and its value.

```hcl
attr {
  name = "document_root"
  type = string
  required = true
}
```

`attr` spec blocks accept the following arguments:

* `name` (required) - The attribute name to expect within the HCL input file.
  This may be omitted when a default name selector is created by a parent
  `object` spec, if the input attribute name should match the output JSON
  object property name.

* `type` (optional) - A [type expression](#type-expressions) that the given
  attribute value must conform to. If this argument is set, `hcldec` will
  automatically convert the given input value to this type or produce an
  error if that is not possible.

* `required` (optional) - If set to `true`, `hcldec` will produce an error
  if a value is not provided for the source attribute.

`attr` is a leaf spec type, so no nested spec blocks are permitted.

### `block` spec blocks

The `block` spec type applies one nested spec block to the contents of a
block within the current body and returns the result of that spec. It also
creates validation constraints for the given block type name.

```hcl
block {
  block_type = "logging"

  object {
    attr "level" {
      type = string
    }
    attr "file" {
      type = string
    }
  }
}
```

`block` spec blocks accept the following arguments:

* `block_type` (required) - The block type name to expect within the HCL
  input file. This may be omitted when a default name selector is created
  by a parent `object` spec, if the input block type name should match the
  output JSON object property name.

* `required` (optional) - If set to `true`, `hcldec` will produce an error
  if a block of the specified type is not present in the current body.

`block` creates a validation constraint that there must be zero or one blocks
of the given type name, or exactly one if `required` is set.

`block` expects a single nested spec block, which is applied to the body of
the block of the given type when it is present.

### `block_list` spec blocks

The `block_list` spec type is similar to `block`, but it accepts zero or
more blocks of a specified type rather than requiring zero or one. The
result is a JSON array with one entry per block of the given type.
```hcl
block_list {
  block_type = "log_file"

  object {
    attr "level" {
      type = string
    }
    attr "filename" {
      type = string
      required = true
    }
  }
}
```

`block_list` spec blocks accept the following arguments:

* `block_type` (required) - The block type name to expect within the HCL
  input file. This may be omitted when a default name selector is created
  by a parent `object` spec, if the input block type name should match the
  output JSON object property name.

* `min_items` (optional) - If set to a number greater than zero, `hcldec`
  will produce an error if fewer than the given number of blocks are present.

* `max_items` (optional) - If set to a number greater than zero, `hcldec`
  will produce an error if more than the given number of blocks are present.
  This attribute must be greater than or equal to `min_items` if both are set.

`block_list` creates a validation constraint on the number of blocks of the
given type that must be present.

`block_list` expects a single nested spec block, which is applied to the body
of each matching block to produce the resulting list items.

### `block_set` spec blocks

The `block_set` spec type behaves the same as `block_list` except that the
result is in no specific order and any duplicate items are removed.

```hcl
block_set {
  block_type = "log_file"

  object {
    attr "level" {
      type = string
    }
    attr "filename" {
      type = string
      required = true
    }
  }
}
```

The contents of `block_set` are the same as for `block_list`.

### `block_map` spec blocks

The `block_map` spec type is similar to `block`, but it accepts zero or more
blocks of a specified type rather than requiring zero or one. The result is
a JSON object, or possibly multiple nested JSON objects, whose properties
are derived from the labels set on each matching block.

```hcl
block_map {
  block_type = "log_file"
  labels = ["filename"]

  object {
    attr "level" {
      type = string
      required = true
    }
  }
}
```

`block_map` spec blocks accept the following arguments:

* `block_type` (required) - The block type name to expect within the HCL
  input file. This may be omitted when a default name selector is created
  by a parent `object` spec, if the input block type name should match the
  output JSON object property name.

* `labels` (required) - A list of user-oriented block label names. Each entry
  in this list creates one level of object within the output value, and
  requires one additional block header label on any child block of this type.
  Block header labels are the quoted strings that appear after the block type
  name but before the opening `{`.

`block_map` creates a validation constraint on the number of labels that
blocks of the given type must have.

`block_map` expects a single nested spec block, which is applied to the body
of each matching block to produce the resulting map items.

### `block_attrs` spec blocks

The `block_attrs` spec type is similar to an `attr` spec block of a map type,
but it produces a map from the attributes of a block rather than from an
attribute's expression.

```hcl
block_attrs {
  block_type = "variables"
  element_type = string
  required = false
}
```

This allows a map with user-defined keys to be produced within block syntax,
but due to the constraints of that syntax it also means that the user will be
unable to dynamically generate either individual key names using key
expressions or the entire map value using a `for` expression.

`block_attrs` spec blocks accept the following arguments:

* `block_type` (required) - The block type name to expect within the HCL
  input file.
  This may be omitted when a default name selector is created by a parent
  `object` spec, if the input block type name should match the output JSON
  object property name.

* `element_type` (required) - The value type to require for each of the
  attributes within a matched block. The resulting value will be a JSON
  object whose property values are of this type.

* `required` (optional) - If `true`, an error will be produced if a block
  of the given type is not present. If `false` -- the default -- an absent
  block will be indicated by producing `null`.

### `literal` spec blocks

The `literal` spec type returns a given literal value, and creates no
validation constraints. It is most commonly used with the `default` spec
type to create a fallback value, but can also be used e.g. to fill out
required properties in an `object` spec that do not correspond to any
construct in the input configuration.

```hcl
literal {
  value = "hello world"
}
```

`literal` spec blocks accept the following argument:

* `value` (required) - The value to return. This attribute may be an
  expression that uses [functions](#spec-definition-functions).

`literal` is a leaf spec type, so no nested spec blocks are permitted.

### `default` spec blocks

The `default` spec type evaluates a sequence of nested specs in turn and
returns the result of the first one that produces a non-null value. It
creates no validation constraints of its own, but passes on the validation
constraints from its first nested block.

```hcl
default {
  attr {
    name = "private"
    type = bool
  }
  literal {
    value = false
  }
}
```

A `default` spec block must have at least one nested spec block, and should
generally have at least two since otherwise the `default` wrapper is a no-op.

The second and any subsequent spec blocks are _fallback_ specs. These exhibit
their usual behavior but are not able to impose validation constraints on the
current body since they are not evaluated unless all prior specs produce
`null` as their result.

### `transform` spec blocks

The `transform` spec type evaluates one nested spec and then evaluates a
given expression with that nested spec result to produce a final value. It
creates no validation constraints of its own, but passes on the validation
constraints from its nested block.

```hcl
transform {
  attr {
    name = "size_in_mb"
    type = number
  }

  # Convert result to a size in bytes
  result = nested * 1024 * 1024
}
```

`transform` spec blocks accept the following argument:

* `result` (required) - The expression to evaluate on the result of the
  nested spec. The variable `nested` is defined when evaluating this
  expression, with the result value of the nested spec. The `result`
  expression may use [functions](#spec-definition-functions).

## Predefined Variables

`hcldec` accepts values for variables to expose into the input file's
expression scope as CLI options, and this is the most common way to pass
values since it allows them to be dynamically populated by the calling
application.

However, it's also possible to pre-define variables with constant values
within a spec file, using the top-level `variables` block type:

```hcl
variables {
  name = "Stephen"
}
```

Variables of the same name defined via the `hcldec` command line will
override predefined variables of the same name, so this mechanism can also
be used to provide defaults for variables that are overridden only in
certain contexts.
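For example, a spec file might predefine a `greeting` variable that callers
can optionally override on the command line. (The variable name and values
here are illustrative, not part of the spec format itself.)

```hcl
variables {
  greeting = "Hello"
}

object {
  attr "message" {
    type = string
  }
}
```

With an input file containing `message = "${greeting}, world!"`, running
`hcldec` with no variable options uses the predefined value, while passing
e.g. `--vars '{"greeting": "Howdy"}'` overrides it for that run.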
## Custom Functions The spec can make arbitrary HCL functions available in the input file's expression scope, and thus allow simple computation within the input file, in addition to HCL's built-in operators. Custom functions are defined in the spec file with the top-level `function` block type: ``` function "add_one" { params = [n] result = n + 1 } ``` Functions behave in a similar way to the `transform` spec type in that the given `result` attribute expression is evaluated with additional variables defined with the same names as the defined `params`. The [spec definition functions](#spec-definition-functions) can be used within custom function expressions, allowing them to be optionally exposed into the input file: ``` function "upper" { params = [str] result = upper(str) } function "min" { params = [] variadic_param = nums result = min(nums...) } ``` Custom functions defined in the spec cannot be called from the spec itself. ## Spec Definition Functions Certain expressions within a specification may use the following functions. The documentation for each spec type above specifies where functions may be used. * `abs(number)` returns the absolute (positive) value of the given number. * `coalesce(vals...)` returns the first non-null value given. * `concat(lists...)` concatenates together all of the given lists to produce a new list. * `hasindex(val, idx)` returns true if the expression `val[idx]` could succeed. * `int(number)` returns the integer portion of the given number, rounding towards zero. * `jsondecode(str)` interprets the given string as JSON and returns the resulting data structure. * `jsonencode(val)` returns a JSON-serialized version of the given value. * `length(collection)` returns the number of elements in the given collection (list, set, map, object, or tuple). * `lower(string)` returns the given string with all uppercase letters converted to lowercase. * `max(numbers...)` returns the greatest of the given numbers. * `min(numbers...)` returns the smallest of the given numbers. * `reverse(string)` returns the given string with all of the characters in reverse order. * `strlen(string)` returns the number of characters in the given string. * `substr(string, offset, length)` returns the requested substring of the given string. * `upper(string)` returns the given string with all lowercase letters converted to uppercase. Note that these expressions are valid in the context of the _spec_ file, not the _input_. Functions can be exposed into the input file using [Custom Functions](#custom-functions) within the spec, which may in turn refer to these spec definition functions. ## Type Expressions Type expressions are used to describe the expected type of an attribute, as an additional validation constraint. A type expression uses primitive type names and compound type constructors. A type constructor builds a new type based on one or more type expression arguments. The following type names and type constructors are supported: * `any` is a wildcard that accepts a value of any type. (In HCL terms, this is the _dynamic pseudo-type_.) * `string` is a Unicode string. * `number` is an arbitrary-precision floating point number. 
* `bool` is a boolean value (`true` or `false`) * `list(element_type)` constructs a list type with the given element type * `set(element_type)` constructs a set type with the given element type * `map(element_type)` constructs a map type with the given element type * `object({name1 = element_type, name2 = element_type, ...})` constructs an object type with the given attribute types. * `tuple([element_type, element_type, ...])` constructs a tuple type with the given element types. This can be used, for example, to require an array with a particular number of elements, or with elements of different types. The above types are as defined by [the HCL syntax-agnostic information model](../../spec.md). After validation, values are lowered to JSON's type system, which is a subset of the HCL type system. `null` is a valid value of any type, and not a type itself. hcl-2.14.1/cmd/hcldec/spec.go000066400000000000000000000376301431334125700156270ustar00rootroot00000000000000package main import ( "fmt" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/userfunc" "github.com/hashicorp/hcl/v2/gohcl" "github.com/hashicorp/hcl/v2/hcldec" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" ) type specFileContent struct { Variables map[string]cty.Value Functions map[string]function.Function RootSpec hcldec.Spec } var specCtx = &hcl.EvalContext{ Functions: specFuncs, } func loadSpecFile(filename string) (specFileContent, hcl.Diagnostics) { file, diags := parser.ParseHCLFile(filename) if diags.HasErrors() { return specFileContent{RootSpec: errSpec}, diags } vars, funcs, specBody, declDiags := decodeSpecDecls(file.Body) diags = append(diags, declDiags...) spec, specDiags := decodeSpecRoot(specBody) diags = append(diags, specDiags...) return specFileContent{ Variables: vars, Functions: funcs, RootSpec: spec, }, diags } func decodeSpecDecls(body hcl.Body) (map[string]cty.Value, map[string]function.Function, hcl.Body, hcl.Diagnostics) { funcs, body, diags := userfunc.DecodeUserFunctions(body, "function", func() *hcl.EvalContext { return specCtx }) content, body, moreDiags := body.PartialContent(&hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "variables", }, }, }) diags = append(diags, moreDiags...) vars := make(map[string]cty.Value) for _, block := range content.Blocks { // We only have one block type in our schema, so we can assume all // blocks are of that type. attrs, moreDiags := block.Body.JustAttributes() diags = append(diags, moreDiags...) for name, attr := range attrs { val, moreDiags := attr.Expr.Value(specCtx) diags = append(diags, moreDiags...) vars[name] = val } } return vars, funcs, body, diags } func decodeSpecRoot(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) { content, diags := body.Content(specSchemaUnlabelled) if len(content.Blocks) == 0 { if diags.HasErrors() { // If we already have errors then they probably explain // why we have no blocks, so we'll skip our additional // error message added below. 
return errSpec, diags } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing spec block", Detail: "A spec file must have exactly one root block specifying how to map to a JSON value.", Subject: body.MissingItemRange().Ptr(), }) return errSpec, diags } if len(content.Blocks) > 1 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Extraneous spec block", Detail: "A spec file must have exactly one root block specifying how to map to a JSON value.", Subject: &content.Blocks[1].DefRange, }) return errSpec, diags } spec, specDiags := decodeSpecBlock(content.Blocks[0]) diags = append(diags, specDiags...) return spec, diags } func decodeSpecBlock(block *hcl.Block) (hcldec.Spec, hcl.Diagnostics) { var impliedName string if len(block.Labels) > 0 { impliedName = block.Labels[0] } switch block.Type { case "object": return decodeObjectSpec(block.Body) case "array": return decodeArraySpec(block.Body) case "attr": return decodeAttrSpec(block.Body, impliedName) case "block": return decodeBlockSpec(block.Body, impliedName) case "block_list": return decodeBlockListSpec(block.Body, impliedName) case "block_set": return decodeBlockSetSpec(block.Body, impliedName) case "block_map": return decodeBlockMapSpec(block.Body, impliedName) case "block_attrs": return decodeBlockAttrsSpec(block.Body, impliedName) case "default": return decodeDefaultSpec(block.Body) case "transform": return decodeTransformSpec(block.Body) case "literal": return decodeLiteralSpec(block.Body) default: // Should never happen, because the above cases should be exhaustive // for our schema. var diags hcl.Diagnostics diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid spec block", Detail: fmt.Sprintf("Blocks of type %q are not expected here.", block.Type), Subject: &block.TypeRange, }) return errSpec, diags } } func decodeObjectSpec(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) { content, diags := body.Content(specSchemaLabelled) spec := make(hcldec.ObjectSpec) for _, block := range content.Blocks { propSpec, propDiags := decodeSpecBlock(block) diags = append(diags, propDiags...) spec[block.Labels[0]] = propSpec } return spec, diags } func decodeArraySpec(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) { content, diags := body.Content(specSchemaUnlabelled) spec := make(hcldec.TupleSpec, 0, len(content.Blocks)) for _, block := range content.Blocks { elemSpec, elemDiags := decodeSpecBlock(block) diags = append(diags, elemDiags...) spec = append(spec, elemSpec) } return spec, diags } func decodeAttrSpec(body hcl.Body, impliedName string) (hcldec.Spec, hcl.Diagnostics) { type content struct { Name *string `hcl:"name"` Type hcl.Expression `hcl:"type"` Required *bool `hcl:"required"` } var args content diags := gohcl.DecodeBody(body, nil, &args) if diags.HasErrors() { return errSpec, diags } spec := &hcldec.AttrSpec{ Name: impliedName, } if args.Required != nil { spec.Required = *args.Required } if args.Name != nil { spec.Name = *args.Name } var typeDiags hcl.Diagnostics spec.Type, typeDiags = evalTypeExpr(args.Type) diags = append(diags, typeDiags...) 
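	// An attr spec must ultimately have a name, either set explicitly via
	// the "name" argument or implied by a label on a parent object spec.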
if spec.Name == "" { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing name in attribute spec", Detail: "The name attribute is required, to specify the attribute name that is expected in an input HCL file.", Subject: body.MissingItemRange().Ptr(), }) return errSpec, diags } return spec, diags } func decodeBlockSpec(body hcl.Body, impliedName string) (hcldec.Spec, hcl.Diagnostics) { type content struct { TypeName *string `hcl:"block_type"` Required *bool `hcl:"required"` Nested hcl.Body `hcl:",remain"` } var args content diags := gohcl.DecodeBody(body, nil, &args) if diags.HasErrors() { return errSpec, diags } spec := &hcldec.BlockSpec{ TypeName: impliedName, } if args.Required != nil { spec.Required = *args.Required } if args.TypeName != nil { spec.TypeName = *args.TypeName } nested, nestedDiags := decodeBlockNestedSpec(args.Nested) diags = append(diags, nestedDiags...) spec.Nested = nested return spec, diags } func decodeBlockListSpec(body hcl.Body, impliedName string) (hcldec.Spec, hcl.Diagnostics) { type content struct { TypeName *string `hcl:"block_type"` MinItems *int `hcl:"min_items"` MaxItems *int `hcl:"max_items"` Nested hcl.Body `hcl:",remain"` } var args content diags := gohcl.DecodeBody(body, nil, &args) if diags.HasErrors() { return errSpec, diags } spec := &hcldec.BlockListSpec{ TypeName: impliedName, } if args.MinItems != nil { spec.MinItems = *args.MinItems } if args.MaxItems != nil { spec.MaxItems = *args.MaxItems } if args.TypeName != nil { spec.TypeName = *args.TypeName } nested, nestedDiags := decodeBlockNestedSpec(args.Nested) diags = append(diags, nestedDiags...) spec.Nested = nested if spec.TypeName == "" { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing block_type in block_list spec", Detail: "The block_type attribute is required, to specify the block type name that is expected in an input HCL file.", Subject: body.MissingItemRange().Ptr(), }) return errSpec, diags } return spec, diags } func decodeBlockSetSpec(body hcl.Body, impliedName string) (hcldec.Spec, hcl.Diagnostics) { type content struct { TypeName *string `hcl:"block_type"` MinItems *int `hcl:"min_items"` MaxItems *int `hcl:"max_items"` Nested hcl.Body `hcl:",remain"` } var args content diags := gohcl.DecodeBody(body, nil, &args) if diags.HasErrors() { return errSpec, diags } spec := &hcldec.BlockSetSpec{ TypeName: impliedName, } if args.MinItems != nil { spec.MinItems = *args.MinItems } if args.MaxItems != nil { spec.MaxItems = *args.MaxItems } if args.TypeName != nil { spec.TypeName = *args.TypeName } nested, nestedDiags := decodeBlockNestedSpec(args.Nested) diags = append(diags, nestedDiags...) 
spec.Nested = nested if spec.TypeName == "" { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing block_type in block_set spec", Detail: "The block_type attribute is required, to specify the block type name that is expected in an input HCL file.", Subject: body.MissingItemRange().Ptr(), }) return errSpec, diags } return spec, diags } func decodeBlockMapSpec(body hcl.Body, impliedName string) (hcldec.Spec, hcl.Diagnostics) { type content struct { TypeName *string `hcl:"block_type"` Labels []string `hcl:"labels"` Nested hcl.Body `hcl:",remain"` } var args content diags := gohcl.DecodeBody(body, nil, &args) if diags.HasErrors() { return errSpec, diags } spec := &hcldec.BlockMapSpec{ TypeName: impliedName, } if args.TypeName != nil { spec.TypeName = *args.TypeName } spec.LabelNames = args.Labels nested, nestedDiags := decodeBlockNestedSpec(args.Nested) diags = append(diags, nestedDiags...) spec.Nested = nested if spec.TypeName == "" { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing block_type in block_map spec", Detail: "The block_type attribute is required, to specify the block type name that is expected in an input HCL file.", Subject: body.MissingItemRange().Ptr(), }) return errSpec, diags } if len(spec.LabelNames) < 1 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid block label name list", Detail: "A block_map must have at least one label specified.", Subject: body.MissingItemRange().Ptr(), }) return errSpec, diags } if hcldec.ImpliedType(spec).HasDynamicTypes() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid block_map spec", Detail: "A block_map spec may not contain attributes with type 'any'.", Subject: body.MissingItemRange().Ptr(), }) } return spec, diags } func decodeBlockNestedSpec(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) { content, diags := body.Content(specSchemaUnlabelled) if len(content.Blocks) == 0 { if diags.HasErrors() { // If we already have errors then they probably explain // why we have no blocks, so we'll skip our additional // error message added below. return errSpec, diags } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing spec block", Detail: "A block spec must have exactly one child spec specifying how to decode block contents.", Subject: body.MissingItemRange().Ptr(), }) return errSpec, diags } if len(content.Blocks) > 1 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Extraneous spec block", Detail: "A block spec must have exactly one child spec specifying how to decode block contents.", Subject: &content.Blocks[1].DefRange, }) return errSpec, diags } spec, specDiags := decodeSpecBlock(content.Blocks[0]) diags = append(diags, specDiags...) return spec, diags } func decodeBlockAttrsSpec(body hcl.Body, impliedName string) (hcldec.Spec, hcl.Diagnostics) { type content struct { TypeName *string `hcl:"block_type"` ElementType hcl.Expression `hcl:"element_type"` Required *bool `hcl:"required"` } var args content diags := gohcl.DecodeBody(body, nil, &args) if diags.HasErrors() { return errSpec, diags } spec := &hcldec.BlockAttrsSpec{ TypeName: impliedName, } if args.Required != nil { spec.Required = *args.Required } if args.TypeName != nil { spec.TypeName = *args.TypeName } var typeDiags hcl.Diagnostics spec.ElementType, typeDiags = evalTypeExpr(args.ElementType) diags = append(diags, typeDiags...) 
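	// element_type applies to every attribute found in a matched block, so
	// the decoded result is a single map whose values all have that type.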
	if spec.TypeName == "" {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Missing block_type in block_attrs spec",
			Detail:   "The block_type attribute is required, to specify the block type name that is expected in an input HCL file.",
			Subject:  body.MissingItemRange().Ptr(),
		})
		return errSpec, diags
	}

	return spec, diags
}

func decodeLiteralSpec(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) {
	type content struct {
		Value cty.Value `hcl:"value"`
	}

	var args content
	diags := gohcl.DecodeBody(body, specCtx, &args)
	if diags.HasErrors() {
		return errSpec, diags
	}

	return &hcldec.LiteralSpec{
		Value: args.Value,
	}, diags
}

func decodeDefaultSpec(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) {
	content, diags := body.Content(specSchemaUnlabelled)

	if len(content.Blocks) == 0 {
		if diags.HasErrors() {
			// If we already have errors then they probably explain
			// why we have no blocks, so we'll skip our additional
			// error message added below.
			return errSpec, diags
		}

		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Missing spec block",
			Detail:   "A default block must have at least one nested spec, each specifying a possible outcome.",
			Subject:  body.MissingItemRange().Ptr(),
		})
		return errSpec, diags
	}

	if len(content.Blocks) == 1 && !diags.HasErrors() {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagWarning,
			Summary:  "Useless default block",
			Detail:   "A default block with only one spec is equivalent to using that spec alone.",
			// The warning points at the single nested spec block.
			Subject: &content.Blocks[0].DefRange,
		})
	}

	var spec hcldec.Spec
	for _, block := range content.Blocks {
		candidateSpec, candidateDiags := decodeSpecBlock(block)
		diags = append(diags, candidateDiags...)
		if candidateDiags.HasErrors() {
			continue
		}

		if spec == nil {
			spec = candidateSpec
		} else {
			spec = &hcldec.DefaultSpec{
				Primary: spec,
				Default: candidateSpec,
			}
		}
	}

	return spec, diags
}

func decodeTransformSpec(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) {
	type content struct {
		Result hcl.Expression `hcl:"result"`
		Nested hcl.Body       `hcl:",remain"`
	}

	var args content
	diags := gohcl.DecodeBody(body, nil, &args)
	if diags.HasErrors() {
		return errSpec, diags
	}

	spec := &hcldec.TransformExprSpec{
		Expr:         args.Result,
		VarName:      "nested",
		TransformCtx: specCtx,
	}

	nestedContent, nestedDiags := args.Nested.Content(specSchemaUnlabelled)
	diags = append(diags, nestedDiags...)

	if len(nestedContent.Blocks) != 1 {
		if nestedDiags.HasErrors() {
			// If we already have errors then they probably explain
			// why we have the wrong number of blocks, so we'll skip our
			// additional error message added below.
			return errSpec, diags
		}

		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Invalid transform spec",
			Detail:   "A transform spec block must have exactly one nested spec block.",
			Subject:  body.MissingItemRange().Ptr(),
		})
		return errSpec, diags
	}

	nestedSpec, nestedDiags := decodeSpecBlock(nestedContent.Blocks[0])
	diags = append(diags, nestedDiags...)
spec.Wrapped = nestedSpec return spec, diags } var errSpec = &hcldec.LiteralSpec{ Value: cty.NullVal(cty.DynamicPseudoType), } var specBlockTypes = []string{ "object", "array", "literal", "attr", "block", "block_list", "block_map", "block_set", "default", "transform", } var specSchemaUnlabelled *hcl.BodySchema var specSchemaLabelled *hcl.BodySchema var specSchemaLabelledLabels = []string{"key"} func init() { specSchemaLabelled = &hcl.BodySchema{ Blocks: make([]hcl.BlockHeaderSchema, 0, len(specBlockTypes)), } specSchemaUnlabelled = &hcl.BodySchema{ Blocks: make([]hcl.BlockHeaderSchema, 0, len(specBlockTypes)), } for _, name := range specBlockTypes { specSchemaLabelled.Blocks = append( specSchemaLabelled.Blocks, hcl.BlockHeaderSchema{ Type: name, LabelNames: specSchemaLabelledLabels, }, ) specSchemaUnlabelled.Blocks = append( specSchemaUnlabelled.Blocks, hcl.BlockHeaderSchema{ Type: name, }, ) } } hcl-2.14.1/cmd/hcldec/spec_funcs.go000066400000000000000000000012471431334125700170200ustar00rootroot00000000000000package main import ( "github.com/zclconf/go-cty/cty/function" "github.com/zclconf/go-cty/cty/function/stdlib" ) var specFuncs = map[string]function.Function{ "abs": stdlib.AbsoluteFunc, "coalesce": stdlib.CoalesceFunc, "concat": stdlib.ConcatFunc, "hasindex": stdlib.HasIndexFunc, "int": stdlib.IntFunc, "jsondecode": stdlib.JSONDecodeFunc, "jsonencode": stdlib.JSONEncodeFunc, "length": stdlib.LengthFunc, "lower": stdlib.LowerFunc, "max": stdlib.MaxFunc, "min": stdlib.MinFunc, "reverse": stdlib.ReverseFunc, "strlen": stdlib.StrlenFunc, "substr": stdlib.SubstrFunc, "upper": stdlib.UpperFunc, } hcl-2.14.1/cmd/hcldec/type_expr.go000066400000000000000000000064671431334125700167200ustar00rootroot00000000000000package main import ( "fmt" "reflect" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" ) var typeType = cty.Capsule("type", reflect.TypeOf(cty.NilType)) var typeEvalCtx = &hcl.EvalContext{ Variables: map[string]cty.Value{ "string": wrapTypeType(cty.String), "bool": wrapTypeType(cty.Bool), "number": wrapTypeType(cty.Number), "any": wrapTypeType(cty.DynamicPseudoType), }, Functions: map[string]function.Function{ "list": function.New(&function.Spec{ Params: []function.Parameter{ { Name: "element_type", Type: typeType, }, }, Type: function.StaticReturnType(typeType), Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { ety := unwrapTypeType(args[0]) ty := cty.List(ety) return wrapTypeType(ty), nil }, }), "set": function.New(&function.Spec{ Params: []function.Parameter{ { Name: "element_type", Type: typeType, }, }, Type: function.StaticReturnType(typeType), Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { ety := unwrapTypeType(args[0]) ty := cty.Set(ety) return wrapTypeType(ty), nil }, }), "map": function.New(&function.Spec{ Params: []function.Parameter{ { Name: "element_type", Type: typeType, }, }, Type: function.StaticReturnType(typeType), Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { ety := unwrapTypeType(args[0]) ty := cty.Map(ety) return wrapTypeType(ty), nil }, }), "tuple": function.New(&function.Spec{ Params: []function.Parameter{ { Name: "element_types", Type: cty.List(typeType), }, }, Type: function.StaticReturnType(typeType), Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { etysVal := args[0] etys := make([]cty.Type, 0, etysVal.LengthInt()) for it := etysVal.ElementIterator(); it.Next(); { _, wrapEty := it.Element() etys = append(etys, 
unwrapTypeType(wrapEty)) } ty := cty.Tuple(etys) return wrapTypeType(ty), nil }, }), "object": function.New(&function.Spec{ Params: []function.Parameter{ { Name: "attribute_types", Type: cty.Map(typeType), }, }, Type: function.StaticReturnType(typeType), Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { atysVal := args[0] atys := make(map[string]cty.Type) for it := atysVal.ElementIterator(); it.Next(); { nameVal, wrapAty := it.Element() name := nameVal.AsString() atys[name] = unwrapTypeType(wrapAty) } ty := cty.Object(atys) return wrapTypeType(ty), nil }, }), }, } func evalTypeExpr(expr hcl.Expression) (cty.Type, hcl.Diagnostics) { result, diags := expr.Value(typeEvalCtx) if result.IsNull() { return cty.DynamicPseudoType, diags } if !result.Type().Equals(typeType) { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid type expression", Detail: fmt.Sprintf("A type is required, not %s.", result.Type().FriendlyName()), }) return cty.DynamicPseudoType, diags } return unwrapTypeType(result), diags } func wrapTypeType(ty cty.Type) cty.Value { return cty.CapsuleVal(typeType, &ty) } func unwrapTypeType(val cty.Value) cty.Type { return *(val.EncapsulatedValue().(*cty.Type)) } hcl-2.14.1/cmd/hcldec/vars.go000066400000000000000000000033111431334125700156350ustar00rootroot00000000000000package main import ( "fmt" "strings" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) func parseVarsArg(src string, argIdx int) (map[string]cty.Value, hcl.Diagnostics) { fakeFn := fmt.Sprintf("", argIdx) f, diags := parser.ParseJSON([]byte(src), fakeFn) if f == nil { return nil, diags } vals, valsDiags := parseVarsBody(f.Body) diags = append(diags, valsDiags...) return vals, diags } func parseVarsFile(filename string) (map[string]cty.Value, hcl.Diagnostics) { var f *hcl.File var diags hcl.Diagnostics if strings.HasSuffix(filename, ".json") { f, diags = parser.ParseJSONFile(filename) } else { f, diags = parser.ParseHCLFile(filename) } if f == nil { return nil, diags } vals, valsDiags := parseVarsBody(f.Body) diags = append(diags, valsDiags...) return vals, diags } func parseVarsBody(body hcl.Body) (map[string]cty.Value, hcl.Diagnostics) { attrs, diags := body.JustAttributes() if attrs == nil { return nil, diags } vals := make(map[string]cty.Value, len(attrs)) for name, attr := range attrs { val, valDiags := attr.Expr.Value(nil) diags = append(diags, valDiags...) vals[name] = val } return vals, diags } // varSpecs is an implementation of pflag.Value that accumulates a list of // raw values, ignoring any quoting. This is similar to pflag.StringSlice // but does not complain if there are literal quotes inside the value, which // is important for us to accept JSON literals here. 
type varSpecs []string func (vs *varSpecs) String() string { return strings.Join([]string(*vs), ", ") } func (vs *varSpecs) Set(new string) error { *vs = append(*vs, new) return nil } func (vs *varSpecs) Type() string { return "json-or-file" } hcl-2.14.1/cmd/hclfmt/000077500000000000000000000000001431334125700143705ustar00rootroot00000000000000hcl-2.14.1/cmd/hclfmt/main.go000066400000000000000000000060221431334125700156430ustar00rootroot00000000000000package main import ( "bytes" "errors" "flag" "fmt" "io/ioutil" "os" "strings" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclparse" "github.com/hashicorp/hcl/v2/hclwrite" "golang.org/x/crypto/ssh/terminal" ) const versionStr = "0.0.1-dev" var ( check = flag.Bool("check", false, "perform a syntax check on the given files and produce diagnostics") reqNoChange = flag.Bool("require-no-change", false, "return a non-zero status if any files are changed during formatting") overwrite = flag.Bool("w", false, "overwrite source files instead of writing to stdout") showVersion = flag.Bool("version", false, "show the version number and immediately exit") ) var parser = hclparse.NewParser() var diagWr hcl.DiagnosticWriter // initialized in init var checkErrs = false var changed []string func init() { color := terminal.IsTerminal(int(os.Stderr.Fd())) w, _, err := terminal.GetSize(int(os.Stdout.Fd())) if err != nil { w = 80 } diagWr = hcl.NewDiagnosticTextWriter(os.Stderr, parser.Files(), uint(w), color) } func main() { err := realmain() if err != nil { fmt.Fprintln(os.Stderr, err.Error()) os.Exit(1) } } func realmain() error { flag.Usage = usage flag.Parse() if *showVersion { fmt.Println(versionStr) return nil } err := processFiles() if err != nil { return err } if checkErrs { return errors.New("one or more files contained errors") } if *reqNoChange { if len(changed) != 0 { return fmt.Errorf("file(s) were changed: %s", strings.Join(changed, ", ")) } } return nil } func processFiles() error { if flag.NArg() == 0 { if *overwrite { return errors.New("error: cannot use -w without source filenames") } return processFile("", os.Stdin) } for i := 0; i < flag.NArg(); i++ { path := flag.Arg(i) switch dir, err := os.Stat(path); { case err != nil: return err case dir.IsDir(): // This tool can't walk a whole directory because it doesn't // know what file naming schemes will be used by different // HCL-embedding applications, so it'll leave that sort of // functionality for apps themselves to implement. 
return fmt.Errorf("can't format directory %s", path) default: if err := processFile(path, nil); err != nil { return err } } } return nil } func processFile(fn string, in *os.File) error { var err error if in == nil { in, err = os.Open(fn) if err != nil { return fmt.Errorf("failed to open %s: %s", fn, err) } } inSrc, err := ioutil.ReadAll(in) if err != nil { return fmt.Errorf("failed to read %s: %s", fn, err) } if *check { _, diags := parser.ParseHCL(inSrc, fn) diagWr.WriteDiagnostics(diags) if diags.HasErrors() { checkErrs = true return nil } } outSrc := hclwrite.Format(inSrc) if !bytes.Equal(inSrc, outSrc) { changed = append(changed, fn) } if *overwrite { return ioutil.WriteFile(fn, outSrc, 0644) } _, err = os.Stdout.Write(outSrc) return err } func usage() { fmt.Fprintf(os.Stderr, "usage: hclfmt [flags] [path ...]\n") flag.PrintDefaults() os.Exit(2) } hcl-2.14.1/cmd/hclspecsuite/000077500000000000000000000000001431334125700156065ustar00rootroot00000000000000hcl-2.14.1/cmd/hclspecsuite/README.md000066400000000000000000000001701431334125700170630ustar00rootroot00000000000000# `hclspecsuite` `hclspecsuite` is the test harness for [the HCL specification test suite](../../specsuite/README.md). hcl-2.14.1/cmd/hclspecsuite/diagnostics.go000066400000000000000000000050041431334125700204430ustar00rootroot00000000000000package main import ( "encoding/json" "fmt" "github.com/hashicorp/hcl/v2" ) func decodeJSONDiagnostics(src []byte) hcl.Diagnostics { type PosJSON struct { Line int `json:"line"` Column int `json:"column"` Byte int `json:"byte"` } type RangeJSON struct { Filename string `json:"filename"` Start PosJSON `json:"start"` End PosJSON `json:"end"` } type DiagnosticJSON struct { Severity string `json:"severity"` Summary string `json:"summary"` Detail string `json:"detail,omitempty"` Subject *RangeJSON `json:"subject,omitempty"` } type DiagnosticsJSON struct { Diagnostics []DiagnosticJSON `json:"diagnostics"` } var raw DiagnosticsJSON var diags hcl.Diagnostics err := json.Unmarshal(src, &raw) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Failed to parse hcldec diagnostics result", Detail: fmt.Sprintf("Sub-program hcldec produced invalid diagnostics: %s.", err), }) return diags } if len(raw.Diagnostics) == 0 { return nil } diags = make(hcl.Diagnostics, 0, len(raw.Diagnostics)) for _, rawDiag := range raw.Diagnostics { var severity hcl.DiagnosticSeverity switch rawDiag.Severity { case "error": severity = hcl.DiagError case "warning": severity = hcl.DiagWarning default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Failed to parse hcldec diagnostics result", Detail: fmt.Sprintf("Diagnostic has unsupported severity %q.", rawDiag.Severity), }) continue } diag := &hcl.Diagnostic{ Severity: severity, Summary: rawDiag.Summary, Detail: rawDiag.Detail, } if rawDiag.Subject != nil { rawRange := rawDiag.Subject diag.Subject = &hcl.Range{ Filename: rawRange.Filename, Start: hcl.Pos{ Line: rawRange.Start.Line, Column: rawRange.Start.Column, Byte: rawRange.Start.Byte, }, End: hcl.Pos{ Line: rawRange.End.Line, Column: rawRange.End.Column, Byte: rawRange.End.Byte, }, } } diags = append(diags, diag) } return diags } func severityString(severity hcl.DiagnosticSeverity) string { switch severity { case hcl.DiagError: return "error" case hcl.DiagWarning: return "warning" default: return "unsupported-severity" } } func rangeString(rng hcl.Range) string { return fmt.Sprintf( "from line %d column %d byte %d to line %d column %d byte %d", 
rng.Start.Line, rng.Start.Column, rng.Start.Byte, rng.End.Line, rng.End.Column, rng.End.Byte, ) } hcl-2.14.1/cmd/hclspecsuite/log.go000066400000000000000000000003231431334125700167140ustar00rootroot00000000000000package main import ( "github.com/hashicorp/hcl/v2" ) type LogBeginCallback func(testName string, testFile *TestFile) type LogProblemsCallback func(testName string, testFile *TestFile, diags hcl.Diagnostics) hcl-2.14.1/cmd/hclspecsuite/main.go000066400000000000000000000026041431334125700170630ustar00rootroot00000000000000package main import ( "fmt" "os" "os/exec" "golang.org/x/crypto/ssh/terminal" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclparse" ) func main() { os.Exit(realMain(os.Args[1:])) } func realMain(args []string) int { if len(args) != 2 { fmt.Fprintf(os.Stderr, "Usage: hclspecsuite \n") return 2 } testsDir := args[0] hcldecPath := args[1] hcldecPath, err := exec.LookPath(hcldecPath) if err != nil { fmt.Fprintf(os.Stderr, "%s\n", err) return 2 } parser := hclparse.NewParser() color := terminal.IsTerminal(int(os.Stderr.Fd())) w, _, err := terminal.GetSize(int(os.Stdout.Fd())) if err != nil { w = 80 } diagWr := hcl.NewDiagnosticTextWriter(os.Stderr, parser.Files(), uint(w), color) var diagCount int runner := &Runner{ parser: parser, hcldecPath: hcldecPath, baseDir: testsDir, logBegin: func(name string, file *TestFile) { fmt.Printf("- %s\n", name) }, logProblems: func(name string, file *TestFile, diags hcl.Diagnostics) { if len(diags) != 0 { os.Stderr.WriteString("\n") diagWr.WriteDiagnostics(diags) diagCount += len(diags) } fmt.Printf("- %s\n", name) }, } diags := runner.Run() if len(diags) != 0 { os.Stderr.WriteString("\n\n\n== Test harness problems:\n\n") diagWr.WriteDiagnostics(diags) diagCount += len(diags) } if diagCount > 0 { return 2 } return 0 } hcl-2.14.1/cmd/hclspecsuite/runner.go000066400000000000000000000346141431334125700174560ustar00rootroot00000000000000package main import ( "bytes" "encoding/json" "fmt" "io/ioutil" "os" "os/exec" "path/filepath" "sort" "strings" "github.com/zclconf/go-cty-debug/ctydebug" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" ctyjson "github.com/zclconf/go-cty/cty/json" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/typeexpr" "github.com/hashicorp/hcl/v2/hclparse" ) type Runner struct { parser *hclparse.Parser hcldecPath string baseDir string logBegin LogBeginCallback logProblems LogProblemsCallback } func (r *Runner) Run() hcl.Diagnostics { return r.runDir(r.baseDir) } func (r *Runner) runDir(dir string) hcl.Diagnostics { var diags hcl.Diagnostics infos, err := ioutil.ReadDir(dir) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Failed to read test directory", Detail: fmt.Sprintf("The directory %q could not be opened: %s.", dir, err), }) return diags } var tests []string var subDirs []string for _, info := range infos { name := info.Name() if strings.HasPrefix(name, ".") { continue } if info.IsDir() { subDirs = append(subDirs, name) } if strings.HasSuffix(name, ".t") { tests = append(tests, name) } } sort.Strings(tests) sort.Strings(subDirs) for _, filename := range tests { filename = filepath.Join(dir, filename) testDiags := r.runTest(filename) diags = append(diags, testDiags...) } for _, dirName := range subDirs { dir := filepath.Join(dir, dirName) dirDiags := r.runDir(dir) diags = append(diags, dirDiags...) 
} return diags } func (r *Runner) runTest(filename string) hcl.Diagnostics { prettyName := r.prettyTestName(filename) tf, diags := r.LoadTestFile(filename) if diags.HasErrors() { // We'll still log, so it's clearer which test the diagnostics belong to. if r.logBegin != nil { r.logBegin(prettyName, nil) } if r.logProblems != nil { r.logProblems(prettyName, nil, diags) return nil // don't duplicate the diagnostics we already reported } return diags } if r.logBegin != nil { r.logBegin(prettyName, tf) } basePath := filename[:len(filename)-2] specFilename := basePath + ".hcldec" nativeFilename := basePath + ".hcl" jsonFilename := basePath + ".hcl.json" // We'll add the source code of the spec file to our own parser, even // though it'll actually be parsed by the hcldec child process, since that // way we can produce nice diagnostic messages if hcldec fails to process // the spec file. src, err := ioutil.ReadFile(specFilename) if err == nil { r.parser.AddFile(specFilename, &hcl.File{ Bytes: src, }) } if _, err := os.Stat(specFilename); err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing .hcldec file", Detail: fmt.Sprintf("No specification file for test %s: %s.", prettyName, err), }) return diags } if _, err := os.Stat(nativeFilename); err == nil { moreDiags := r.runTestInput(specFilename, nativeFilename, tf) diags = append(diags, moreDiags...) } if _, err := os.Stat(jsonFilename); err == nil { moreDiags := r.runTestInput(specFilename, jsonFilename, tf) diags = append(diags, moreDiags...) } if r.logProblems != nil { r.logProblems(prettyName, nil, diags) return nil // don't duplicate the diagnostics we already reported } return diags } func (r *Runner) runTestInput(specFilename, inputFilename string, tf *TestFile) hcl.Diagnostics { // We'll add the source code of the input file to our own parser, even // though it'll actually be parsed by the hcldec child process, since that // way we can produce nice diagnostic messages if hcldec fails to process // the input file. src, err := ioutil.ReadFile(inputFilename) if err == nil { r.parser.AddFile(inputFilename, &hcl.File{ Bytes: src, }) } var diags hcl.Diagnostics if tf.ChecksTraversals { gotTraversals, moreDiags := r.hcldecVariables(specFilename, inputFilename) diags = append(diags, moreDiags...) if !moreDiags.HasErrors() { expected := tf.ExpectedTraversals for _, got := range gotTraversals { e := findTraversalSpec(got, expected) rng := got.SourceRange() if e == nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unexpected traversal", Detail: "Detected traversal that is not indicated as expected in the test file.", Subject: &rng, }) } else { moreDiags := checkTraversalsMatch(got, inputFilename, e) diags = append(diags, moreDiags...) } } // Look for any traversals that didn't show up at all. for _, e := range expected { if t := findTraversalForSpec(e, gotTraversals); t == nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing expected traversal", Detail: "This expected traversal was not detected.", Subject: e.Traversal.SourceRange().Ptr(), }) } } } } val, transformDiags := r.hcldecTransform(specFilename, inputFilename) if len(tf.ExpectedDiags) == 0 { diags = append(diags, transformDiags...) if transformDiags.HasErrors() { // If hcldec failed then there's no point in continuing. 
return diags } if errs := val.Type().TestConformance(tf.ResultType); len(errs) > 0 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect result type", Detail: fmt.Sprintf( "Input file %s produced %s, but was expecting %s.", inputFilename, typeexpr.TypeString(val.Type()), typeexpr.TypeString(tf.ResultType), ), }) } if tf.Result != cty.NilVal { cmpVal, err := convert.Convert(tf.Result, tf.ResultType) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect type for result value", Detail: fmt.Sprintf( "Result does not conform to the given result type: %s.", err, ), Subject: &tf.ResultRange, }) } else { if !val.RawEquals(cmpVal) { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect result value", Detail: fmt.Sprintf( "Input file %s produced %#v, but was expecting %#v.\n\n%s", inputFilename, val, tf.Result, ctydebug.DiffValues(tf.Result, val), ), }) } } } } else { // We're expecting diagnostics, and so we'll need to correlate the // severities and source ranges of our actual diagnostics against // what we were expecting. type DiagnosticEntry struct { Severity hcl.DiagnosticSeverity Range hcl.Range } got := make(map[DiagnosticEntry]*hcl.Diagnostic) want := make(map[DiagnosticEntry]hcl.Range) for _, diag := range transformDiags { if diag.Subject == nil { // Sourceless diagnostics can never be expected, so we'll just // pass these through as-is and assume they are hcldec // operational errors. diags = append(diags, diag) continue } if diag.Subject.Filename != inputFilename { // If the problem is for something other than the input file // then it can't be expected. diags = append(diags, diag) continue } entry := DiagnosticEntry{ Severity: diag.Severity, Range: *diag.Subject, } got[entry] = diag } for _, e := range tf.ExpectedDiags { e.Range.Filename = inputFilename // assumed here, since we don't allow any other filename to be expected entry := DiagnosticEntry{ Severity: e.Severity, Range: e.Range, } want[entry] = e.DeclRange } for gotEntry, diag := range got { if _, wanted := want[gotEntry]; !wanted { // Pass through the diagnostic itself so the user can see what happened diags = append(diags, diag) diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unexpected diagnostic", Detail: fmt.Sprintf( "No %s diagnostic was expected %s. 
The unexpected diagnostic was shown above.", severityString(gotEntry.Severity), rangeString(gotEntry.Range), ), Subject: gotEntry.Range.Ptr(), }) } } for wantEntry, declRange := range want { if _, gotted := got[wantEntry]; !gotted { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing expected diagnostic", Detail: fmt.Sprintf( "No %s diagnostic was generated %s.", severityString(wantEntry.Severity), rangeString(wantEntry.Range), ), Subject: declRange.Ptr(), }) } } } return diags } func (r *Runner) hcldecTransform(specFile, inputFile string) (cty.Value, hcl.Diagnostics) { var diags hcl.Diagnostics var outBuffer bytes.Buffer var errBuffer bytes.Buffer cmd := &exec.Cmd{ Path: r.hcldecPath, Args: []string{ r.hcldecPath, "--spec=" + specFile, "--diags=json", "--with-type", "--keep-nulls", inputFile, }, Stdout: &outBuffer, Stderr: &errBuffer, } err := cmd.Run() if err != nil { if _, isExit := err.(*exec.ExitError); !isExit { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Failed to run hcldec", Detail: fmt.Sprintf("Sub-program hcldec failed to start: %s.", err), }) return cty.DynamicVal, diags } // If we exited unsuccessfully then we'll expect diagnostics on stderr moreDiags := decodeJSONDiagnostics(errBuffer.Bytes()) diags = append(diags, moreDiags...) return cty.DynamicVal, diags } else { // Otherwise, we expect a JSON result value on stdout. Since we used // --with-type above, we can decode as DynamicPseudoType to recover // exactly the type that was saved, without the usual JSON lossiness. val, err := ctyjson.Unmarshal(outBuffer.Bytes(), cty.DynamicPseudoType) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Failed to parse hcldec result", Detail: fmt.Sprintf("Sub-program hcldec produced an invalid result: %s.", err), }) return cty.DynamicVal, diags } return val, diags } } func (r *Runner) hcldecVariables(specFile, inputFile string) ([]hcl.Traversal, hcl.Diagnostics) { var diags hcl.Diagnostics var outBuffer bytes.Buffer var errBuffer bytes.Buffer cmd := &exec.Cmd{ Path: r.hcldecPath, Args: []string{ r.hcldecPath, "--spec=" + specFile, "--diags=json", "--var-refs", inputFile, }, Stdout: &outBuffer, Stderr: &errBuffer, } err := cmd.Run() if err != nil { if _, isExit := err.(*exec.ExitError); !isExit { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Failed to run hcldec", Detail: fmt.Sprintf("Sub-program hcldec (evaluating input) failed to start: %s.", err), }) return nil, diags } // If we exited unsuccessfully then we'll expect diagnostics on stderr moreDiags := decodeJSONDiagnostics(errBuffer.Bytes()) diags = append(diags, moreDiags...) return nil, diags } else { // Otherwise, we expect a JSON description of the traversals on stdout. 
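		// The local types below mirror the JSON structure produced by
		// hcldec's --var-refs mode, so the output can be decoded back into
		// hcl.Traversal values for comparison against the test expectations.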
type PosJSON struct { Line int `json:"line"` Column int `json:"column"` Byte int `json:"byte"` } type RangeJSON struct { Filename string `json:"filename"` Start PosJSON `json:"start"` End PosJSON `json:"end"` } type StepJSON struct { Kind string `json:"kind"` Name string `json:"name,omitempty"` Key json.RawMessage `json:"key,omitempty"` Range RangeJSON `json:"range"` } type TraversalJSON struct { Steps []StepJSON `json:"steps"` } var raw []TraversalJSON err := json.Unmarshal(outBuffer.Bytes(), &raw) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Failed to parse hcldec result", Detail: fmt.Sprintf("Sub-program hcldec (with --var-refs) produced an invalid result: %s.", err), }) return nil, diags } var ret []hcl.Traversal if len(raw) == 0 { return ret, diags } ret = make([]hcl.Traversal, 0, len(raw)) for _, rawT := range raw { traversal := make(hcl.Traversal, 0, len(rawT.Steps)) for _, rawS := range rawT.Steps { rng := hcl.Range{ Filename: rawS.Range.Filename, Start: hcl.Pos{ Line: rawS.Range.Start.Line, Column: rawS.Range.Start.Column, Byte: rawS.Range.Start.Byte, }, End: hcl.Pos{ Line: rawS.Range.End.Line, Column: rawS.Range.End.Column, Byte: rawS.Range.End.Byte, }, } switch rawS.Kind { case "root": traversal = append(traversal, hcl.TraverseRoot{ Name: rawS.Name, SrcRange: rng, }) case "attr": traversal = append(traversal, hcl.TraverseAttr{ Name: rawS.Name, SrcRange: rng, }) case "index": ty, err := ctyjson.ImpliedType([]byte(rawS.Key)) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Failed to parse hcldec result", Detail: fmt.Sprintf("Sub-program hcldec (with --var-refs) produced an invalid result: traversal step has invalid index key %s.", rawS.Key), }) return nil, diags } keyVal, err := ctyjson.Unmarshal([]byte(rawS.Key), ty) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Failed to parse hcldec result", Detail: fmt.Sprintf("Sub-program hcldec (with --var-refs) produced a result with an invalid index key %s: %s.", rawS.Key, err), }) return nil, diags } traversal = append(traversal, hcl.TraverseIndex{ Key: keyVal, SrcRange: rng, }) default: // Should never happen since the above cases are exhaustive, // but we'll catch it gracefully since this is coming from // a possibly-buggy hcldec implementation that we're testing. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Failed to parse hcldec result", Detail: fmt.Sprintf("Sub-program hcldec (with --var-refs) produced an invalid result: traversal step of unsupported kind %q.", rawS.Kind), }) return nil, diags } } ret = append(ret, traversal) } return ret, diags } } func (r *Runner) prettyDirName(dir string) string { rel, err := filepath.Rel(r.baseDir, dir) if err != nil { return filepath.ToSlash(dir) } return filepath.ToSlash(rel) } func (r *Runner) prettyTestName(filename string) string { dir := filepath.Dir(filename) dirName := r.prettyDirName(dir) filename = filepath.Base(filename) testName := filename[:len(filename)-2] if dirName == "." 
{ return testName } return fmt.Sprintf("%s/%s", dirName, testName) } hcl-2.14.1/cmd/hclspecsuite/test_file.go000066400000000000000000000201731431334125700201160ustar00rootroot00000000000000package main import ( "fmt" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/typeexpr" "github.com/hashicorp/hcl/v2/gohcl" ) type TestFile struct { Result cty.Value ResultType cty.Type ChecksTraversals bool ExpectedTraversals []*TestFileExpectTraversal ExpectedDiags []*TestFileExpectDiag ResultRange hcl.Range ResultTypeRange hcl.Range } type TestFileExpectTraversal struct { Traversal hcl.Traversal Range hcl.Range DeclRange hcl.Range } type TestFileExpectDiag struct { Severity hcl.DiagnosticSeverity Range hcl.Range DeclRange hcl.Range } func (r *Runner) LoadTestFile(filename string) (*TestFile, hcl.Diagnostics) { f, diags := r.parser.ParseHCLFile(filename) if diags.HasErrors() { return nil, diags } content, moreDiags := f.Body.Content(testFileSchema) diags = append(diags, moreDiags...) if moreDiags.HasErrors() { return nil, diags } ret := &TestFile{ ResultType: cty.DynamicPseudoType, } if typeAttr, exists := content.Attributes["result_type"]; exists { ty, moreDiags := typeexpr.TypeConstraint(typeAttr.Expr) diags = append(diags, moreDiags...) if !moreDiags.HasErrors() { ret.ResultType = ty } ret.ResultTypeRange = typeAttr.Expr.Range() } if resultAttr, exists := content.Attributes["result"]; exists { resultVal, moreDiags := resultAttr.Expr.Value(nil) diags = append(diags, moreDiags...) if !moreDiags.HasErrors() { resultVal, err := convert.Convert(resultVal, ret.ResultType) if err != nil { diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid result value", Detail: fmt.Sprintf("The result value does not conform to the given result type: %s.", err), Subject: resultAttr.Expr.Range().Ptr(), }) } else { ret.Result = resultVal } } ret.ResultRange = resultAttr.Expr.Range() } for _, block := range content.Blocks { switch block.Type { case "traversals": if ret.ChecksTraversals { // Indicates a duplicate traversals block diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Duplicate \"traversals\" block", Detail: fmt.Sprintf("Only one traversals block is expected."), Subject: &block.TypeRange, }) continue } expectTraversals, moreDiags := r.decodeTraversalsBlock(block) diags = append(diags, moreDiags...) if !moreDiags.HasErrors() { ret.ChecksTraversals = true ret.ExpectedTraversals = expectTraversals } case "diagnostics": if len(ret.ExpectedDiags) > 0 { // Indicates a duplicate diagnostics block diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Duplicate \"diagnostics\" block", Detail: fmt.Sprintf("Only one diagnostics block is expected."), Subject: &block.TypeRange, }) continue } expectDiags, moreDiags := r.decodeDiagnosticsBlock(block) diags = append(diags, moreDiags...) ret.ExpectedDiags = expectDiags default: // Shouldn't get here, because the above cases are exhaustive for // our test file schema. 
panic(fmt.Sprintf("unsupported block type %q", block.Type)) } } if ret.Result != cty.NilVal && len(ret.ExpectedDiags) > 0 { diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Conflicting spec expectations", Detail: "This test spec includes expected diagnostics, so it may not also include an expected result.", Subject: &content.Attributes["result"].Range, }) } return ret, diags } func (r *Runner) decodeTraversalsBlock(block *hcl.Block) ([]*TestFileExpectTraversal, hcl.Diagnostics) { var diags hcl.Diagnostics content, moreDiags := block.Body.Content(testFileTraversalsSchema) diags = append(diags, moreDiags...) if moreDiags.HasErrors() { return nil, diags } var ret []*TestFileExpectTraversal for _, block := range content.Blocks { // There's only one block type in our schema, so we can assume all // blocks are of that type. expectTraversal, moreDiags := r.decodeTraversalExpectBlock(block) diags = append(diags, moreDiags...) if expectTraversal != nil { ret = append(ret, expectTraversal) } } return ret, diags } func (r *Runner) decodeTraversalExpectBlock(block *hcl.Block) (*TestFileExpectTraversal, hcl.Diagnostics) { var diags hcl.Diagnostics rng, body, moreDiags := r.decodeRangeFromBody(block.Body) diags = append(diags, moreDiags...) content, moreDiags := body.Content(testFileTraversalExpectSchema) diags = append(diags, moreDiags...) if moreDiags.HasErrors() { return nil, diags } var traversal hcl.Traversal { refAttr := content.Attributes["ref"] traversal, moreDiags = hcl.AbsTraversalForExpr(refAttr.Expr) diags = append(diags, moreDiags...) if moreDiags.HasErrors() { return nil, diags } } return &TestFileExpectTraversal{ Traversal: traversal, Range: rng, DeclRange: block.DefRange, }, diags } func (r *Runner) decodeDiagnosticsBlock(block *hcl.Block) ([]*TestFileExpectDiag, hcl.Diagnostics) { var diags hcl.Diagnostics content, moreDiags := block.Body.Content(testFileDiagnosticsSchema) diags = append(diags, moreDiags...) if moreDiags.HasErrors() { return nil, diags } if len(content.Blocks) == 0 { diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Empty diagnostics block", Detail: "If a diagnostics block is present, at least one expectation statement (\"error\" or \"warning\" block) must be included.", Subject: &block.TypeRange, }) return nil, diags } ret := make([]*TestFileExpectDiag, 0, len(content.Blocks)) for _, block := range content.Blocks { rng, remain, moreDiags := r.decodeRangeFromBody(block.Body) diags = append(diags, moreDiags...) if diags.HasErrors() { continue } // Should have nothing else in the block aside from the range definition. _, moreDiags = remain.Content(&hcl.BodySchema{}) diags = append(diags, moreDiags...) 
var severity hcl.DiagnosticSeverity switch block.Type { case "error": severity = hcl.DiagError case "warning": severity = hcl.DiagWarning default: panic(fmt.Sprintf("unsupported block type %q", block.Type)) } ret = append(ret, &TestFileExpectDiag{ Severity: severity, Range: rng, DeclRange: block.TypeRange, }) } return ret, diags } func (r *Runner) decodeRangeFromBody(body hcl.Body) (hcl.Range, hcl.Body, hcl.Diagnostics) { type RawPos struct { Line int `hcl:"line"` Column int `hcl:"column"` Byte int `hcl:"byte"` } type RawRange struct { From RawPos `hcl:"from,block"` To RawPos `hcl:"to,block"` Remain hcl.Body `hcl:",remain"` } var raw RawRange diags := gohcl.DecodeBody(body, nil, &raw) return hcl.Range{ // We intentionally omit Filename here, because the test spec doesn't // need to specify that explicitly: we can infer it to be the file // path we pass to hcldec. Start: hcl.Pos{ Line: raw.From.Line, Column: raw.From.Column, Byte: raw.From.Byte, }, End: hcl.Pos{ Line: raw.To.Line, Column: raw.To.Column, Byte: raw.To.Byte, }, }, raw.Remain, diags } var testFileSchema = &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "result", }, { Name: "result_type", }, }, Blocks: []hcl.BlockHeaderSchema{ { Type: "traversals", }, { Type: "diagnostics", }, }, } var testFileTraversalsSchema = &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "expect", }, }, } var testFileTraversalExpectSchema = &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "ref", Required: true, }, }, Blocks: []hcl.BlockHeaderSchema{ { Type: "range", }, }, } var testFileDiagnosticsSchema = &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "error", }, { Type: "warning", }, }, } var testFileRangeSchema = &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "from", }, { Type: "to", }, }, } var testFilePosSchema = &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "line", Required: true, }, { Name: "column", Required: true, }, { Name: "byte", Required: true, }, }, } hcl-2.14.1/cmd/hclspecsuite/traversals.go000066400000000000000000000057621431334125700203350ustar00rootroot00000000000000package main import ( "fmt" "reflect" "github.com/hashicorp/hcl/v2" ) func findTraversalSpec(got hcl.Traversal, candidates []*TestFileExpectTraversal) *TestFileExpectTraversal { for _, candidate := range candidates { if traversalsAreEquivalent(candidate.Traversal, got) { return candidate } } return nil } func findTraversalForSpec(want *TestFileExpectTraversal, have []hcl.Traversal) hcl.Traversal { for _, candidate := range have { if traversalsAreEquivalent(candidate, want.Traversal) { return candidate } } return nil } func traversalsAreEquivalent(a, b hcl.Traversal) bool { if len(a) != len(b) { return false } for i := range a { aStep := a[i] bStep := b[i] if reflect.TypeOf(aStep) != reflect.TypeOf(bStep) { return false } // We can now assume that both are of the same type. switch ts := aStep.(type) { case hcl.TraverseRoot: if bStep.(hcl.TraverseRoot).Name != ts.Name { return false } case hcl.TraverseAttr: if bStep.(hcl.TraverseAttr).Name != ts.Name { return false } case hcl.TraverseIndex: if !bStep.(hcl.TraverseIndex).Key.RawEquals(ts.Key) { return false } default: return false } } return true } // checkTraversalsMatch determines if a given traversal matches the given // expectation, which must've been produced by an earlier call to // findTraversalSpec for the same traversal. 
func checkTraversalsMatch(got hcl.Traversal, filename string, match *TestFileExpectTraversal) hcl.Diagnostics { var diags hcl.Diagnostics gotRng := got.SourceRange() wantRng := match.Range if got, want := gotRng.Filename, filename; got != want { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect filename in detected traversal", Detail: fmt.Sprintf( "Filename was reported as %q, but was expecting %q.", got, want, ), Subject: match.Traversal.SourceRange().Ptr(), }) return diags } // If we have the expected filename then we'll use that to construct the // full "want range" here so that we can use it to point to the appropriate // location in the remaining diagnostics. wantRng.Filename = filename if got, want := gotRng.Start, wantRng.Start; got != want { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect start position in detected traversal", Detail: fmt.Sprintf( "Start position was reported as line %d column %d byte %d, but was expecting line %d column %d byte %d.", got.Line, got.Column, got.Byte, want.Line, want.Column, want.Byte, ), Subject: &wantRng, }) } if got, want := gotRng.End, wantRng.End; got != want { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect end position in detected traversal", Detail: fmt.Sprintf( "End position was reported as line %d column %d byte %d, but was expecting line %d column %d byte %d.", got.Line, got.Column, got.Byte, want.Line, want.Column, want.Byte, ), Subject: &wantRng, }) } return diags } hcl-2.14.1/diagnostic.go000066400000000000000000000154561431334125700150360ustar00rootroot00000000000000package hcl import ( "fmt" ) // DiagnosticSeverity represents the severity of a diagnostic. type DiagnosticSeverity int const ( // DiagInvalid is the invalid zero value of DiagnosticSeverity DiagInvalid DiagnosticSeverity = iota // DiagError indicates that the problem reported by a diagnostic prevents // further progress in parsing and/or evaluating the subject. DiagError // DiagWarning indicates that the problem reported by a diagnostic warrants // user attention but does not prevent further progress. It is most // commonly used for showing deprecation notices. DiagWarning ) // Diagnostic represents information to be presented to a user about an // error or anomaly in parsing or evaluating configuration. type Diagnostic struct { Severity DiagnosticSeverity // Summary and Detail contain the English-language description of the // problem. Summary is a terse description of the general problem and // detail is a more elaborate, often-multi-sentence description of // the problem and what might be done to solve it. Summary string Detail string // Subject and Context are both source ranges relating to the diagnostic. // // Subject is a tight range referring to exactly the construct that // is problematic, while Context is an optional broader range (which should // fully contain Subject) that ought to be shown around Subject when // generating isolated source-code snippets in diagnostic messages. // If Context is nil, the Subject is also the Context. // // Some diagnostics have no source ranges at all. If Context is set then // Subject should always also be set. Subject *Range Context *Range // For diagnostics that occur when evaluating an expression, Expression // may refer to that expression and EvalContext may point to the // EvalContext that was active when evaluating it. 
This may allow for the // inclusion of additional useful information when rendering a diagnostic // message to the user. // // It is not always possible to select a single EvalContext for a // diagnostic, and so in some cases this field may be nil even when an // expression causes a problem. // // EvalContexts form a tree, so the given EvalContext may refer to a parent // which in turn refers to another parent, etc. For a full picture of all // of the active variables and functions the caller must walk up this // chain, preferring definitions that are "closer" to the expression in // case of colliding names. Expression Expression EvalContext *EvalContext // Extra is an extension point for additional machine-readable information // about this problem. // // Recipients of diagnostic objects may type-assert this value with // specific interface types they know about to discover if any additional // information is available that is interesting for their use-case. // // Extra is always considered to be optional extra information and so a // diagnostic message should still always be fully described (from the // perspective of a human who understands the language the messages are // written in) by the other fields in case a particular recipient. // // Functions that return diagnostics with Extra populated should typically // document that they place values implementing a particular interface, // rather than a concrete type, and define that interface such that its // methods can dynamically indicate a lack of support at runtime even // if the interface happens to be statically available. An Extra // type that wraps other Extra values should additionally implement // interface DiagnosticExtraUnwrapper to return the value they are wrapping // so that callers can access inner values to type-assert against. Extra interface{} } // Diagnostics is a list of Diagnostic instances. type Diagnostics []*Diagnostic // error implementation, so that diagnostics can be returned via APIs // that normally deal in vanilla Go errors. // // This presents only minimal context about the error, for compatibility // with usual expectations about how errors will present as strings. func (d *Diagnostic) Error() string { return fmt.Sprintf("%s: %s; %s", d.Subject, d.Summary, d.Detail) } // error implementation, so that sets of diagnostics can be returned via // APIs that normally deal in vanilla Go errors. func (d Diagnostics) Error() string { count := len(d) switch { case count == 0: return "no diagnostics" case count == 1: return d[0].Error() default: return fmt.Sprintf("%s, and %d other diagnostic(s)", d[0].Error(), count-1) } } // Append appends a new error to a Diagnostics and return the whole Diagnostics. // // This is provided as a convenience for returning from a function that // collects and then returns a set of diagnostics: // // return nil, diags.Append(&hcl.Diagnostic{ ... }) // // Note that this modifies the array underlying the diagnostics slice, so // must be used carefully within a single codepath. It is incorrect (and rude) // to extend a diagnostics created by a different subsystem. func (d Diagnostics) Append(diag *Diagnostic) Diagnostics { return append(d, diag) } // Extend concatenates the given Diagnostics with the receiver and returns // the whole new Diagnostics. // // This is similar to Append but accepts multiple diagnostics to add. It has // all the same caveats and constraints. func (d Diagnostics) Extend(diags Diagnostics) Diagnostics { return append(d, diags...) 
} // HasErrors returns true if the receiver contains any diagnostics of // severity DiagError. func (d Diagnostics) HasErrors() bool { for _, diag := range d { if diag.Severity == DiagError { return true } } return false } func (d Diagnostics) Errs() []error { var errs []error for _, diag := range d { if diag.Severity == DiagError { errs = append(errs, diag) } } return errs } // A DiagnosticWriter emits diagnostics somehow. type DiagnosticWriter interface { WriteDiagnostic(*Diagnostic) error WriteDiagnostics(Diagnostics) error } // DiagnosticExtraUnwrapper is an interface implemented by values in the // Extra field of Diagnostic when they are wrapping another "Extra" value that // was generated downstream. // // Diagnostic recipients which want to examine "Extra" values to sniff for // particular types of extra data can either type-assert this interface // directly and repeatedly unwrap until they recieve nil, or can use the // helper function DiagnosticExtra. type DiagnosticExtraUnwrapper interface { // If the reciever is wrapping another "diagnostic extra" value, returns // that value. Otherwise returns nil to indicate dynamically that nothing // is wrapped. // // The "nothing is wrapped" condition can be signalled either by this // method returning nil or by a type not implementing this interface at all. // // Implementers should never create unwrap "cycles" where a nested extra // value returns a value that was also wrapping it. UnwrapDiagnosticExtra() interface{} } hcl-2.14.1/diagnostic_text.go000066400000000000000000000204571431334125700160770ustar00rootroot00000000000000package hcl import ( "bufio" "bytes" "errors" "fmt" "io" "sort" wordwrap "github.com/mitchellh/go-wordwrap" "github.com/zclconf/go-cty/cty" ) type diagnosticTextWriter struct { files map[string]*File wr io.Writer width uint color bool } // NewDiagnosticTextWriter creates a DiagnosticWriter that writes diagnostics // to the given writer as formatted text. // // It is designed to produce text appropriate to print in a monospaced font // in a terminal of a particular width, or optionally with no width limit. // // The given width may be zero to disable word-wrapping of the detail text // and truncation of source code snippets. // // If color is set to true, the output will include VT100 escape sequences to // color-code the severity indicators. It is suggested to turn this off if // the target writer is not a terminal. func NewDiagnosticTextWriter(wr io.Writer, files map[string]*File, width uint, color bool) DiagnosticWriter { return &diagnosticTextWriter{ files: files, wr: wr, width: width, color: color, } } func (w *diagnosticTextWriter) WriteDiagnostic(diag *Diagnostic) error { if diag == nil { return errors.New("nil diagnostic") } var colorCode, highlightCode, resetCode string if w.color { switch diag.Severity { case DiagError: colorCode = "\x1b[31m" case DiagWarning: colorCode = "\x1b[33m" } resetCode = "\x1b[0m" highlightCode = "\x1b[1;4m" } var severityStr string switch diag.Severity { case DiagError: severityStr = "Error" case DiagWarning: severityStr = "Warning" default: // should never happen severityStr = "???????" } fmt.Fprintf(w.wr, "%s%s%s: %s\n\n", colorCode, severityStr, resetCode, diag.Summary) if diag.Subject != nil { snipRange := *diag.Subject highlightRange := snipRange if diag.Context != nil { // Show enough of the source code to include both the subject // and context ranges, which overlap in all reasonable // situations. 
snipRange = RangeOver(snipRange, *diag.Context) } // We can't illustrate an empty range, so we'll turn such ranges into // single-character ranges, which might not be totally valid (may point // off the end of a line, or off the end of the file) but are good // enough for the bounds checks we do below. if snipRange.Empty() { snipRange.End.Byte++ snipRange.End.Column++ } if highlightRange.Empty() { highlightRange.End.Byte++ highlightRange.End.Column++ } file := w.files[diag.Subject.Filename] if file == nil || file.Bytes == nil { fmt.Fprintf(w.wr, " on %s line %d:\n (source code not available)\n\n", diag.Subject.Filename, diag.Subject.Start.Line) } else { var contextLine string if diag.Subject != nil { contextLine = contextString(file, diag.Subject.Start.Byte) if contextLine != "" { contextLine = ", in " + contextLine } } fmt.Fprintf(w.wr, " on %s line %d%s:\n", diag.Subject.Filename, diag.Subject.Start.Line, contextLine) src := file.Bytes sc := NewRangeScanner(src, diag.Subject.Filename, bufio.ScanLines) for sc.Scan() { lineRange := sc.Range() if !lineRange.Overlaps(snipRange) { continue } beforeRange, highlightedRange, afterRange := lineRange.PartitionAround(highlightRange) if highlightedRange.Empty() { fmt.Fprintf(w.wr, "%4d: %s\n", lineRange.Start.Line, sc.Bytes()) } else { before := beforeRange.SliceBytes(src) highlighted := highlightedRange.SliceBytes(src) after := afterRange.SliceBytes(src) fmt.Fprintf( w.wr, "%4d: %s%s%s%s%s\n", lineRange.Start.Line, before, highlightCode, highlighted, resetCode, after, ) } } w.wr.Write([]byte{'\n'}) } if diag.Expression != nil && diag.EvalContext != nil { // We will attempt to render the values for any variables // referenced in the given expression as additional context, for // situations where the same expression is evaluated multiple // times in different scopes. expr := diag.Expression ctx := diag.EvalContext vars := expr.Variables() stmts := make([]string, 0, len(vars)) seen := make(map[string]struct{}, len(vars)) for _, traversal := range vars { val, diags := traversal.TraverseAbs(ctx) if diags.HasErrors() { // Skip anything that generates errors, since we probably // already have the same error in our diagnostics set // already. continue } traversalStr := w.traversalStr(traversal) if _, exists := seen[traversalStr]; exists { continue // don't show duplicates when the same variable is referenced multiple times } switch { case !val.IsKnown(): // Can't say anything about this yet, then. continue case val.IsNull(): stmts = append(stmts, fmt.Sprintf("%s set to null", traversalStr)) default: stmts = append(stmts, fmt.Sprintf("%s as %s", traversalStr, w.valueStr(val))) } seen[traversalStr] = struct{}{} } sort.Strings(stmts) // FIXME: Should maybe use a traversal-aware sort that can sort numeric indexes properly? 
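	// The statements collected above are rendered by the loop below as a
	// single trailing clause after the source snippet, for example
	// (illustrative):
	//
	//   with foo as "foo value",
	//        bar.baz as empty list of string.
	//
	// The first statement gets the "with " prefix, later statements are
	// indented to align beneath it, and the last one ends with a period.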
last := len(stmts) - 1 for i, stmt := range stmts { switch i { case 0: w.wr.Write([]byte{'w', 'i', 't', 'h', ' '}) default: w.wr.Write([]byte{' ', ' ', ' ', ' ', ' '}) } w.wr.Write([]byte(stmt)) switch i { case last: w.wr.Write([]byte{'.', '\n', '\n'}) default: w.wr.Write([]byte{',', '\n'}) } } } } if diag.Detail != "" { detail := diag.Detail if w.width != 0 { detail = wordwrap.WrapString(detail, w.width) } fmt.Fprintf(w.wr, "%s\n\n", detail) } return nil } func (w *diagnosticTextWriter) WriteDiagnostics(diags Diagnostics) error { for _, diag := range diags { err := w.WriteDiagnostic(diag) if err != nil { return err } } return nil } func (w *diagnosticTextWriter) traversalStr(traversal Traversal) string { // This is a specialized subset of traversal rendering tailored to // producing helpful contextual messages in diagnostics. It is not // comprehensive nor intended to be used for other purposes. var buf bytes.Buffer for _, step := range traversal { switch tStep := step.(type) { case TraverseRoot: buf.WriteString(tStep.Name) case TraverseAttr: buf.WriteByte('.') buf.WriteString(tStep.Name) case TraverseIndex: buf.WriteByte('[') if keyTy := tStep.Key.Type(); keyTy.IsPrimitiveType() { buf.WriteString(w.valueStr(tStep.Key)) } else { // We'll just use a placeholder for more complex values, // since otherwise our result could grow ridiculously long. buf.WriteString("...") } buf.WriteByte(']') } } return buf.String() } func (w *diagnosticTextWriter) valueStr(val cty.Value) string { // This is a specialized subset of value rendering tailored to producing // helpful but concise messages in diagnostics. It is not comprehensive // nor intended to be used for other purposes. ty := val.Type() switch { case val.IsNull(): return "null" case !val.IsKnown(): // Should never happen here because we should filter before we get // in here, but we'll do something reasonable rather than panic. return "(not yet known)" case ty == cty.Bool: if val.True() { return "true" } return "false" case ty == cty.Number: bf := val.AsBigFloat() return bf.Text('g', 10) case ty == cty.String: // Go string syntax is not exactly the same as HCL native string syntax, // but we'll accept the minor edge-cases where this is different here // for now, just to get something reasonable here. 
return fmt.Sprintf("%q", val.AsString()) case ty.IsCollectionType() || ty.IsTupleType(): l := val.LengthInt() switch l { case 0: return "empty " + ty.FriendlyName() case 1: return ty.FriendlyName() + " with 1 element" default: return fmt.Sprintf("%s with %d elements", ty.FriendlyName(), l) } case ty.IsObjectType(): atys := ty.AttributeTypes() l := len(atys) switch l { case 0: return "object with no attributes" case 1: var name string for k := range atys { name = k } return fmt.Sprintf("object with 1 attribute %q", name) default: return fmt.Sprintf("object with %d attributes", l) } default: return ty.FriendlyName() } } func contextString(file *File, offset int) string { type contextStringer interface { ContextString(offset int) string } if cser, ok := file.Nav.(contextStringer); ok { return cser.ContextString(offset) } return "" } hcl-2.14.1/diagnostic_text_test.go000066400000000000000000000104121431334125700171240ustar00rootroot00000000000000package hcl import ( "bytes" "fmt" "testing" "github.com/zclconf/go-cty/cty" ) func TestDiagnosticTextWriter(t *testing.T) { tests := []struct { Input *Diagnostic Want string }{ { &Diagnostic{ Severity: DiagError, Summary: "Splines not reticulated", Detail: "All splines must be pre-reticulated.", Subject: &Range{ Start: Pos{ Byte: 0, Column: 1, Line: 1, }, End: Pos{ Byte: 3, Column: 4, Line: 1, }, }, }, `Error: Splines not reticulated on line 1, in hardcoded-context: 1: foo = 1 All splines must be pre-reticulated. `, }, { &Diagnostic{ Severity: DiagError, Summary: "Unsupported attribute", Detail: `"baz" is not a supported top-level attribute. Did you mean "bam"?`, Subject: &Range{ Start: Pos{ Byte: 16, Column: 1, Line: 3, }, End: Pos{ Byte: 19, Column: 4, Line: 3, }, }, }, `Error: Unsupported attribute on line 3, in hardcoded-context: 3: baz = 3 "baz" is not a supported top-level attribute. Did you mean "bam"? `, }, { &Diagnostic{ Severity: DiagError, Summary: "Unsupported attribute", Detail: `"pizza" is not a supported attribute. Did you mean "pizzetta"?`, Subject: &Range{ Start: Pos{ Byte: 42, Column: 3, Line: 5, }, End: Pos{ Byte: 47, Column: 8, Line: 5, }, }, // This is actually not a great example of a context, but is here to test // whether we're able to show a multi-line context when needed. Context: &Range{ Start: Pos{ Byte: 24, Column: 1, Line: 4, }, End: Pos{ Byte: 60, Column: 2, Line: 6, }, }, }, `Error: Unsupported attribute on line 5, in hardcoded-context: 4: block "party" { 5: pizza = "cheese" 6: } "pizza" is not a supported attribute. Did you mean "pizzetta"? `, }, { &Diagnostic{ Severity: DiagError, Summary: "Test of including relevant variable values", Detail: `This diagnostic includes an expression and an evalcontext.`, Subject: &Range{ Start: Pos{ Byte: 42, Column: 3, Line: 5, }, End: Pos{ Byte: 47, Column: 8, Line: 5, }, }, Expression: &diagnosticTestExpr{ vars: []Traversal{ { TraverseRoot{ Name: "foo", }, }, { TraverseRoot{ Name: "bar", }, TraverseAttr{ Name: "baz", }, }, { TraverseRoot{ Name: "missing", }, }, { TraverseRoot{ Name: "boz", }, }, }, }, EvalContext: &EvalContext{ parent: &EvalContext{ Variables: map[string]cty.Value{ "foo": cty.StringVal("foo value"), }, }, Variables: map[string]cty.Value{ "bar": cty.ObjectVal(map[string]cty.Value{ "baz": cty.ListValEmpty(cty.String), }), "boz": cty.NumberIntVal(5), "unused": cty.True, }, }, }, `Error: Test of including relevant variable values on line 5, in hardcoded-context: 5: pizza = "cheese" with bar.baz as empty list of string, boz as 5, foo as "foo value". 
This diagnostic includes an expression and an evalcontext. `, }, } files := map[string]*File{ "": &File{ Bytes: []byte(testDiagnosticTextWriterSource), Nav: &diagnosticTestNav{}, }, } for i, test := range tests { t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) { bwr := &bytes.Buffer{} dwr := NewDiagnosticTextWriter(bwr, files, 40, false) err := dwr.WriteDiagnostic(test.Input) if err != nil { t.Fatalf("unexpected error: %s", err) } got := bwr.String() if got != test.Want { t.Errorf("wrong result\n\ngot:\n%swant:\n%s", got, test.Want) } }) } } const testDiagnosticTextWriterSource = `foo = 1 bar = 2 baz = 3 block "party" { pizza = "cheese" } ` type diagnosticTestNav struct { } func (tn *diagnosticTestNav) ContextString(offset int) string { return "hardcoded-context" } type diagnosticTestExpr struct { vars []Traversal staticExpr } func (e *diagnosticTestExpr) Variables() []Traversal { return e.vars } hcl-2.14.1/diagnostic_typeparams.go000066400000000000000000000023621431334125700172730ustar00rootroot00000000000000//go:build go1.18 // +build go1.18 package hcl // This file contains additional diagnostics-related symbols that use the // Go 1.18 type parameters syntax and would therefore be incompatible with // Go 1.17 and earlier. // DiagnosticExtra attempts to retrieve an "extra value" of type T from the // given diagnostic, if either the diag.Extra field directly contains a value // of that type or the value implements DiagnosticExtraUnwrapper and directly // or indirectly returns a value of that type. // // Type T should typically be an interface type, so that code which generates // diagnostics can potentially return different implementations of the same // interface dynamically as needed. // // If a value of type T is found, returns that value and true to indicate // success. Otherwise, returns the zero value of T and false to indicate // failure. func DiagnosticExtra[T any](diag *Diagnostic) (T, bool) { extra := diag.Extra var zero T for { if ret, ok := extra.(T); ok { return ret, true } if unwrap, ok := extra.(DiagnosticExtraUnwrapper); ok { // If our "extra" implements DiagnosticExtraUnwrapper then we'll // unwrap one level and try this again. extra = unwrap.UnwrapDiagnosticExtra() } else { return zero, false } } } hcl-2.14.1/didyoumean.go000066400000000000000000000014471431334125700150430ustar00rootroot00000000000000package hcl import ( "github.com/agext/levenshtein" ) // nameSuggestion tries to find a name from the given slice of suggested names // that is close to the given name and returns it if found. If no suggestion // is close enough, returns the empty string. // // The suggestions are tried in order, so earlier suggestions take precedence // if the given string is similar to two or more suggestions. // // This function is intended to be used with a relatively-small number of // suggestions. It's not optimized for hundreds or thousands of them. func nameSuggestion(given string, suggestions []string) string { for _, suggestion := range suggestions { dist := levenshtein.Distance(given, suggestion, nil) if dist < 3 { // threshold determined experimentally return suggestion } } return "" } hcl-2.14.1/doc.go000066400000000000000000000022331431334125700134440ustar00rootroot00000000000000// Package hcl contains the main modelling types and general utility functions // for HCL. 
// // For a simple entry point into HCL, see the package in the subdirectory // "hclsimple", which has an opinionated function Decode that can decode HCL // configurations in either native HCL syntax or JSON syntax into a Go struct // type: // // package main // // import ( // "log" // "github.com/hashicorp/hcl/v2/hclsimple" // ) // // type Config struct { // LogLevel string `hcl:"log_level"` // } // // func main() { // var config Config // err := hclsimple.DecodeFile("config.hcl", nil, &config) // if err != nil { // log.Fatalf("Failed to load configuration: %s", err) // } // log.Printf("Configuration is %#v", config) // } // // If your application needs more control over the evaluation of the // configuration, you can use the functions in the subdirectories hclparse, // gohcl, hcldec, etc. Splitting the handling of configuration into multiple // phases allows for advanced patterns such as allowing expressions in one // part of the configuration to refer to data defined in another part. package hcl hcl-2.14.1/eval_context.go000066400000000000000000000012041431334125700153670ustar00rootroot00000000000000package hcl import ( "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" ) // An EvalContext provides the variables and functions that should be used // to evaluate an expression. type EvalContext struct { Variables map[string]cty.Value Functions map[string]function.Function parent *EvalContext } // NewChild returns a new EvalContext that is a child of the receiver. func (ctx *EvalContext) NewChild() *EvalContext { return &EvalContext{parent: ctx} } // Parent returns the parent of the receiver, or nil if the receiver has // no parent. func (ctx *EvalContext) Parent() *EvalContext { return ctx.parent } hcl-2.14.1/expr_call.go000066400000000000000000000026121431334125700146510ustar00rootroot00000000000000package hcl // ExprCall tests if the given expression is a function call and, // if so, extracts the function name and the expressions that represent // the arguments. If the given expression is not statically a function call, // error diagnostics are returned. // // A particular Expression implementation can support this function by // offering a method called ExprCall that takes no arguments and returns // *StaticCall. This method should return nil if a static call cannot // be extracted. Alternatively, an implementation can support // UnwrapExpression to delegate handling of this function to a wrapped // Expression object. func ExprCall(expr Expression) (*StaticCall, Diagnostics) { type exprCall interface { ExprCall() *StaticCall } physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool { _, supported := expr.(exprCall) return supported }) if exC, supported := physExpr.(exprCall); supported { if call := exC.ExprCall(); call != nil { return call, nil } } return nil, Diagnostics{ &Diagnostic{ Severity: DiagError, Summary: "Invalid expression", Detail: "A static function call is required.", Subject: expr.StartRange().Ptr(), }, } } // StaticCall represents a function call that was extracted statically from // an expression using ExprCall. type StaticCall struct { Name string NameRange Range Arguments []Expression ArgsRange Range } hcl-2.14.1/expr_list.go000066400000000000000000000022431431334125700147110ustar00rootroot00000000000000package hcl // ExprList tests if the given expression is a static list construct and, // if so, extracts the expressions that represent the list elements. 
// If the given expression is not a static list, error diagnostics are // returned. // // A particular Expression implementation can support this function by // offering a method called ExprList that takes no arguments and returns // []Expression. This method should return nil if a static list cannot // be extracted. Alternatively, an implementation can support // UnwrapExpression to delegate handling of this function to a wrapped // Expression object. func ExprList(expr Expression) ([]Expression, Diagnostics) { type exprList interface { ExprList() []Expression } physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool { _, supported := expr.(exprList) return supported }) if exL, supported := physExpr.(exprList); supported { if list := exL.ExprList(); list != nil { return list, nil } } return nil, Diagnostics{ &Diagnostic{ Severity: DiagError, Summary: "Invalid expression", Detail: "A static list expression is required.", Subject: expr.StartRange().Ptr(), }, } } hcl-2.14.1/expr_map.go000066400000000000000000000025361431334125700145200ustar00rootroot00000000000000package hcl // ExprMap tests if the given expression is a static map construct and, // if so, extracts the expressions that represent the map elements. // If the given expression is not a static map, error diagnostics are // returned. // // A particular Expression implementation can support this function by // offering a method called ExprMap that takes no arguments and returns // []KeyValuePair. This method should return nil if a static map cannot // be extracted. Alternatively, an implementation can support // UnwrapExpression to delegate handling of this function to a wrapped // Expression object. func ExprMap(expr Expression) ([]KeyValuePair, Diagnostics) { type exprMap interface { ExprMap() []KeyValuePair } physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool { _, supported := expr.(exprMap) return supported }) if exM, supported := physExpr.(exprMap); supported { if pairs := exM.ExprMap(); pairs != nil { return pairs, nil } } return nil, Diagnostics{ &Diagnostic{ Severity: DiagError, Summary: "Invalid expression", Detail: "A static map expression is required.", Subject: expr.StartRange().Ptr(), }, } } // KeyValuePair represents a pair of expressions that serve as a single item // within a map or object definition construct. type KeyValuePair struct { Key Expression Value Expression } hcl-2.14.1/expr_unwrap.go000066400000000000000000000047111431334125700152540ustar00rootroot00000000000000package hcl type unwrapExpression interface { UnwrapExpression() Expression } // UnwrapExpression removes any "wrapper" expressions from the given expression, // to recover the representation of the physical expression given in source // code. // // Sometimes wrapping expressions are used to modify expression behavior, e.g. // in extensions that need to make some local variables available to certain // sub-trees of the configuration. This can make it difficult to reliably // type-assert on the physical AST types used by the underlying syntax. // // Unwrapping an expression may modify its behavior by stripping away any // additional constraints or capabilities being applied to the Value and // Variables methods, so this function should generally only be used prior // to operations that concern themselves with the static syntax of the input // configuration, and not with the effective value of the expression. 
// // Wrapper expression types must support unwrapping by implementing a method // called UnwrapExpression that takes no arguments and returns the embedded // Expression. Implementations of this method should peel away only one level // of wrapping, if multiple are present. This method may return nil to // indicate _dynamically_ that no wrapped expression is available, for // expression types that might only behave as wrappers in certain cases. func UnwrapExpression(expr Expression) Expression { for { unwrap, wrapped := expr.(unwrapExpression) if !wrapped { return expr } innerExpr := unwrap.UnwrapExpression() if innerExpr == nil { return expr } expr = innerExpr } } // UnwrapExpressionUntil is similar to UnwrapExpression except it gives the // caller an opportunity to test each level of unwrapping to see each a // particular expression is accepted. // // This could be used, for example, to unwrap until a particular other // interface is satisfied, regardless of wrap wrapping level it is satisfied // at. // // The given callback function must return false to continue wrapping, or // true to accept and return the proposed expression given. If the callback // function rejects even the final, physical expression then the result of // this function is nil. func UnwrapExpressionUntil(expr Expression, until func(Expression) bool) Expression { for { if until(expr) { return expr } unwrap, wrapped := expr.(unwrapExpression) if !wrapped { return nil } expr = unwrap.UnwrapExpression() if expr == nil { return nil } } } hcl-2.14.1/ext/000077500000000000000000000000001431334125700131505ustar00rootroot00000000000000hcl-2.14.1/ext/README.md000066400000000000000000000007161431334125700144330ustar00rootroot00000000000000# HCL Extensions This directory contains some packages implementing some extensions to HCL that add features by building on the core API in the main `hcl` package. These serve as optional language extensions for use-cases that are limited only to specific callers. Generally these make the language more expressive at the expense of increased dynamic behavior that may be undesirable for applications that need to impose more rigid structure on configuration. hcl-2.14.1/ext/customdecode/000077500000000000000000000000001431334125700156265ustar00rootroot00000000000000hcl-2.14.1/ext/customdecode/README.md000066400000000000000000000215721431334125700171140ustar00rootroot00000000000000# HCL Custom Static Decoding Extension This HCL extension provides a mechanism for defining arguments in an HCL-based language whose values are derived using custom decoding rules against the HCL expression syntax, overriding the usual behavior of normal expression evaluation. "Arguments", for the purpose of this extension, currently includes the following two contexts: * For applications using `hcldec` for dynamic decoding, a `hcldec.AttrSpec` or `hcldec.BlockAttrsSpec` can be given a special type constraint that opts in to custom decoding behavior for the attribute(s) that are selected by that specification. * When working with the HCL native expression syntax, a function given in the `hcl.EvalContext` during evaluation can have parameters with special type constraints that opt in to custom decoding behavior for the argument expression associated with that parameter in any call. 
The above use-cases are rather abstract, so we'll consider a motivating real-world example: sometimes we (language designers) need to allow users to specify type constraints directly in the language itself, such as in [Terraform's Input Variables](https://www.terraform.io/docs/configuration/variables.html). Terraform's `variable` blocks include an argument called `type` which takes a type constraint given using HCL expression building-blocks as defined by [the HCL `typeexpr` extension](../typeexpr/README.md). A "type constraint expression" of that sort is not an expression intended to be evaluated in the usual way. Instead, the physical expression is deconstructed using [the static analysis operations](../../spec.md#static-analysis) to produce a `cty.Type` as the result, rather than a `cty.Value`. The purpose of this Custom Static Decoding Extension, then, is to provide a bridge to allow that sort of custom decoding to be used via mechanisms that normally deal in `cty.Value`, such as `hcldec` and native syntax function calls as listed above. (Note: [`gohcl`](https://pkg.go.dev/github.com/hashicorp/hcl/v2/gohcl) has its own mechanism to support this use case, exploiting the fact that it is working directly with "normal" Go types. Decoding into a struct field of type `hcl.Expression` obtains the expression directly without evaluating it first. The Custom Static Decoding Extension is not necessary for that `gohcl` technique. You can also implement custom decoding by working directly with the lowest-level HCL API, which separates extraction of and evaluation of expressions into two steps.) ## Custom Decoding Types This extension relies on a convention implemented in terms of [_Capsule Types_ in the underlying `cty` type system](https://github.com/zclconf/go-cty/blob/master/docs/types.md#capsule-types). `cty` allows a capsule type to carry arbitrary extension metadata values as an aid to creating higher-level abstractions like this extension. A custom argument decoding mode, then, is implemented by creating a new `cty` capsule type that implements the `ExtensionData` custom operation to return a decoding function when requested. For example: ```go var keywordType cty.Type keywordType = cty.CapsuleWithOps("keyword", reflect.TypeOf(""), &cty.CapsuleOps{ ExtensionData: func(key interface{}) interface{} { switch key { case customdecode.CustomExpressionDecoder: return customdecode.CustomExpressionDecoderFunc( func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var diags hcl.Diagnostics kw := hcl.ExprAsKeyword(expr) if kw == "" { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid keyword", Detail: "A keyword is required", Subject: expr.Range().Ptr(), }) return cty.UnknownVal(keywordType), diags } return cty.CapsuleVal(keywordType, &kw), nil }, ) default: return nil } }, }) ``` The boilerplate here is a bit fussy, but the important part for our purposes is the `case customdecode.CustomExpressionDecoder:` clause, which uses a custom extension key type defined in this package to recognize when a component implementing this extension is checking to see if a target type has a custom decode implementation. In the above case we've defined a type that decodes expressions as static keywords, so a keyword like `foo` would decode as an encapsulated `"foo"` string, while any other sort of expression like `"baz"` or `1 + 1` would return an error. 
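To make the consuming side of this convention concrete, the following is a minimal sketch (assuming a hypothetical helper named `decodeArg` in an illustrative `example` package, neither of which is part of this extension) of how a decoder can ask whether a target type provides a custom decoder, via the `CustomExpressionDecoderForType` helper described later in this document, and fall back to ordinary evaluation and conversion otherwise. `hcldec` and the native syntax function call mechanism already do the equivalent internally, so this is shown only to illustrate the shape of the convention:

```go
package example

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/ext/customdecode"
	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/convert"
)

// decodeArg is a hypothetical helper showing how a decoder can honor the
// custom decoding convention for a target type constraint.
func decodeArg(expr hcl.Expression, ctx *hcl.EvalContext, ty cty.Type) (cty.Value, hcl.Diagnostics) {
	if fn := customdecode.CustomExpressionDecoderForType(ty); fn != nil {
		// The type opted in to custom decoding, so we pass the expression
		// itself to the decoder rather than evaluating it here.
		return fn(expr, ctx)
	}

	// Otherwise, evaluate and convert in the usual way.
	val, diags := expr.Value(ctx)
	if diags.HasErrors() {
		return cty.UnknownVal(ty), diags
	}
	val, err := convert.Convert(val, ty)
	if err != nil {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Unsuitable argument value",
			Detail:   fmt.Sprintf("This value cannot be used: %s.", err),
			Subject:  expr.Range().Ptr(),
		})
		return cty.UnknownVal(ty), diags
	}
	return val, diags
}
```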
We could then use `keywordType` as a type constraint either for a function parameter or a `hcldec` attribute specification, which would require the argument for that function parameter or the expression for the matching attributes to be a static keyword, rather than an arbitrary expression. For example, in a `hcldec.AttrSpec`: ```go keywordSpec := &hcldec.AttrSpec{ Name: "keyword", Type: keywordType, } ``` The above would accept input like the following and would set its result to a `cty.Value` of `keywordType`, after decoding: ```hcl keyword = foo ``` ## The Expression and Expression Closure `cty` types Building on the above, this package also includes two capsule types that use the above mechanism to allow calling applications to capture expressions directly and thus defer analysis to a later step, after initial decoding. The `customdecode.ExpressionType` type encapsulates an `hcl.Expression` alone, for situations like our type constraint expression example above where it's the static structure of the expression we want to inspect, and thus any variables and functions defined in the evaluation context are irrelevant. The `customdecode.ExpressionClosureType` type encapsulates a `*customdecode.ExpressionClosure` value, which binds the given expression to the `hcl.EvalContext` it was asked to evaluate against and thus allows the receiver of that result to later perform normal evaluation of the expression with all the same variables and functions that would've been available to it naturally. Both of these types can be used as type constraints either for `hcldec` attribute specifications or for function arguments. Here's an example of `ExpressionClosureType` to implement a function that can evaluate an expression with some additional variables defined locally, which we'll call the `with(...)` function: ```go var WithFunc = function.New(&function.Spec{ Params: []function.Parameter{ { Name: "variables", Type: cty.DynamicPseudoType, }, { Name: "expression", Type: customdecode.ExpressionClosureType, }, }, Type: func(args []cty.Value) (cty.Type, error) { varsVal := args[0] exprVal := args[1] if !varsVal.Type().IsObjectType() { return cty.NilVal, function.NewArgErrorf(0, "must be an object defining local variables") } if !varsVal.IsKnown() { // We can't predict our result type until the variables object // is known. 
return cty.DynamicPseudoType, nil } vars := varsVal.AsValueMap() closure := customdecode.ExpressionClosureFromVal(exprVal) result, err := evalWithLocals(vars, closure) if err != nil { return cty.NilVal, err } return result.Type(), nil }, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { varsVal := args[0] exprVal := args[1] vars := varsVal.AsValueMap() closure := customdecode.ExpressionClosureFromVal(exprVal) return evalWithLocals(vars, closure) }, }) func evalWithLocals(locals map[string]cty.Value, closure *customdecode.ExpressionClosure) (cty.Value, error) { childCtx := closure.EvalContext.NewChild() childCtx.Variables = locals val, diags := closure.Expression.Value(childCtx) if diags.HasErrors() { return cty.NilVal, function.NewArgErrorf(1, "couldn't evaluate expression: %s", diags.Error()) } return val, nil } ``` If the above function were placed into an `hcl.EvalContext` as `with`, it could be used in a native syntax call to that function as follows: ```hcl foo = with({name = "Cory"}, "${greeting}, ${name}!") ``` The above assumes a variable in the main context called `greeting`, to which the `with` function adds `name` before evaluating the expression given in its second argument. This makes that second argument context-sensitive -- it would behave differently if the user wrote the same thing somewhere else -- so this capability should be used with care to make sure it doesn't cause confusion for the end-users of your language. There are some other examples of this capability to evaluate expressions in unusual ways in the `tryfunc` directory that is a sibling of this one. hcl-2.14.1/ext/customdecode/customdecode.go000066400000000000000000000046621431334125700206430ustar00rootroot00000000000000// Package customdecode contains a HCL extension that allows, in certain // contexts, expression evaluation to be overridden by custom static analysis. // // This mechanism is only supported in certain specific contexts where // expressions are decoded with a specific target type in mind. For more // information, see the documentation on CustomExpressionDecoder. package customdecode import ( "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) type customDecoderImpl int // CustomExpressionDecoder is a value intended to be used as a cty capsule // type ExtensionData key for capsule types whose values are to be obtained // by static analysis of an expression rather than normal evaluation of that // expression. // // When a cooperating capsule type is asked for ExtensionData with this key, // it must return a non-nil CustomExpressionDecoderFunc value. // // This mechanism is not universally supported; instead, it's handled in a few // specific places where expressions are evaluated with the intent of producing // a cty.Value of a type given by the calling application. // // Specifically, this currently works for type constraints given in // hcldec.AttrSpec and hcldec.BlockAttrsSpec, and it works for arguments to // function calls in the HCL native syntax. HCL extensions implemented outside // of the main HCL module may also implement this; consult their own // documentation for details. const CustomExpressionDecoder = customDecoderImpl(1) // CustomExpressionDecoderFunc is the type of value that must be returned by // a capsule type handling the key CustomExpressionDecoder in its ExtensionData // implementation. // // If no error diagnostics are returned, the result value MUST be of the // capsule type that the decoder function was derived from. 
If the returned // error diagnostics prevent producing a value at all, return cty.NilVal. type CustomExpressionDecoderFunc func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) // CustomExpressionDecoderForType takes any cty type and returns its // custom expression decoder implementation if it has one. If it is not a // capsule type or it does not implement a custom expression decoder, this // function returns nil. func CustomExpressionDecoderForType(ty cty.Type) CustomExpressionDecoderFunc { if !ty.IsCapsuleType() { return nil } if fn, ok := ty.CapsuleExtensionData(CustomExpressionDecoder).(CustomExpressionDecoderFunc); ok { return fn } return nil } hcl-2.14.1/ext/customdecode/expression_type.go000066400000000000000000000123231431334125700214160ustar00rootroot00000000000000package customdecode import ( "fmt" "reflect" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) // ExpressionType is a cty capsule type that carries hcl.Expression values. // // This type implements custom decoding in the most general way possible: it // just captures whatever expression is given to it, with no further processing // whatsoever. It could therefore be useful in situations where an application // must defer processing of the expression content until a later step. // // ExpressionType only captures the expression, not the evaluation context it // was destined to be evaluated in. That means this type can be fine for // situations where the recipient of the value only intends to do static // analysis, but ExpressionClosureType is more appropriate in situations where // the recipient will eventually evaluate the given expression. var ExpressionType cty.Type // ExpressionVal returns a new cty value of type ExpressionType, wrapping the // given expression. func ExpressionVal(expr hcl.Expression) cty.Value { return cty.CapsuleVal(ExpressionType, &expr) } // ExpressionFromVal returns the expression encapsulated in the given value, or // panics if the value is not a known value of ExpressionType. func ExpressionFromVal(v cty.Value) hcl.Expression { if !v.Type().Equals(ExpressionType) { panic("value is not of ExpressionType") } ptr := v.EncapsulatedValue().(*hcl.Expression) return *ptr } // ExpressionClosureType is a cty capsule type that carries hcl.Expression // values along with their original evaluation contexts. // // This is similar to ExpressionType except that during custom decoding it // also captures the hcl.EvalContext that was provided, allowing callers to // evaluate the expression later in the same context where it would originally // have been evaluated, or a context derived from that one. var ExpressionClosureType cty.Type // ExpressionClosure is the type encapsulated in ExpressionClosureType type ExpressionClosure struct { Expression hcl.Expression EvalContext *hcl.EvalContext } // ExpressionClosureVal returns a new cty value of type ExpressionClosureType, // wrapping the given expression closure. func ExpressionClosureVal(closure *ExpressionClosure) cty.Value { return cty.CapsuleVal(ExpressionClosureType, closure) } // Value evaluates the closure's expression using the closure's EvalContext, // returning the result. func (c *ExpressionClosure) Value() (cty.Value, hcl.Diagnostics) { return c.Expression.Value(c.EvalContext) } // ExpressionClosureFromVal returns the expression closure encapsulated in the // given value, or panics if the value is not a known value of // ExpressionClosureType. 
// // The caller MUST NOT modify the returned closure or the EvalContext inside // it. To derive a new EvalContext, either create a child context or make // a copy. func ExpressionClosureFromVal(v cty.Value) *ExpressionClosure { if !v.Type().Equals(ExpressionClosureType) { panic("value is not of ExpressionClosureType") } return v.EncapsulatedValue().(*ExpressionClosure) } func init() { // Getting hold of a reflect.Type for hcl.Expression is a bit tricky because // it's an interface type, but we can do it with some indirection. goExpressionType := reflect.TypeOf((*hcl.Expression)(nil)).Elem() ExpressionType = cty.CapsuleWithOps("expression", goExpressionType, &cty.CapsuleOps{ ExtensionData: func(key interface{}) interface{} { switch key { case CustomExpressionDecoder: return CustomExpressionDecoderFunc( func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { return ExpressionVal(expr), nil }, ) default: return nil } }, TypeGoString: func(_ reflect.Type) string { return "customdecode.ExpressionType" }, GoString: func(raw interface{}) string { exprPtr := raw.(*hcl.Expression) return fmt.Sprintf("customdecode.ExpressionVal(%#v)", *exprPtr) }, RawEquals: func(a, b interface{}) bool { aPtr := a.(*hcl.Expression) bPtr := b.(*hcl.Expression) return reflect.DeepEqual(*aPtr, *bPtr) }, }) ExpressionClosureType = cty.CapsuleWithOps("expression closure", reflect.TypeOf(ExpressionClosure{}), &cty.CapsuleOps{ ExtensionData: func(key interface{}) interface{} { switch key { case CustomExpressionDecoder: return CustomExpressionDecoderFunc( func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { return ExpressionClosureVal(&ExpressionClosure{ Expression: expr, EvalContext: ctx, }), nil }, ) default: return nil } }, TypeGoString: func(_ reflect.Type) string { return "customdecode.ExpressionClosureType" }, GoString: func(raw interface{}) string { closure := raw.(*ExpressionClosure) return fmt.Sprintf("customdecode.ExpressionClosureVal(%#v)", closure) }, RawEquals: func(a, b interface{}) bool { closureA := a.(*ExpressionClosure) closureB := b.(*ExpressionClosure) // The expression itself compares by deep equality, but EvalContexts // conventionally compare by pointer identity, so we'll comply // with both conventions here by testing them separately. return closureA.EvalContext == closureB.EvalContext && reflect.DeepEqual(closureA.Expression, closureB.Expression) }, }) } hcl-2.14.1/ext/dynblock/000077500000000000000000000000001431334125700147555ustar00rootroot00000000000000hcl-2.14.1/ext/dynblock/README.md000066400000000000000000000144571431334125700162470ustar00rootroot00000000000000# HCL Dynamic Blocks Extension This HCL extension implements a special block type named "dynamic" that can be used to dynamically generate blocks of other types by iterating over collection values. Normally the block structure in an HCL configuration file is rigid, even though dynamic expressions can be used within attribute values. This is convenient for most applications since it allows the overall structure of the document to be decoded easily, but in some applications it is desirable to allow dynamic block generation within certain portions of the configuration. 
Dynamic block generation is performed using the `dynamic` block type:

```hcl
toplevel {
  nested {
    foo = "static block 1"
  }

  dynamic "nested" {
    for_each = ["a", "b", "c"]
    iterator = nested
    content {
      foo = "dynamic block ${nested.value}"
    }
  }

  nested {
    foo = "static block 2"
  }
}
```

The above is interpreted as if it were written as follows:

```hcl
toplevel {
  nested {
    foo = "static block 1"
  }

  nested {
    foo = "dynamic block a"
  }

  nested {
    foo = "dynamic block b"
  }

  nested {
    foo = "dynamic block c"
  }

  nested {
    foo = "static block 2"
  }
}
```

Since HCL block syntax is not normally exposed to the possibility of unknown values, this extension must make some compromises when asked to iterate over an unknown collection. If the length of the collection cannot be statically recognized (because it is an unknown value of list, map, or set type) then the `dynamic` construct will generate a _single_ dynamic block whose iterator key and value are both unknown values of the dynamic pseudo-type, thus causing any attribute values derived from iteration to appear as unknown values. There is no explicit representation of the fact that the length of the collection may eventually be different from one.

## Usage

Pass a body to function `Expand` to obtain a new body that will, on access to its content, evaluate and expand any nested `dynamic` blocks. Dynamic block processing is also automatically propagated into any nested blocks that are returned, allowing users to nest dynamic blocks inside one another and to nest dynamic blocks inside other static blocks.

HCL structural decoding does not normally have access to an `EvalContext`, so any variables and functions that should be available to the `for_each` and `labels` expressions must be passed in when calling `Expand`. Expressions within the `content` block are evaluated separately and so can be passed a separate `EvalContext` if desired, during normal attribute expression evaluation.

## Detecting Variables

Some applications dynamically generate an `EvalContext` by analyzing which variables are referenced by an expression before evaluating it. This unfortunately requires some extra effort when this analysis is required for the context passed to `Expand`: the HCL API requires a schema to be provided in order to do any analysis of the blocks in a body, but the low-level schema model provides a description of only one level of nested blocks at a time, and thus a new schema must be provided for each additional level of nesting.

To make this arduous process as convenient as possible, this package provides a helper function `WalkExpandVariables`, which returns a `WalkVariablesNode` instance that can be used to find variables directly in a given body and also determine which nested blocks require recursive calls. Using this mechanism requires that the caller be able to look up a schema given a nested block type.

For _simple_ formats where a specific block type name always has the same schema regardless of context, a walk can be implemented as follows:

```go
func walkVariables(node dynblock.WalkVariablesNode, schema *hcl.BodySchema) []hcl.Traversal {
    vars, children := node.Visit(schema)

    for _, child := range children {
        var childSchema *hcl.BodySchema
        switch child.BlockTypeName {
        case "a":
            childSchema = &hcl.BodySchema{
                Blocks: []hcl.BlockHeaderSchema{
                    {
                        Type:       "b",
                        LabelNames: []string{"key"},
                    },
                },
            }
        case "b":
            childSchema = &hcl.BodySchema{
                Attributes: []hcl.AttributeSchema{
                    {
                        Name:     "val",
                        Required: true,
                    },
                },
            }
        default:
            // Should never happen, because the above cases should be exhaustive
            // for the application's configuration format.
            panic(fmt.Errorf("can't find schema for unknown block type %q", child.BlockTypeName))
        }

        vars = append(vars, walkVariables(child.Node, childSchema)...)
    }

    return vars
}
```

### Detecting Variables with `hcldec` Specifications

For applications that use the higher-level `hcldec` package to decode nested configuration structures into `cty` values, the same specification can be used to automatically drive the recursive variable-detection walk described above.

The helper function `ExpandVariablesHCLDec` allows an entire recursive configuration structure to be analyzed in a single call given an `hcldec.Spec` that describes the nested block structure. This means an `hcldec`-based application can support dynamic blocks with only a little additional effort:

```go
func decodeBody(body hcl.Body, spec hcldec.Spec) (cty.Value, hcl.Diagnostics) {
    // Determine which variables are needed to expand dynamic blocks
    neededForDynamic := dynblock.ExpandVariablesHCLDec(body, spec)

    // Build a suitable EvalContext and expand dynamic blocks
    dynCtx := buildEvalContext(neededForDynamic)
    dynBody := dynblock.Expand(body, dynCtx)

    // Determine which variables are needed to fully decode the expanded body.
    // This will analyze expressions that came both from static blocks in the
    // original body and from blocks that were dynamically added by Expand.
    neededForDecode := hcldec.Variables(dynBody, spec)

    // Build a suitable EvalContext and then fully decode the body as per the
    // hcldec specification.
    decCtx := buildEvalContext(neededForDecode)
    return hcldec.Decode(dynBody, spec, decCtx)
}

func buildEvalContext(needed []hcl.Traversal) *hcl.EvalContext {
    // (to be implemented by your application)
}
```

## Performance

This extension cuts quite harshly against the grain of the HCL API, and so it uses lots of wrapping objects and temporary data structures to get its work done. HCL in general is not suitable for use in high-performance situations or situations sensitive to memory pressure, but that is _especially_ true for this extension.

hcl-2.14.1/ext/dynblock/expand_body.go000066400000000000000000000200461431334125700176020ustar00rootroot00000000000000package dynblock

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/zclconf/go-cty/cty"
)

// expandBody wraps another hcl.Body and expands any "dynamic" blocks found
// inside whenever Content or PartialContent is called.
type expandBody struct {
	original   hcl.Body
	forEachCtx *hcl.EvalContext
	iteration  *iteration // non-nil if we're nested inside another "dynamic" block

	// These are used with PartialContent to produce a "remaining items"
	// body to return. They are nil on all bodies fresh out of the transformer.
// // Note that this is re-implemented here rather than delegating to the // existing support required by the underlying body because we need to // retain access to the entire original body on subsequent decode operations // so we can retain any "dynamic" blocks for types we didn't take consume // on the first pass. hiddenAttrs map[string]struct{} hiddenBlocks map[string]hcl.BlockHeaderSchema } func (b *expandBody) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) { extSchema := b.extendSchema(schema) rawContent, diags := b.original.Content(extSchema) blocks, blockDiags := b.expandBlocks(schema, rawContent.Blocks, false) diags = append(diags, blockDiags...) attrs := b.prepareAttributes(rawContent.Attributes) content := &hcl.BodyContent{ Attributes: attrs, Blocks: blocks, MissingItemRange: b.original.MissingItemRange(), } return content, diags } func (b *expandBody) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) { extSchema := b.extendSchema(schema) rawContent, _, diags := b.original.PartialContent(extSchema) // We discard the "remain" argument above because we're going to construct // our own remain that also takes into account remaining "dynamic" blocks. blocks, blockDiags := b.expandBlocks(schema, rawContent.Blocks, true) diags = append(diags, blockDiags...) attrs := b.prepareAttributes(rawContent.Attributes) content := &hcl.BodyContent{ Attributes: attrs, Blocks: blocks, MissingItemRange: b.original.MissingItemRange(), } remain := &expandBody{ original: b.original, forEachCtx: b.forEachCtx, iteration: b.iteration, hiddenAttrs: make(map[string]struct{}), hiddenBlocks: make(map[string]hcl.BlockHeaderSchema), } for name := range b.hiddenAttrs { remain.hiddenAttrs[name] = struct{}{} } for typeName, blockS := range b.hiddenBlocks { remain.hiddenBlocks[typeName] = blockS } for _, attrS := range schema.Attributes { remain.hiddenAttrs[attrS.Name] = struct{}{} } for _, blockS := range schema.Blocks { remain.hiddenBlocks[blockS.Type] = blockS } return content, remain, diags } func (b *expandBody) extendSchema(schema *hcl.BodySchema) *hcl.BodySchema { // We augment the requested schema to also include our special "dynamic" // block type, since then we'll get instances of it interleaved with // all of the literal child blocks we must also include. extSchema := &hcl.BodySchema{ Attributes: schema.Attributes, Blocks: make([]hcl.BlockHeaderSchema, len(schema.Blocks), len(schema.Blocks)+len(b.hiddenBlocks)+1), } copy(extSchema.Blocks, schema.Blocks) extSchema.Blocks = append(extSchema.Blocks, dynamicBlockHeaderSchema) // If we have any hiddenBlocks then we also need to register those here // so that a call to "Content" on the underlying body won't fail. // (We'll filter these out again once we process the result of either // Content or PartialContent.) for _, blockS := range b.hiddenBlocks { extSchema.Blocks = append(extSchema.Blocks, blockS) } // If we have any hiddenAttrs then we also need to register these, for // the same reason as we deal with hiddenBlocks above. 
if len(b.hiddenAttrs) != 0 { newAttrs := make([]hcl.AttributeSchema, len(schema.Attributes), len(schema.Attributes)+len(b.hiddenAttrs)) copy(newAttrs, extSchema.Attributes) for name := range b.hiddenAttrs { newAttrs = append(newAttrs, hcl.AttributeSchema{ Name: name, Required: false, }) } extSchema.Attributes = newAttrs } return extSchema } func (b *expandBody) prepareAttributes(rawAttrs hcl.Attributes) hcl.Attributes { if len(b.hiddenAttrs) == 0 && b.iteration == nil { // Easy path: just pass through the attrs from the original body verbatim return rawAttrs } // Otherwise we have some work to do: we must filter out any attributes // that are hidden (since a previous PartialContent call already saw these) // and wrap the expressions of the inner attributes so that they will // have access to our iteration variables. attrs := make(hcl.Attributes, len(rawAttrs)) for name, rawAttr := range rawAttrs { if _, hidden := b.hiddenAttrs[name]; hidden { continue } if b.iteration != nil { attr := *rawAttr // shallow copy so we can mutate it attr.Expr = exprWrap{ Expression: attr.Expr, i: b.iteration, } attrs[name] = &attr } else { // If we have no active iteration then no wrapping is required. attrs[name] = rawAttr } } return attrs } func (b *expandBody) expandBlocks(schema *hcl.BodySchema, rawBlocks hcl.Blocks, partial bool) (hcl.Blocks, hcl.Diagnostics) { var blocks hcl.Blocks var diags hcl.Diagnostics for _, rawBlock := range rawBlocks { switch rawBlock.Type { case "dynamic": realBlockType := rawBlock.Labels[0] if _, hidden := b.hiddenBlocks[realBlockType]; hidden { continue } var blockS *hcl.BlockHeaderSchema for _, candidate := range schema.Blocks { if candidate.Type == realBlockType { blockS = &candidate break } } if blockS == nil { // Not a block type that the caller requested. if !partial { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unsupported block type", Detail: fmt.Sprintf("Blocks of type %q are not expected here.", realBlockType), Subject: &rawBlock.LabelRanges[0], }) } continue } spec, specDiags := b.decodeSpec(blockS, rawBlock) diags = append(diags, specDiags...) if specDiags.HasErrors() { continue } if spec.forEachVal.IsKnown() { for it := spec.forEachVal.ElementIterator(); it.Next(); { key, value := it.Element() i := b.iteration.MakeChild(spec.iteratorName, key, value) block, blockDiags := spec.newBlock(i, b.forEachCtx) diags = append(diags, blockDiags...) if block != nil { // Attach our new iteration context so that attributes // and other nested blocks can refer to our iterator. block.Body = b.expandChild(block.Body, i) blocks = append(blocks, block) } } } else { // If our top-level iteration value isn't known then we // substitute an unknownBody, which will cause the entire block // to evaluate to an unknown value. i := b.iteration.MakeChild(spec.iteratorName, cty.DynamicVal, cty.DynamicVal) block, blockDiags := spec.newBlock(i, b.forEachCtx) diags = append(diags, blockDiags...) if block != nil { block.Body = unknownBody{b.expandChild(block.Body, i)} blocks = append(blocks, block) } } default: if _, hidden := b.hiddenBlocks[rawBlock.Type]; !hidden { // A static block doesn't create a new iteration context, but // it does need to inherit _our own_ iteration context in // case it contains expressions that refer to our inherited // iterators, or nested "dynamic" blocks. 
expandedBlock := *rawBlock // shallow copy expandedBlock.Body = b.expandChild(rawBlock.Body, b.iteration) blocks = append(blocks, &expandedBlock) } } } return blocks, diags } func (b *expandBody) expandChild(child hcl.Body, i *iteration) hcl.Body { chiCtx := i.EvalContext(b.forEachCtx) ret := Expand(child, chiCtx) ret.(*expandBody).iteration = i return ret } func (b *expandBody) JustAttributes() (hcl.Attributes, hcl.Diagnostics) { // blocks aren't allowed in JustAttributes mode and this body can // only produce blocks, so we'll just pass straight through to our // underlying body here. return b.original.JustAttributes() } func (b *expandBody) MissingItemRange() hcl.Range { return b.original.MissingItemRange() } hcl-2.14.1/ext/dynblock/expand_body_test.go000066400000000000000000000403451431334125700206450ustar00rootroot00000000000000package dynblock import ( "strings" "testing" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hcldec" "github.com/hashicorp/hcl/v2/hcltest" "github.com/zclconf/go-cty/cty" ) func TestExpand(t *testing.T) { srcBody := hcltest.MockBody(&hcl.BodyContent{ Blocks: hcl.Blocks{ { Type: "a", Labels: []string{"static0"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val": hcltest.MockExprLiteral(cty.StringVal("static a 0")), }), }), }, { Type: "b", Body: hcltest.MockBody(&hcl.BodyContent{ Blocks: hcl.Blocks{ { Type: "c", Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val0": hcltest.MockExprLiteral(cty.StringVal("static c 0")), }), }), }, { Type: "dynamic", Labels: []string{"c"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "for_each": hcltest.MockExprLiteral(cty.ListVal([]cty.Value{ cty.StringVal("dynamic c 0"), cty.StringVal("dynamic c 1"), })), "iterator": hcltest.MockExprVariable("dyn_c"), }), Blocks: hcl.Blocks{ { Type: "content", Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val0": hcltest.MockExprTraversalSrc("dyn_c.value"), }), }), }, }, }), }, }, }), }, { Type: "dynamic", Labels: []string{"a"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "for_each": hcltest.MockExprLiteral(cty.ListVal([]cty.Value{ cty.StringVal("dynamic a 0"), cty.StringVal("dynamic a 1"), cty.StringVal("dynamic a 2"), })), "labels": hcltest.MockExprList([]hcl.Expression{ hcltest.MockExprTraversalSrc("a.key"), }), }), Blocks: hcl.Blocks{ { Type: "content", Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val": hcltest.MockExprTraversalSrc("a.value"), }), }), }, }, }), }, { Type: "dynamic", Labels: []string{"b"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "for_each": hcltest.MockExprLiteral(cty.ListVal([]cty.Value{ cty.StringVal("dynamic b 0"), cty.StringVal("dynamic b 1"), })), "iterator": hcltest.MockExprVariable("dyn_b"), }), Blocks: hcl.Blocks{ { Type: "content", Body: hcltest.MockBody(&hcl.BodyContent{ Blocks: hcl.Blocks{ { Type: "c", Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val0": hcltest.MockExprLiteral(cty.StringVal("static c 1")), "val1": hcltest.MockExprTraversalSrc("dyn_b.value"), }), }), }, { Type: 
"dynamic", Labels: []string{"c"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "for_each": hcltest.MockExprLiteral(cty.ListVal([]cty.Value{ cty.StringVal("dynamic c 2"), cty.StringVal("dynamic c 3"), })), }), Blocks: hcl.Blocks{ { Type: "content", Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val0": hcltest.MockExprTraversalSrc("c.value"), "val1": hcltest.MockExprTraversalSrc("dyn_b.value"), }), }), }, }, }), }, }, }), }, }, }), }, { Type: "dynamic", Labels: []string{"b"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "for_each": hcltest.MockExprLiteral(cty.MapVal(map[string]cty.Value{ "foo": cty.ListVal([]cty.Value{ cty.StringVal("dynamic c nested 0"), cty.StringVal("dynamic c nested 1"), }), })), "iterator": hcltest.MockExprVariable("dyn_b"), }), Blocks: hcl.Blocks{ { Type: "content", Body: hcltest.MockBody(&hcl.BodyContent{ Blocks: hcl.Blocks{ { Type: "dynamic", Labels: []string{"c"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "for_each": hcltest.MockExprTraversalSrc("dyn_b.value"), }), Blocks: hcl.Blocks{ { Type: "content", Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val0": hcltest.MockExprTraversalSrc("c.value"), "val1": hcltest.MockExprTraversalSrc("dyn_b.key"), }), }), }, }, }), }, }, }), }, }, }), }, { Type: "a", Labels: []string{"static1"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val": hcltest.MockExprLiteral(cty.StringVal("static a 1")), }), }), }, }, }) dynBody := Expand(srcBody, nil) var remain hcl.Body t.Run("PartialDecode", func(t *testing.T) { decSpec := &hcldec.BlockMapSpec{ TypeName: "a", LabelNames: []string{"key"}, Nested: &hcldec.AttrSpec{ Name: "val", Type: cty.String, Required: true, }, } var got cty.Value var diags hcl.Diagnostics got, remain, diags = hcldec.PartialDecode(dynBody, decSpec, nil) if len(diags) != 0 { t.Errorf("unexpected diagnostics") for _, diag := range diags { t.Logf("- %s", diag) } return } want := cty.MapVal(map[string]cty.Value{ "static0": cty.StringVal("static a 0"), "static1": cty.StringVal("static a 1"), "0": cty.StringVal("dynamic a 0"), "1": cty.StringVal("dynamic a 1"), "2": cty.StringVal("dynamic a 2"), }) if !got.RawEquals(want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, want) } }) t.Run("Decode", func(t *testing.T) { decSpec := &hcldec.BlockListSpec{ TypeName: "b", Nested: &hcldec.BlockListSpec{ TypeName: "c", Nested: &hcldec.ObjectSpec{ "val0": &hcldec.AttrSpec{ Name: "val0", Type: cty.String, }, "val1": &hcldec.AttrSpec{ Name: "val1", Type: cty.String, }, }, }, } var got cty.Value var diags hcl.Diagnostics got, diags = hcldec.Decode(remain, decSpec, nil) if len(diags) != 0 { t.Errorf("unexpected diagnostics") for _, diag := range diags { t.Logf("- %s", diag) } return } want := cty.ListVal([]cty.Value{ cty.ListVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "val0": cty.StringVal("static c 0"), "val1": cty.NullVal(cty.String), }), cty.ObjectVal(map[string]cty.Value{ "val0": cty.StringVal("dynamic c 0"), "val1": cty.NullVal(cty.String), }), cty.ObjectVal(map[string]cty.Value{ "val0": cty.StringVal("dynamic c 1"), "val1": cty.NullVal(cty.String), }), 
}), cty.ListVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "val0": cty.StringVal("static c 1"), "val1": cty.StringVal("dynamic b 0"), }), cty.ObjectVal(map[string]cty.Value{ "val0": cty.StringVal("dynamic c 2"), "val1": cty.StringVal("dynamic b 0"), }), cty.ObjectVal(map[string]cty.Value{ "val0": cty.StringVal("dynamic c 3"), "val1": cty.StringVal("dynamic b 0"), }), }), cty.ListVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "val0": cty.StringVal("static c 1"), "val1": cty.StringVal("dynamic b 1"), }), cty.ObjectVal(map[string]cty.Value{ "val0": cty.StringVal("dynamic c 2"), "val1": cty.StringVal("dynamic b 1"), }), cty.ObjectVal(map[string]cty.Value{ "val0": cty.StringVal("dynamic c 3"), "val1": cty.StringVal("dynamic b 1"), }), }), cty.ListVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "val0": cty.StringVal("dynamic c nested 0"), "val1": cty.StringVal("foo"), }), cty.ObjectVal(map[string]cty.Value{ "val0": cty.StringVal("dynamic c nested 1"), "val1": cty.StringVal("foo"), }), }), }) if !got.RawEquals(want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, want) } }) } func TestExpandUnknownBodies(t *testing.T) { srcContent := &hcl.BodyContent{ Blocks: hcl.Blocks{ { Type: "dynamic", Labels: []string{"list"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "for_each": hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.String))), }), Blocks: hcl.Blocks{ { Type: "content", Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val": hcltest.MockExprTraversalSrc("each.value"), }), }), }, }, }), }, { Type: "dynamic", Labels: []string{"tuple"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "for_each": hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.String))), }), Blocks: hcl.Blocks{ { Type: "content", Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val": hcltest.MockExprTraversalSrc("each.value"), }), }), }, }, }), }, { Type: "dynamic", Labels: []string{"set"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "for_each": hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.String))), }), Blocks: hcl.Blocks{ { Type: "content", Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val": hcltest.MockExprTraversalSrc("each.value"), }), }), }, }, }), }, { Type: "dynamic", Labels: []string{"map"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "for_each": hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.String))), "labels": hcltest.MockExprList([]hcl.Expression{ hcltest.MockExprLiteral(cty.StringVal("static")), }), }), Blocks: hcl.Blocks{ { Type: "content", Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val": hcltest.MockExprTraversalSrc("each.value"), }), }), }, }, }), }, { Type: "dynamic", Labels: []string{"object"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "for_each": hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.String))), "labels": hcltest.MockExprList([]hcl.Expression{ hcltest.MockExprLiteral(cty.StringVal("static")), }), }), 
Blocks: hcl.Blocks{ { Type: "content", Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val": hcltest.MockExprTraversalSrc("each.value"), }), }), }, }, }), }, { Type: "dynamic", Labels: []string{"invalid_list"}, LabelRanges: []hcl.Range{hcl.Range{}}, Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "for_each": hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.String))), }), Blocks: hcl.Blocks{ { Type: "content", Body: hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "val": hcltest.MockExprTraversalSrc("each.value"), // unexpected attributes should still produce an error "invalid": hcltest.MockExprLiteral(cty.StringVal("static")), }), }), }, }, }), }, }, } srcBody := hcltest.MockBody(srcContent) dynBody := Expand(srcBody, nil) t.Run("DecodeList", func(t *testing.T) { decSpec := &hcldec.BlockListSpec{ TypeName: "list", Nested: &hcldec.ObjectSpec{ "val": &hcldec.AttrSpec{ Name: "val", Type: cty.String, }, }, } var got cty.Value var diags hcl.Diagnostics got, _, diags = hcldec.PartialDecode(dynBody, decSpec, nil) if len(diags) != 0 { t.Errorf("unexpected diagnostics") for _, diag := range diags { t.Logf("- %s", diag) } return } want := cty.UnknownVal(cty.List(cty.Object(map[string]cty.Type{ "val": cty.String, }))) if !got.RawEquals(want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, want) } }) t.Run("DecodeTuple", func(t *testing.T) { decSpec := &hcldec.BlockTupleSpec{ TypeName: "tuple", Nested: &hcldec.ObjectSpec{ "val": &hcldec.AttrSpec{ Name: "val", Type: cty.String, }, }, } var got cty.Value var diags hcl.Diagnostics got, _, diags = hcldec.PartialDecode(dynBody, decSpec, nil) if len(diags) != 0 { t.Errorf("unexpected diagnostics") for _, diag := range diags { t.Logf("- %s", diag) } return } want := cty.DynamicVal if !got.RawEquals(want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, want) } }) t.Run("DecodeSet", func(t *testing.T) { decSpec := &hcldec.BlockSetSpec{ TypeName: "tuple", Nested: &hcldec.ObjectSpec{ "val": &hcldec.AttrSpec{ Name: "val", Type: cty.String, }, }, } var got cty.Value var diags hcl.Diagnostics got, _, diags = hcldec.PartialDecode(dynBody, decSpec, nil) if len(diags) != 0 { t.Errorf("unexpected diagnostics") for _, diag := range diags { t.Logf("- %s", diag) } return } want := cty.UnknownVal(cty.Set(cty.Object(map[string]cty.Type{ "val": cty.String, }))) if !got.RawEquals(want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, want) } }) t.Run("DecodeMap", func(t *testing.T) { decSpec := &hcldec.BlockMapSpec{ TypeName: "map", LabelNames: []string{"key"}, Nested: &hcldec.ObjectSpec{ "val": &hcldec.AttrSpec{ Name: "val", Type: cty.String, }, }, } var got cty.Value var diags hcl.Diagnostics got, _, diags = hcldec.PartialDecode(dynBody, decSpec, nil) if len(diags) != 0 { t.Errorf("unexpected diagnostics") for _, diag := range diags { t.Logf("- %s", diag) } return } want := cty.UnknownVal(cty.Map(cty.Object(map[string]cty.Type{ "val": cty.String, }))) if !got.RawEquals(want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, want) } }) t.Run("DecodeInvalidList", func(t *testing.T) { decSpec := &hcldec.BlockListSpec{ TypeName: "invalid_list", Nested: &hcldec.ObjectSpec{ "val": &hcldec.AttrSpec{ Name: "val", Type: cty.String, }, }, } _, _, diags := hcldec.PartialDecode(dynBody, decSpec, nil) if len(diags) != 1 { t.Error("expected 1 extraneous argument") } want := `Mock body has extraneous argument "invalid"` if 
!strings.Contains(diags.Error(), want) { t.Errorf("unexpected diagnostics: %v", diags) } }) } hcl-2.14.1/ext/dynblock/expand_spec.go000066400000000000000000000147101431334125700176000ustar00rootroot00000000000000package dynblock import ( "fmt" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" ) type expandSpec struct { blockType string blockTypeRange hcl.Range defRange hcl.Range forEachVal cty.Value iteratorName string labelExprs []hcl.Expression contentBody hcl.Body inherited map[string]*iteration } func (b *expandBody) decodeSpec(blockS *hcl.BlockHeaderSchema, rawSpec *hcl.Block) (*expandSpec, hcl.Diagnostics) { var diags hcl.Diagnostics var schema *hcl.BodySchema if len(blockS.LabelNames) != 0 { schema = dynamicBlockBodySchemaLabels } else { schema = dynamicBlockBodySchemaNoLabels } specContent, specDiags := rawSpec.Body.Content(schema) diags = append(diags, specDiags...) if specDiags.HasErrors() { return nil, diags } //// for_each attribute eachAttr := specContent.Attributes["for_each"] eachVal, eachDiags := eachAttr.Expr.Value(b.forEachCtx) diags = append(diags, eachDiags...) if !eachVal.CanIterateElements() && eachVal.Type() != cty.DynamicPseudoType { // We skip this error for DynamicPseudoType because that means we either // have a null (which is checked immediately below) or an unknown // (which is handled in the expandBody Content methods). diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid dynamic for_each value", Detail: fmt.Sprintf("Cannot use a %s value in for_each. An iterable collection is required.", eachVal.Type().FriendlyName()), Subject: eachAttr.Expr.Range().Ptr(), Expression: eachAttr.Expr, EvalContext: b.forEachCtx, }) return nil, diags } if eachVal.IsNull() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid dynamic for_each value", Detail: "Cannot use a null value in for_each.", Subject: eachAttr.Expr.Range().Ptr(), Expression: eachAttr.Expr, EvalContext: b.forEachCtx, }) return nil, diags } //// iterator attribute iteratorName := blockS.Type if iteratorAttr := specContent.Attributes["iterator"]; iteratorAttr != nil { itTraversal, itDiags := hcl.AbsTraversalForExpr(iteratorAttr.Expr) diags = append(diags, itDiags...) if itDiags.HasErrors() { return nil, diags } if len(itTraversal) != 1 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid dynamic iterator name", Detail: "Dynamic iterator must be a single variable name.", Subject: itTraversal.SourceRange().Ptr(), }) return nil, diags } iteratorName = itTraversal.RootName() } var labelExprs []hcl.Expression if labelsAttr := specContent.Attributes["labels"]; labelsAttr != nil { var labelDiags hcl.Diagnostics labelExprs, labelDiags = hcl.ExprList(labelsAttr.Expr) diags = append(diags, labelDiags...) 
if labelDiags.HasErrors() { return nil, diags } if len(labelExprs) > len(blockS.LabelNames) { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Extraneous dynamic block label", Detail: fmt.Sprintf("Blocks of type %q require %d label(s).", blockS.Type, len(blockS.LabelNames)), Subject: labelExprs[len(blockS.LabelNames)].Range().Ptr(), }) return nil, diags } else if len(labelExprs) < len(blockS.LabelNames) { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Insufficient dynamic block labels", Detail: fmt.Sprintf("Blocks of type %q require %d label(s).", blockS.Type, len(blockS.LabelNames)), Subject: labelsAttr.Expr.Range().Ptr(), }) return nil, diags } } // Since our schema requests only blocks of type "content", we can assume // that all entries in specContent.Blocks are content blocks. if len(specContent.Blocks) == 0 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing dynamic content block", Detail: "A dynamic block must have a nested block of type \"content\" to describe the body of each generated block.", Subject: &specContent.MissingItemRange, }) return nil, diags } if len(specContent.Blocks) > 1 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Extraneous dynamic content block", Detail: "Only one nested content block is allowed for each dynamic block.", Subject: &specContent.Blocks[1].DefRange, }) return nil, diags } return &expandSpec{ blockType: blockS.Type, blockTypeRange: rawSpec.LabelRanges[0], defRange: rawSpec.DefRange, forEachVal: eachVal, iteratorName: iteratorName, labelExprs: labelExprs, contentBody: specContent.Blocks[0].Body, }, diags } func (s *expandSpec) newBlock(i *iteration, ctx *hcl.EvalContext) (*hcl.Block, hcl.Diagnostics) { var diags hcl.Diagnostics var labels []string var labelRanges []hcl.Range lCtx := i.EvalContext(ctx) for _, labelExpr := range s.labelExprs { labelVal, labelDiags := labelExpr.Value(lCtx) diags = append(diags, labelDiags...) if labelDiags.HasErrors() { return nil, diags } var convErr error labelVal, convErr = convert.Convert(labelVal, cty.String) if convErr != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid dynamic block label", Detail: fmt.Sprintf("Cannot use this value as a dynamic block label: %s.", convErr), Subject: labelExpr.Range().Ptr(), Expression: labelExpr, EvalContext: lCtx, }) return nil, diags } if labelVal.IsNull() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid dynamic block label", Detail: "Cannot use a null value as a dynamic block label.", Subject: labelExpr.Range().Ptr(), Expression: labelExpr, EvalContext: lCtx, }) return nil, diags } if !labelVal.IsKnown() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid dynamic block label", Detail: "This value is not yet known. 
Dynamic block labels must be immediately-known values.", Subject: labelExpr.Range().Ptr(), Expression: labelExpr, EvalContext: lCtx, }) return nil, diags } labels = append(labels, labelVal.AsString()) labelRanges = append(labelRanges, labelExpr.Range()) } block := &hcl.Block{ Type: s.blockType, TypeRange: s.blockTypeRange, Labels: labels, LabelRanges: labelRanges, DefRange: s.defRange, Body: s.contentBody, } return block, diags } hcl-2.14.1/ext/dynblock/expr_wrap.go000066400000000000000000000021061431334125700173120ustar00rootroot00000000000000package dynblock import ( "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) type exprWrap struct { hcl.Expression i *iteration } func (e exprWrap) Variables() []hcl.Traversal { raw := e.Expression.Variables() ret := make([]hcl.Traversal, 0, len(raw)) // Filter out traversals that refer to our iterator name or any // iterator we've inherited; we're going to provide those in // our Value wrapper, so the caller doesn't need to know about them. for _, traversal := range raw { rootName := traversal.RootName() if rootName == e.i.IteratorName { continue } if _, inherited := e.i.Inherited[rootName]; inherited { continue } ret = append(ret, traversal) } return ret } func (e exprWrap) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { extCtx := e.i.EvalContext(ctx) return e.Expression.Value(extCtx) } // UnwrapExpression returns the expression being wrapped by this instance. // This allows the original expression to be recovered by hcl.UnwrapExpression. func (e exprWrap) UnwrapExpression() hcl.Expression { return e.Expression } hcl-2.14.1/ext/dynblock/iteration.go000066400000000000000000000025521431334125700173060ustar00rootroot00000000000000package dynblock import ( "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) type iteration struct { IteratorName string Key cty.Value Value cty.Value Inherited map[string]*iteration } func (s *expandSpec) MakeIteration(key, value cty.Value) *iteration { return &iteration{ IteratorName: s.iteratorName, Key: key, Value: value, Inherited: s.inherited, } } func (i *iteration) Object() cty.Value { return cty.ObjectVal(map[string]cty.Value{ "key": i.Key, "value": i.Value, }) } func (i *iteration) EvalContext(base *hcl.EvalContext) *hcl.EvalContext { new := base.NewChild() if i != nil { new.Variables = map[string]cty.Value{} for name, otherIt := range i.Inherited { new.Variables[name] = otherIt.Object() } new.Variables[i.IteratorName] = i.Object() } return new } func (i *iteration) MakeChild(iteratorName string, key, value cty.Value) *iteration { if i == nil { // Create entirely new root iteration, then return &iteration{ IteratorName: iteratorName, Key: key, Value: value, } } inherited := map[string]*iteration{} for name, otherIt := range i.Inherited { inherited[name] = otherIt } inherited[i.IteratorName] = i return &iteration{ IteratorName: iteratorName, Key: key, Value: value, Inherited: inherited, } } hcl-2.14.1/ext/dynblock/public.go000066400000000000000000000030731431334125700165650ustar00rootroot00000000000000// Package dynblock provides an extension to HCL that allows dynamic // declaration of nested blocks in certain contexts via a special block type // named "dynamic". package dynblock import ( "github.com/hashicorp/hcl/v2" ) // Expand "dynamic" blocks in the given body, returning a new body that // has those blocks expanded. 
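//
// A minimal usage sketch (where "body", "ctx", and "schema" are assumed to
// be supplied by the calling application) is:
//
//	expanded := dynblock.Expand(body, ctx)
//	content, diags := expanded.Content(schema)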
// // The given EvalContext is used when evaluating "for_each" and "labels" // attributes within dynamic blocks, allowing those expressions access to // variables and functions beyond the iterator variable created by the // iteration. // // Expand returns no diagnostics because no blocks are actually expanded // until a call to Content or PartialContent on the returned body, which // will then expand only the blocks selected by the schema. // // "dynamic" blocks are also expanded automatically within nested blocks // in the given body, including within other dynamic blocks, thus allowing // multi-dimensional iteration. However, it is not possible to // dynamically-generate the "dynamic" blocks themselves except through nesting. // // parent { // dynamic "child" { // for_each = child_objs // content { // dynamic "grandchild" { // for_each = child.value.children // labels = [grandchild.key] // content { // parent_key = child.key // value = grandchild.value // } // } // } // } // } func Expand(body hcl.Body, ctx *hcl.EvalContext) hcl.Body { return &expandBody{ original: body, forEachCtx: ctx, } } hcl-2.14.1/ext/dynblock/schema.go000066400000000000000000000014521431334125700165460ustar00rootroot00000000000000package dynblock import "github.com/hashicorp/hcl/v2" var dynamicBlockHeaderSchema = hcl.BlockHeaderSchema{ Type: "dynamic", LabelNames: []string{"type"}, } var dynamicBlockBodySchemaLabels = &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "for_each", Required: true, }, { Name: "iterator", Required: false, }, { Name: "labels", Required: true, }, }, Blocks: []hcl.BlockHeaderSchema{ { Type: "content", LabelNames: nil, }, }, } var dynamicBlockBodySchemaNoLabels = &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "for_each", Required: true, }, { Name: "iterator", Required: false, }, }, Blocks: []hcl.BlockHeaderSchema{ { Type: "content", LabelNames: nil, }, }, } hcl-2.14.1/ext/dynblock/unknown_body.go000066400000000000000000000055471431334125700200330ustar00rootroot00000000000000package dynblock import ( "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) // unknownBody is a funny body that just reports everything inside it as // unknown. It uses a given other body as a sort of template for what attributes // and blocks are inside -- including source location information -- but // subsitutes unknown values of unknown type for all attributes. // // This rather odd process is used to handle expansion of dynamic blocks whose // for_each expression is unknown. Since a block cannot itself be unknown, // we instead arrange for everything _inside_ the block to be unknown instead, // to give the best possible approximation. type unknownBody struct { template hcl.Body } var _ hcl.Body = unknownBody{} // hcldec.UnkownBody impl func (b unknownBody) Unknown() bool { return true } func (b unknownBody) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) { content, diags := b.template.Content(schema) content = b.fixupContent(content) // We're intentionally preserving the diagnostics reported from the // inner body so that we can still report where the template body doesn't // match the requested schema. 
return content, diags } func (b unknownBody) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) { content, remain, diags := b.template.PartialContent(schema) content = b.fixupContent(content) remain = unknownBody{remain} // remaining content must also be wrapped // We're intentionally preserving the diagnostics reported from the // inner body so that we can still report where the template body doesn't // match the requested schema. return content, remain, diags } func (b unknownBody) JustAttributes() (hcl.Attributes, hcl.Diagnostics) { attrs, diags := b.template.JustAttributes() attrs = b.fixupAttrs(attrs) // We're intentionally preserving the diagnostics reported from the // inner body so that we can still report where the template body doesn't // match the requested schema. return attrs, diags } func (b unknownBody) MissingItemRange() hcl.Range { return b.template.MissingItemRange() } func (b unknownBody) fixupContent(got *hcl.BodyContent) *hcl.BodyContent { ret := &hcl.BodyContent{} ret.Attributes = b.fixupAttrs(got.Attributes) if len(got.Blocks) > 0 { ret.Blocks = make(hcl.Blocks, 0, len(got.Blocks)) for _, gotBlock := range got.Blocks { new := *gotBlock // shallow copy new.Body = unknownBody{gotBlock.Body} // nested content must also be marked unknown ret.Blocks = append(ret.Blocks, &new) } } return ret } func (b unknownBody) fixupAttrs(got hcl.Attributes) hcl.Attributes { if len(got) == 0 { return nil } ret := make(hcl.Attributes, len(got)) for name, gotAttr := range got { new := *gotAttr // shallow copy new.Expr = hcl.StaticExpr(cty.DynamicVal, gotAttr.Expr.Range()) ret[name] = &new } return ret } hcl-2.14.1/ext/dynblock/variables.go000066400000000000000000000151571431334125700172650ustar00rootroot00000000000000package dynblock import ( "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) // WalkVariables begins the recursive process of walking all expressions and // nested blocks in the given body and its child bodies while taking into // account any "dynamic" blocks. // // This function requires that the caller walk through the nested block // structure in the given body level-by-level so that an appropriate schema // can be provided at each level to inform further processing. This workflow // is thus easiest to use for calling applications that have some higher-level // schema representation available with which to drive this multi-step // process. If your application uses the hcldec package, you may be able to // use VariablesHCLDec instead for a more automatic approach. func WalkVariables(body hcl.Body) WalkVariablesNode { return WalkVariablesNode{ body: body, includeContent: true, } } // WalkExpandVariables is like Variables but it includes only the variables // required for successful block expansion, ignoring any variables referenced // inside block contents. The result is the minimal set of all variables // required for a call to Expand, excluding variables that would only be // needed to subsequently call Content or PartialContent on the expanded // body. func WalkExpandVariables(body hcl.Body) WalkVariablesNode { return WalkVariablesNode{ body: body, } } type WalkVariablesNode struct { body hcl.Body it *iteration includeContent bool } type WalkVariablesChild struct { BlockTypeName string Node WalkVariablesNode } // Body returns the HCL Body associated with the child node, in case the caller // wants to do some sort of inspection of it in order to decide what schema // to pass to Visit. 
// // Most implementations should just fetch a fixed schema based on the // BlockTypeName field and not access this. Deciding on a schema dynamically // based on the body is a strange thing to do and generally necessary only if // your caller is already doing other bizarre things with HCL bodies. func (c WalkVariablesChild) Body() hcl.Body { return c.Node.body } // Visit returns the variable traversals required for any "dynamic" blocks // directly in the body associated with this node, and also returns any child // nodes that must be visited in order to continue the walk. // // Each child node has its associated block type name given in its BlockTypeName // field, which the calling application should use to determine the appropriate // schema for the content of each child node and pass it to the child node's // own Visit method to continue the walk recursively. func (n WalkVariablesNode) Visit(schema *hcl.BodySchema) (vars []hcl.Traversal, children []WalkVariablesChild) { extSchema := n.extendSchema(schema) container, _, _ := n.body.PartialContent(extSchema) if container == nil { return vars, children } children = make([]WalkVariablesChild, 0, len(container.Blocks)) if n.includeContent { for _, attr := range container.Attributes { for _, traversal := range attr.Expr.Variables() { var ours, inherited bool if n.it != nil { ours = traversal.RootName() == n.it.IteratorName _, inherited = n.it.Inherited[traversal.RootName()] } if !(ours || inherited) { vars = append(vars, traversal) } } } } for _, block := range container.Blocks { switch block.Type { case "dynamic": blockTypeName := block.Labels[0] inner, _, _ := block.Body.PartialContent(variableDetectionInnerSchema) if inner == nil { continue } iteratorName := blockTypeName if attr, exists := inner.Attributes["iterator"]; exists { iterTraversal, _ := hcl.AbsTraversalForExpr(attr.Expr) if len(iterTraversal) == 0 { // Ignore this invalid dynamic block, since it'll produce // an error if someone tries to extract content from it // later anyway. continue } iteratorName = iterTraversal.RootName() } blockIt := n.it.MakeChild(iteratorName, cty.DynamicVal, cty.DynamicVal) if attr, exists := inner.Attributes["for_each"]; exists { // Filter out iterator names inherited from parent blocks for _, traversal := range attr.Expr.Variables() { if _, inherited := blockIt.Inherited[traversal.RootName()]; !inherited { vars = append(vars, traversal) } } } if attr, exists := inner.Attributes["labels"]; exists { // Filter out both our own iterator name _and_ those inherited // from parent blocks, since we provide _both_ of these to the // label expressions. for _, traversal := range attr.Expr.Variables() { ours := traversal.RootName() == iteratorName _, inherited := blockIt.Inherited[traversal.RootName()] if !(ours || inherited) { vars = append(vars, traversal) } } } for _, contentBlock := range inner.Blocks { // We only request "content" blocks in our schema, so we know // any blocks we find here will be content blocks. We require // exactly one content block for actual expansion, but we'll // be more liberal here so that callers can still collect // variables from erroneous "dynamic" blocks. 
children = append(children, WalkVariablesChild{ BlockTypeName: blockTypeName, Node: WalkVariablesNode{ body: contentBlock.Body, it: blockIt, includeContent: n.includeContent, }, }) } default: children = append(children, WalkVariablesChild{ BlockTypeName: block.Type, Node: WalkVariablesNode{ body: block.Body, it: n.it, includeContent: n.includeContent, }, }) } } return vars, children } func (n WalkVariablesNode) extendSchema(schema *hcl.BodySchema) *hcl.BodySchema { // We augment the requested schema to also include our special "dynamic" // block type, since then we'll get instances of it interleaved with // all of the literal child blocks we must also include. extSchema := &hcl.BodySchema{ Attributes: schema.Attributes, Blocks: make([]hcl.BlockHeaderSchema, len(schema.Blocks), len(schema.Blocks)+1), } copy(extSchema.Blocks, schema.Blocks) extSchema.Blocks = append(extSchema.Blocks, dynamicBlockHeaderSchema) return extSchema } // This is a more relaxed schema than what's in schema.go, since we // want to maximize the amount of variables we can find even if there // are erroneous blocks. var variableDetectionInnerSchema = &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "for_each", Required: false, }, { Name: "labels", Required: false, }, { Name: "iterator", Required: false, }, }, Blocks: []hcl.BlockHeaderSchema{ { Type: "content", }, }, } hcl-2.14.1/ext/dynblock/variables_hcldec.go000066400000000000000000000030531431334125700205570ustar00rootroot00000000000000package dynblock import ( "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hcldec" ) // VariablesHCLDec is a wrapper around WalkVariables that uses the given hcldec // specification to automatically drive the recursive walk through nested // blocks in the given body. // // This is a drop-in replacement for hcldec.Variables which is able to treat // blocks of type "dynamic" in the same special way that dynblock.Expand would, // exposing both the variables referenced in the "for_each" and "labels" // arguments and variables used in the nested "content" block. func VariablesHCLDec(body hcl.Body, spec hcldec.Spec) []hcl.Traversal { rootNode := WalkVariables(body) return walkVariablesWithHCLDec(rootNode, spec) } // ExpandVariablesHCLDec is like VariablesHCLDec but it includes only the // minimal set of variables required to call Expand, ignoring variables that // are referenced only inside normal block contents. See WalkExpandVariables // for more information. func ExpandVariablesHCLDec(body hcl.Body, spec hcldec.Spec) []hcl.Traversal { rootNode := WalkExpandVariables(body) return walkVariablesWithHCLDec(rootNode, spec) } func walkVariablesWithHCLDec(node WalkVariablesNode, spec hcldec.Spec) []hcl.Traversal { vars, children := node.Visit(hcldec.ImpliedSchema(spec)) if len(children) > 0 { childSpecs := hcldec.ChildBlockTypes(spec) for _, child := range children { if childSpec, exists := childSpecs[child.BlockTypeName]; exists { vars = append(vars, walkVariablesWithHCLDec(child.Node, childSpec)...) 
} } } return vars } hcl-2.14.1/ext/dynblock/variables_test.go000066400000000000000000000102541431334125700203150ustar00rootroot00000000000000package dynblock import ( "reflect" "testing" "github.com/hashicorp/hcl/v2/hcldec" "github.com/zclconf/go-cty/cty" "github.com/davecgh/go-spew/spew" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" ) func TestVariables(t *testing.T) { const src = ` # We have some references to things inside the "val" attribute inside each # of our "b" blocks, which should be included in the result of WalkVariables # but not WalkExpandVariables. a { dynamic "b" { for_each = [for i, v in some_list_0: "${i}=${v},${baz}"] labels = ["${b.value} ${something_else_0}"] content { val = "${b.value} ${something_else_1}" } } } dynamic "a" { for_each = some_list_1 content { b "foo" { val = "${a.value} ${something_else_2}" } dynamic "b" { for_each = some_list_2 iterator = dyn_b labels = ["${a.value} ${dyn_b.value} ${b} ${something_else_3}"] content { val = "${a.value} ${dyn_b.value} ${something_else_4}" } } } } dynamic "a" { for_each = some_list_3 iterator = dyn_a content { b "foo" { val = "${dyn_a.value} ${something_else_5}" } dynamic "b" { for_each = some_list_4 labels = ["${dyn_a.value} ${b.value} ${a} ${something_else_6}"] content { val = "${dyn_a.value} ${b.value} ${something_else_7}" } } } } ` f, diags := hclsyntax.ParseConfig([]byte(src), "", hcl.Pos{}) if len(diags) != 0 { t.Errorf("unexpected diagnostics during parse") for _, diag := range diags { t.Logf("- %s", diag) } return } spec := &hcldec.BlockListSpec{ TypeName: "a", Nested: &hcldec.BlockMapSpec{ TypeName: "b", LabelNames: []string{"key"}, Nested: &hcldec.AttrSpec{ Name: "val", Type: cty.String, }, }, } t.Run("WalkVariables", func(t *testing.T) { traversals := VariablesHCLDec(f.Body, spec) got := make([]string, len(traversals)) for i, traversal := range traversals { got[i] = traversal.RootName() } // The block structure is traversed one level at a time, so the ordering // here is reflecting first a pass of the root, then the first child // under the root, then the first child under that, etc. want := []string{ "some_list_1", "some_list_3", "some_list_0", "baz", "something_else_0", "something_else_1", // Would not be included for WalkExpandVariables because it only appears in content "some_list_2", "b", // This is correct because it is referenced in a context where the iterator is overridden to be dyn_b "something_else_3", "something_else_2", // Would not be included for WalkExpandVariables because it only appears in content "something_else_4", // Would not be included for WalkExpandVariables because it only appears in content "some_list_4", "a", // This is correct because it is referenced in a context where the iterator is overridden to be dyn_a "something_else_6", "something_else_5", // Would not be included for WalkExpandVariables because it only appears in content "something_else_7", // Would not be included for WalkExpandVariables because it only appears in content } if !reflect.DeepEqual(got, want) { t.Errorf("wrong result\ngot: %swant: %s", spew.Sdump(got), spew.Sdump(want)) } }) t.Run("WalkExpandVariables", func(t *testing.T) { traversals := ExpandVariablesHCLDec(f.Body, spec) got := make([]string, len(traversals)) for i, traversal := range traversals { got[i] = traversal.RootName() } // The block structure is traversed one level at a time, so the ordering // here is reflecting first a pass of the root, then the first child // under the root, then the first child under that, etc. 
want := []string{ "some_list_1", "some_list_3", "some_list_0", "baz", "something_else_0", "some_list_2", "b", // This is correct because it is referenced in a context where the iterator is overridden to be dyn_b "something_else_3", "some_list_4", "a", // This is correct because it is referenced in a context where the iterator is overridden to be dyn_a "something_else_6", } if !reflect.DeepEqual(got, want) { t.Errorf("wrong result\ngot: %swant: %s", spew.Sdump(got), spew.Sdump(want)) } }) } hcl-2.14.1/ext/transform/000077500000000000000000000000001431334125700151635ustar00rootroot00000000000000hcl-2.14.1/ext/transform/doc.go000066400000000000000000000005461431334125700162640ustar00rootroot00000000000000// Package transform is a helper package for writing extensions that work // by applying transforms to bodies. // // It defines a type for body transformers, and then provides utilities in // terms of that type for working with transformers, including recursively // applying such transforms as heirarchical block structures are extracted. package transform hcl-2.14.1/ext/transform/error.go000066400000000000000000000063341431334125700166510ustar00rootroot00000000000000package transform import ( "github.com/hashicorp/hcl/v2" ) // NewErrorBody returns a hcl.Body that returns the given diagnostics whenever // any of its content-access methods are called. // // The given diagnostics must have at least one diagnostic of severity // hcl.DiagError, or this function will panic. // // This can be used to prepare a return value for a Transformer that // can't complete due to an error. While the transform itself will succeed, // the error will be returned as soon as a caller attempts to extract content // from the resulting body. func NewErrorBody(diags hcl.Diagnostics) hcl.Body { if !diags.HasErrors() { panic("NewErrorBody called without any error diagnostics") } return diagBody{ Diags: diags, } } // BodyWithDiagnostics returns a hcl.Body that wraps another hcl.Body // and emits the given diagnostics for any content-extraction method. // // Unlike the result of NewErrorBody, a body with diagnostics still runs // the extraction actions on the underlying body if (and only if) the given // diagnostics do not contain errors, but prepends the given diagnostics with // any diagnostics produced by the action. // // If the given diagnostics is empty, the given body is returned verbatim. // // This function is intended for conveniently reporting errors and/or warnings // produced during a transform, ensuring that they will be seen when the // caller eventually extracts content from the returned body. func BodyWithDiagnostics(body hcl.Body, diags hcl.Diagnostics) hcl.Body { if len(diags) == 0 { // nothing to do! return body } return diagBody{ Diags: diags, Wrapped: body, } } type diagBody struct { Diags hcl.Diagnostics Wrapped hcl.Body } func (b diagBody) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) { if b.Diags.HasErrors() { return b.emptyContent(), b.Diags } content, wrappedDiags := b.Wrapped.Content(schema) diags := make(hcl.Diagnostics, 0, len(b.Diags)+len(wrappedDiags)) diags = append(diags, b.Diags...) diags = append(diags, wrappedDiags...) 
return content, diags } func (b diagBody) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) { if b.Diags.HasErrors() { return b.emptyContent(), b.Wrapped, b.Diags } content, remain, wrappedDiags := b.Wrapped.PartialContent(schema) diags := make(hcl.Diagnostics, 0, len(b.Diags)+len(wrappedDiags)) diags = append(diags, b.Diags...) diags = append(diags, wrappedDiags...) return content, remain, diags } func (b diagBody) JustAttributes() (hcl.Attributes, hcl.Diagnostics) { if b.Diags.HasErrors() { return nil, b.Diags } attributes, wrappedDiags := b.Wrapped.JustAttributes() diags := make(hcl.Diagnostics, 0, len(b.Diags)+len(wrappedDiags)) diags = append(diags, b.Diags...) diags = append(diags, wrappedDiags...) return attributes, diags } func (b diagBody) MissingItemRange() hcl.Range { if b.Wrapped != nil { return b.Wrapped.MissingItemRange() } // Placeholder. This should never be seen in practice because decoding // a diagBody without a wrapped body should always produce an error. return hcl.Range{ Filename: "", } } func (b diagBody) emptyContent() *hcl.BodyContent { return &hcl.BodyContent{ MissingItemRange: b.MissingItemRange(), } } hcl-2.14.1/ext/transform/transform.go000066400000000000000000000054461431334125700175360ustar00rootroot00000000000000package transform import ( "github.com/hashicorp/hcl/v2" ) // Shallow is equivalent to calling transformer.TransformBody(body), and // is provided only for completeness of the top-level API. func Shallow(body hcl.Body, transformer Transformer) hcl.Body { return transformer.TransformBody(body) } // Deep applies the given transform to the given body and then // wraps the result such that any descendent blocks that are decoded will // also have the transform applied to their bodies. // // This allows for language extensions that define a particular block type // for a particular body and all nested blocks within it. // // Due to the wrapping behavior, the body resulting from this function // will not be of the type returned by the transformer. Callers may call // only the methods defined for interface hcl.Body, and may not type-assert // to access other methods. func Deep(body hcl.Body, transformer Transformer) hcl.Body { return deepWrapper{ Transformed: transformer.TransformBody(body), Transformer: transformer, } } // deepWrapper is a hcl.Body implementation that ensures that a given // transformer is applied to another given body when content is extracted, // and that it recursively applies to any child blocks that are extracted. type deepWrapper struct { Transformed hcl.Body Transformer Transformer } func (w deepWrapper) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) { content, diags := w.Transformed.Content(schema) content = w.transformContent(content) return content, diags } func (w deepWrapper) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) { content, remain, diags := w.Transformed.PartialContent(schema) content = w.transformContent(content) return content, remain, diags } func (w deepWrapper) transformContent(content *hcl.BodyContent) *hcl.BodyContent { if len(content.Blocks) == 0 { // Easy path: if there are no blocks then there are no child bodies to wrap return content } // Since we're going to change things here, we'll be polite and clone the // structure so that we don't risk impacting any internal state of the // original body. 
ret := &hcl.BodyContent{ Attributes: content.Attributes, MissingItemRange: content.MissingItemRange, Blocks: make(hcl.Blocks, len(content.Blocks)), } for i, givenBlock := range content.Blocks { // Shallow-copy the block so we can mutate it newBlock := *givenBlock newBlock.Body = Deep(newBlock.Body, w.Transformer) ret.Blocks[i] = &newBlock } return ret } func (w deepWrapper) JustAttributes() (hcl.Attributes, hcl.Diagnostics) { // Attributes can't have bodies or nested blocks, so this is just a thin wrapper. return w.Transformed.JustAttributes() } func (w deepWrapper) MissingItemRange() hcl.Range { return w.Transformed.MissingItemRange() } hcl-2.14.1/ext/transform/transform_test.go000066400000000000000000000044011431334125700205630ustar00rootroot00000000000000package transform import ( "testing" "reflect" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hcltest" "github.com/zclconf/go-cty/cty" ) // Assert that deepWrapper implements Body var deepWrapperIsBody hcl.Body = deepWrapper{} func TestDeep(t *testing.T) { testTransform := TransformerFunc(func(body hcl.Body) hcl.Body { _, remain, diags := body.PartialContent(&hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "remove", }, }, }) return BodyWithDiagnostics(remain, diags) }) src := hcltest.MockBody(&hcl.BodyContent{ Attributes: hcltest.MockAttrs(map[string]hcl.Expression{ "true": hcltest.MockExprLiteral(cty.True), }), Blocks: []*hcl.Block{ { Type: "remove", Body: hcl.EmptyBody(), }, { Type: "child", Body: hcltest.MockBody(&hcl.BodyContent{ Blocks: []*hcl.Block{ { Type: "remove", }, }, }), }, }, }) wrapped := Deep(src, testTransform) rootContent, diags := wrapped.Content(&hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "true", }, }, Blocks: []hcl.BlockHeaderSchema{ { Type: "child", }, }, }) if len(diags) != 0 { t.Errorf("unexpected diagnostics for root content") for _, diag := range diags { t.Logf("- %s", diag) } } wantAttrs := hcltest.MockAttrs(map[string]hcl.Expression{ "true": hcltest.MockExprLiteral(cty.True), }) if !reflect.DeepEqual(rootContent.Attributes, wantAttrs) { t.Errorf("wrong root attributes\ngot: %#v\nwant: %#v", rootContent.Attributes, wantAttrs) } if got, want := len(rootContent.Blocks), 1; got != want { t.Fatalf("wrong number of root blocks %d; want %d", got, want) } if got, want := rootContent.Blocks[0].Type, "child"; got != want { t.Errorf("wrong block type %s; want %s", got, want) } childBlock := rootContent.Blocks[0] childContent, diags := childBlock.Body.Content(&hcl.BodySchema{}) if len(diags) != 0 { t.Errorf("unexpected diagnostics for child content") for _, diag := range diags { t.Logf("- %s", diag) } } if len(childContent.Attributes) != 0 { t.Errorf("unexpected attributes in child content; want empty content") } if len(childContent.Blocks) != 0 { t.Errorf("unexpected blocks in child content; want empty content") } } hcl-2.14.1/ext/transform/transformer.go000066400000000000000000000020771431334125700200620ustar00rootroot00000000000000package transform import ( "github.com/hashicorp/hcl/v2" ) // A Transformer takes a given body, applies some (possibly no-op) // transform to it, and returns the new body. // // It must _not_ mutate the given body in-place. // // The transform call cannot fail, but it _can_ return a body that immediately // returns diagnostics when its methods are called. NewErrorBody is a utility // to help with this. type Transformer interface { TransformBody(hcl.Body) hcl.Body } // TransformerFunc is a function type that implements Transformer. 
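//
// For example (a sketch following the same pattern as the transformer used in
// transform_test.go; the variable name and the "reserved" block type are
// illustrative only), a TransformerFunc can hide a particular block type from
// downstream decoding:
//
//	removeReserved := transform.TransformerFunc(func(body hcl.Body) hcl.Body {
//		_, remain, diags := body.PartialContent(&hcl.BodySchema{
//			Blocks: []hcl.BlockHeaderSchema{
//				{Type: "reserved"},
//			},
//		})
//		return transform.BodyWithDiagnostics(remain, diags)
//	})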
type TransformerFunc func(hcl.Body) hcl.Body // TransformBody is an implementation of Transformer.TransformBody. func (f TransformerFunc) TransformBody(in hcl.Body) hcl.Body { return f(in) } type chain []Transformer // Chain takes a slice of transformers and returns a single new // Transformer that applies each of the given transformers in sequence. func Chain(c []Transformer) Transformer { return chain(c) } func (c chain) TransformBody(body hcl.Body) hcl.Body { for _, t := range c { body = t.TransformBody(body) } return body } hcl-2.14.1/ext/tryfunc/000077500000000000000000000000001431334125700146425ustar00rootroot00000000000000hcl-2.14.1/ext/tryfunc/README.md000066400000000000000000000026501431334125700161240ustar00rootroot00000000000000# "Try" and "can" functions This Go package contains two `cty` functions intended for use in an `hcl.EvalContext` when evaluating HCL native syntax expressions. The first function `try` attempts to evaluate each of its argument expressions in order until one produces a result without any errors. ```hcl try(non_existent_variable, 2) # returns 2 ``` If none of the expressions succeed, the function call fails with all of the errors it encountered. The second function `can` is similar except that it ignores the result of the given expression altogether and simply returns `true` if the expression produced a successful result or `false` if it produced errors. Both of these are primarily intended for working with deep data structures which might not have a dependable shape. For example, we can use `try` to attempt to fetch a value from deep inside a data structure but produce a default value if any step of the traversal fails: ```hcl result = try(foo.deep[0].lots.of["traversals"], null) ``` The final result to `try` should generally be some sort of constant value that will always evaluate successfully. ## Using these functions Languages built on HCL can make `try` and `can` available to user code by exporting them in the `hcl.EvalContext` used for expression evaluation: ```go ctx := &hcl.EvalContext{ Functions: map[string]function.Function{ "try": tryfunc.TryFunc, "can": tryfunc.CanFunc, }, } ``` hcl-2.14.1/ext/tryfunc/tryfunc.go000066400000000000000000000122471431334125700166710ustar00rootroot00000000000000// Package tryfunc contains some optional functions that can be exposed in // HCL-based languages to allow authors to test whether a particular expression // can succeed and take dynamic action based on that result. // // These functions are implemented in terms of the customdecode extension from // the sibling directory "customdecode", and so they are only useful when // used within an HCL EvalContext. Other systems using cty functions are // unlikely to support the HCL-specific "customdecode" extension. package tryfunc import ( "errors" "fmt" "strings" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/customdecode" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" ) // TryFunc is a variadic function that tries to evaluate all of is arguments // in sequence until one succeeds, in which case it returns that result, or // returns an error if none of them succeed. var TryFunc function.Function // CanFunc tries to evaluate the expression given in its first argument. 
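// It returns cty.True if the expression evaluates successfully, cty.False if
// evaluation produces errors, and an unknown boolean value when the
// expression depends on unknown values, since in that case the outcome
// cannot be decided yet.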
var CanFunc function.Function func init() { TryFunc = function.New(&function.Spec{ VarParam: &function.Parameter{ Name: "expressions", Type: customdecode.ExpressionClosureType, }, Type: func(args []cty.Value) (cty.Type, error) { v, err := try(args) if err != nil { return cty.NilType, err } return v.Type(), nil }, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { return try(args) }, }) CanFunc = function.New(&function.Spec{ Params: []function.Parameter{ { Name: "expression", Type: customdecode.ExpressionClosureType, }, }, Type: function.StaticReturnType(cty.Bool), Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { return can(args[0]) }, }) } func try(args []cty.Value) (cty.Value, error) { if len(args) == 0 { return cty.NilVal, errors.New("at least one argument is required") } // We'll collect up all of the diagnostics we encounter along the way // and report them all if none of the expressions succeed, so that the // user might get some hints on how to make at least one succeed. var diags hcl.Diagnostics for _, arg := range args { closure := customdecode.ExpressionClosureFromVal(arg) if dependsOnUnknowns(closure.Expression, closure.EvalContext) { // We can't safely decide if this expression will succeed yet, // and so our entire result must be unknown until we have // more information. return cty.DynamicVal, nil } v, moreDiags := closure.Value() diags = append(diags, moreDiags...) if moreDiags.HasErrors() { continue // try the next one, if there is one to try } return v, nil // ignore any accumulated diagnostics if one succeeds } // If we fall out here then none of the expressions succeeded, and so // we must have at least one diagnostic and we'll return all of them // so that the user can see the errors related to whichever one they // were expecting to have succeeded in this case. // // Because our function must return a single error value rather than // diagnostics, we'll construct a suitable error message string // that will make sense in the context of the function call failure // diagnostic HCL will eventually wrap this in. var buf strings.Builder buf.WriteString("no expression succeeded:\n") for _, diag := range diags { if diag.Subject != nil { buf.WriteString(fmt.Sprintf("- %s (at %s)\n %s\n", diag.Summary, diag.Subject, diag.Detail)) } else { buf.WriteString(fmt.Sprintf("- %s\n %s\n", diag.Summary, diag.Detail)) } } buf.WriteString("\nAt least one expression must produce a successful result") return cty.NilVal, errors.New(buf.String()) } func can(arg cty.Value) (cty.Value, error) { closure := customdecode.ExpressionClosureFromVal(arg) if dependsOnUnknowns(closure.Expression, closure.EvalContext) { // Can't decide yet, then. return cty.UnknownVal(cty.Bool), nil } _, diags := closure.Value() if diags.HasErrors() { return cty.False, nil } return cty.True, nil } // dependsOnUnknowns returns true if any of the variables that the given // expression might access are unknown values or contain unknown values. // // This is a conservative result that prefers to return true if there's any // chance that the expression might derive from an unknown value during its // evaluation; it is likely to produce false-positives for more complex // expressions involving deep data structures. func dependsOnUnknowns(expr hcl.Expression, ctx *hcl.EvalContext) bool { for _, traversal := range expr.Variables() { val, diags := traversal.TraverseAbs(ctx) if diags.HasErrors() { // If the traversal returned a definitive error then it must // not traverse through any unknowns. 
continue } if !val.IsWhollyKnown() { // The value will be unknown if either it refers directly to // an unknown value or if the traversal moves through an unknown // collection. We're using IsWhollyKnown, so this also catches // situations where the traversal refers to a compound data // structure that contains any unknown values. That's important, // because during evaluation the expression might evaluate more // deeply into this structure and encounter the unknowns. return true } } return false } hcl-2.14.1/ext/tryfunc/tryfunc_test.go000066400000000000000000000133051431334125700177240ustar00rootroot00000000000000package tryfunc import ( "testing" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" ) func TestTryFunc(t *testing.T) { tests := map[string]struct { expr string vars map[string]cty.Value want cty.Value wantErr string }{ "one argument succeeds": { `try(1)`, nil, cty.NumberIntVal(1), ``, }, "one marked argument succeeds": { `try(sensitive)`, map[string]cty.Value{ "sensitive": cty.StringVal("secret").Mark("porpoise"), }, cty.StringVal("secret").Mark("porpoise"), ``, }, "two arguments, first succeeds": { `try(1, 2)`, nil, cty.NumberIntVal(1), ``, }, "two arguments, first fails": { `try(nope, 2)`, nil, cty.NumberIntVal(2), ``, }, "two arguments, first depends on unknowns": { `try(unknown, 2)`, map[string]cty.Value{ "unknown": cty.UnknownVal(cty.Number), }, cty.DynamicVal, // can't proceed until first argument is known ``, }, "two arguments, first succeeds and second depends on unknowns": { `try(1, unknown)`, map[string]cty.Value{ "unknown": cty.UnknownVal(cty.Number), }, cty.NumberIntVal(1), // we know 1st succeeds, so it doesn't matter that 2nd is unknown ``, }, "two arguments, first depends on unknowns deeply": { `try(has_unknowns, 2)`, map[string]cty.Value{ "has_unknowns": cty.ListVal([]cty.Value{cty.UnknownVal(cty.Bool)}), }, cty.DynamicVal, // can't proceed until first argument is wholly known ``, }, "two arguments, first traverses through an unkown": { `try(unknown.baz, 2)`, map[string]cty.Value{ "unknown": cty.UnknownVal(cty.Map(cty.String)), }, cty.DynamicVal, // can't proceed until first argument is wholly known ``, }, "two arguments, both marked, first succeeds": { `try(sensitive, other)`, map[string]cty.Value{ "sensitive": cty.StringVal("secret").Mark("porpoise"), "other": cty.StringVal("that").Mark("a"), }, cty.StringVal("secret").Mark("porpoise"), ``, }, "two arguments, both marked, second succeeds": { `try(sensitive, other)`, map[string]cty.Value{ "other": cty.StringVal("that").Mark("a"), }, cty.StringVal("that").Mark("a"), ``, }, "two arguments, result is element of marked list ": { `try(sensitive[0], other)`, map[string]cty.Value{ "sensitive": cty.ListVal([]cty.Value{ cty.StringVal("list"), cty.StringVal("of "), cty.StringVal("secrets"), }).Mark("secret"), "other": cty.StringVal("not"), }, cty.StringVal("list").Mark("secret"), ``, }, "three arguments, all fail": { `try(this, that, this_thing_in_particular)`, nil, cty.NumberIntVal(2), // The grammar of this stringification of the message is unfortunate, // but caller can type-assert our result to get the original // diagnostics directly in order to produce a better result. `test.hcl:1,1-5: Error in function call; Call to function "try" failed: no expression succeeded: - Variables not allowed (at test.hcl:1,5-9) Variables may not be used here. - Variables not allowed (at test.hcl:1,11-15) Variables may not be used here. 
- Variables not allowed (at test.hcl:1,17-41) Variables may not be used here. At least one expression must produce a successful result.`, }, "no arguments": { `try()`, nil, cty.NilVal, `test.hcl:1,1-5: Error in function call; Call to function "try" failed: at least one argument is required.`, }, } for k, test := range tests { t.Run(k, func(t *testing.T) { expr, diags := hclsyntax.ParseExpression([]byte(test.expr), "test.hcl", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Fatalf("unexpected problems: %s", diags.Error()) } ctx := &hcl.EvalContext{ Variables: test.vars, Functions: map[string]function.Function{ "try": TryFunc, }, } got, err := expr.Value(ctx) if err != nil { if test.wantErr != "" { if got, want := err.Error(), test.wantErr; got != want { t.Errorf("wrong error\ngot: %s\nwant: %s", got, want) } } else { t.Errorf("unexpected error\ngot: %s\nwant: ", err) } return } if test.wantErr != "" { t.Errorf("wrong error\ngot: \nwant: %s", test.wantErr) } if !test.want.RawEquals(got) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, test.want) } }) } } func TestCanFunc(t *testing.T) { tests := map[string]struct { expr string vars map[string]cty.Value want cty.Value }{ "succeeds": { `can(1)`, nil, cty.True, }, "fails": { `can(nope)`, nil, cty.False, }, "simple unknown": { `can(unknown)`, map[string]cty.Value{ "unknown": cty.UnknownVal(cty.Number), }, cty.UnknownVal(cty.Bool), }, "traversal through unknown": { `can(unknown.foo)`, map[string]cty.Value{ "unknown": cty.UnknownVal(cty.Map(cty.Number)), }, cty.UnknownVal(cty.Bool), }, "deep unknown": { `can(has_unknown)`, map[string]cty.Value{ "has_unknown": cty.ListVal([]cty.Value{cty.UnknownVal(cty.Bool)}), }, cty.UnknownVal(cty.Bool), }, } for k, test := range tests { t.Run(k, func(t *testing.T) { expr, diags := hclsyntax.ParseExpression([]byte(test.expr), "test.hcl", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Fatalf("unexpected problems: %s", diags.Error()) } ctx := &hcl.EvalContext{ Variables: test.vars, Functions: map[string]function.Function{ "can": CanFunc, }, } got, err := expr.Value(ctx) if err != nil { t.Errorf("unexpected error\ngot: %s\nwant: ", err) } if !test.want.RawEquals(got) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, test.want) } }) } } hcl-2.14.1/ext/typeexpr/000077500000000000000000000000001431334125700150305ustar00rootroot00000000000000hcl-2.14.1/ext/typeexpr/README.md000066400000000000000000000132611431334125700163120ustar00rootroot00000000000000# HCL Type Expressions Extension This HCL extension defines a convention for describing HCL types using function call and variable reference syntax, allowing configuration formats to include type information provided by users. The type syntax is processed statically from a hcl.Expression, so it cannot use any of the usual language operators. This is similar to type expressions in statically-typed programming languages. ```hcl variable "example" { type = list(string) } ``` The extension is built using the `hcl.ExprAsKeyword` and `hcl.ExprCall` functions, and so it relies on the underlying syntax to define how "keyword" and "call" are interpreted. 
The above shows how they are interpreted in the HCL native syntax, while the following shows the same information expressed in JSON: ```json { "variable": { "example": { "type": "list(string)" } } } ``` Notice that since we have additional contextual information that we intend to allow only calls and keywords the JSON syntax is able to parse the given string directly as an expression, rather than as a template as would be the case for normal expression evaluation. For more information, see [the godoc reference](http://godoc.org/github.com/hashicorp/hcl/v2/ext/typeexpr). ## Type Expression Syntax When expressed in the native syntax, the following expressions are permitted in a type expression: * `string` - string * `bool` - boolean * `number` - number * `any` - `cty.DynamicPseudoType` (in function `TypeConstraint` only) * `list()` - list of the type given as an argument * `set()` - set of the type given as an argument * `map()` - map of the type given as an argument * `tuple([])` - tuple with the element types given in the single list argument * `object({=, ...}` - object with the attributes and corresponding types given in the single map argument For example: * `list(string)` * `object({name=string,age=number})` * `map(object({name=string,age=number}))` Note that the object constructor syntax is not fully-general for all possible object types because it requires the attribute names to be valid identifiers. In practice it is expected that any time an object type is being fixed for type checking it will be one that has identifiers as its attributes; object types with weird attributes generally show up only from arbitrary object constructors in configuration files, which are usually treated either as maps or as the dynamic pseudo-type. ## Type Constraints as Values Along with defining a convention for writing down types using HCL expression constructs, this package also includes a mechanism for representing types as values that can be used as data within an HCL-based language. `typeexpr.TypeConstraintType` is a [`cty` capsule type](https://github.com/zclconf/go-cty/blob/master/docs/types.md#capsule-types) that encapsulates `cty.Type` values. You can construct such a value directly using the `TypeConstraintVal` function: ```go tyVal := typeexpr.TypeConstraintVal(cty.String) // We can unpack the type from a value using TypeConstraintFromVal ty := typeExpr.TypeConstraintFromVal(tyVal) ``` However, the primary purpose of `typeexpr.TypeConstraintType` is to be specified as the type constraint for an argument, in which case it serves as a signal for HCL to treat the argument expression as a type constraint expression as defined above, rather than as a normal value expression. "An argument" in the above in practice means the following two locations: * As the type constraint for a parameter of a cty function that will be used in an `hcl.EvalContext`. In that case, function calls in the HCL native expression syntax will require the argument to be valid type constraint expression syntax and the function implementation will receive a `TypeConstraintType` value as the argument value for that parameter. * As the type constraint for a `hcldec.AttrSpec` or `hcldec.BlockAttrsSpec` when decoding an HCL body using `hcldec`. In that case, the attributes with that type constraint will be required to be valid type constraint expression syntax and the result will be a `TypeConstraintType` value. 
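As a rough sketch of the second case (the spec layout, the attribute name
`type`, and the already-parsed `body` value are illustrative assumptions, not
part of this package), an `hcldec.AttrSpec` can use
`typeexpr.TypeConstraintType` so that the decoded attribute arrives as an
encapsulated type:

```go
spec := hcldec.ObjectSpec{
	"type": &hcldec.AttrSpec{
		Name:     "type",
		Type:     typeexpr.TypeConstraintType,
		Required: true,
	},
}

val, diags := hcldec.Decode(body, spec, nil)
if !diags.HasErrors() {
	// The "type" attribute must be written as type constraint syntax in the
	// configuration, and decodes to a TypeConstraintType value that can be
	// unpacked into a cty.Type.
	ty := typeexpr.TypeConstraintFromVal(val.GetAttr("type"))
	_ = ty
}
```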
Note that the special handling of these arguments means that an argument marked in this way must use the type constraint syntax directly. It is not valid to pass in a value of `TypeConstraintType` that has been obtained dynamically via some other expression result. `TypeConstraintType` is provided with the intent of using it internally within application code when incorporating type constraint expression syntax into an HCL-based language, not to be used for dynamic "programming with types". A calling application could support programming with types by defining its _own_ capsule type, but that is not the purpose of `TypeConstraintType`. ## The "convert" `cty` Function Building on the `TypeConstraintType` described in the previous section, this package also provides `typeexpr.ConvertFunc` which is a cty function that can be placed into a `cty.EvalContext` (conventionally named "convert") in order to provide a general type conversion function in an HCL-based language: ```hcl foo = convert("true", bool) ``` The second parameter uses the mechanism described in the previous section to require its argument to be a type constraint expression rather than a value expression. In doing so, it allows converting with any type constraint that can be expressed in this package's type constraint syntax. In the above example, the `foo` argument would receive a boolean true, or `cty.True` in `cty` terms. The target type constraint must always be provided statically using inline type constraint syntax. There is no way to _dynamically_ select a type constraint using this function. hcl-2.14.1/ext/typeexpr/defaults.go000066400000000000000000000122121431334125700171640ustar00rootroot00000000000000package typeexpr import ( "github.com/zclconf/go-cty/cty" ) // Defaults represents a type tree which may contain default values for // optional object attributes at any level. This is used to apply nested // defaults to an input value before converting it to the concrete type. type Defaults struct { // Type of the node for which these defaults apply. This is necessary in // order to determine how to inspect the Defaults and Children collections. Type cty.Type // DefaultValues contains the default values for each object attribute, // indexed by attribute name. DefaultValues map[string]cty.Value // Children is a map of Defaults for elements contained in this type. This // only applies to structural and collection types. // // The map is indexed by string instead of cty.Value because cty.Number // instances are non-comparable, due to embedding a *big.Float. // // Collections have a single element type, which is stored at key "". Children map[string]*Defaults } // Apply walks the given value, applying specified defaults wherever optional // attributes are missing. The input and output values may have different // types, and the result may still require type conversion to the final desired // type. // // This function is permissive and does not report errors, assuming that the // caller will have better context to report useful type conversion failure // diagnostics. func (d *Defaults) Apply(val cty.Value) cty.Value { val, err := cty.TransformWithTransformer(val, &defaultsTransformer{defaults: d}) // The transformer should never return an error. if err != nil { panic(err) } return val } // defaultsTransformer implements cty.Transformer, as a pre-order traversal, // applying defaults as it goes. 
The pre-order traversal allows us to specify // defaults more loosely for structural types, as the defaults for the types // will be applied to the default value later in the walk. type defaultsTransformer struct { defaults *Defaults } var _ cty.Transformer = (*defaultsTransformer)(nil) func (t *defaultsTransformer) Enter(p cty.Path, v cty.Value) (cty.Value, error) { // Cannot apply defaults to an unknown value if !v.IsKnown() { return v, nil } // Look up the defaults for this path. defaults := t.defaults.traverse(p) // If we have no defaults, nothing to do. if len(defaults) == 0 { return v, nil } // Ensure we are working with an object or map. vt := v.Type() if !vt.IsObjectType() && !vt.IsMapType() { // Cannot apply defaults because the value type is incompatible. // We'll ignore this and let the later conversion stage display a // more useful diagnostic. return v, nil } // Unmark the value and reapply the marks later. v, valMarks := v.Unmark() // Convert the given value into an attribute map (if it's non-null and // non-empty). attrs := make(map[string]cty.Value) if !v.IsNull() && v.LengthInt() > 0 { attrs = v.AsValueMap() } // Apply defaults where attributes are missing, constructing a new // value with the same marks. for attr, defaultValue := range defaults { if attrValue, ok := attrs[attr]; !ok || attrValue.IsNull() { attrs[attr] = defaultValue } } // We construct an object even if the input value was a map, as the // type of an attribute's default value may be incompatible with the // map element type. return cty.ObjectVal(attrs).WithMarks(valMarks), nil } func (t *defaultsTransformer) Exit(p cty.Path, v cty.Value) (cty.Value, error) { return v, nil } // traverse walks the abstract defaults structure for a given path, returning // a set of default values (if any are present) or nil (if not). This operation // differs from applying a path to a value because we need to customize the // traversal steps for collection types, where a single set of defaults can be // applied to an arbitrary number of elements. func (d *Defaults) traverse(path cty.Path) map[string]cty.Value { if len(path) == 0 { return d.DefaultValues } switch s := path[0].(type) { case cty.GetAttrStep: if d.Type.IsObjectType() { // Attribute path steps are normally applied to objects, where each // attribute may have different defaults. return d.traverseChild(s.Name, path) } else if d.Type.IsMapType() { // Literal values for maps can result in attribute path steps, in which // case we need to disregard the attribute name, as maps can have only // one child. return d.traverseChild("", path) } return nil case cty.IndexStep: if d.Type.IsTupleType() { // Tuples can have different types for each element, so we look // up the defaults based on the index key. return d.traverseChild(s.Key.AsBigFloat().String(), path) } else if d.Type.IsCollectionType() { // Defaults for collection element types are stored with a blank // key, so we disregard the index key. return d.traverseChild("", path) } return nil default: // At time of writing there are no other path step types. return nil } } // traverseChild continues the traversal for a given child key, and mutually // recurses with traverse. 
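//
// The name argument follows the same key convention as the Children map: an
// attribute name for object types, a stringified element index for tuple
// types, and the blank key "" for the single element type of lists, sets,
// and maps.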
func (d *Defaults) traverseChild(name string, path cty.Path) map[string]cty.Value { if child, ok := d.Children[name]; ok { return child.traverse(path[1:]) } return nil } hcl-2.14.1/ext/typeexpr/defaults_test.go000066400000000000000000000321561431334125700202340ustar00rootroot00000000000000package typeexpr import ( "testing" "github.com/google/go-cmp/cmp" "github.com/zclconf/go-cty/cty" ) var ( valueComparer = cmp.Comparer(cty.Value.RawEquals) ) func TestDefaults_Apply(t *testing.T) { simpleObject := cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "a": cty.String, "b": cty.Bool, }, []string{"b"}) nestedObject := cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "c": simpleObject, "d": cty.Number, }, []string{"c"}) testCases := map[string]struct { defaults *Defaults value cty.Value want cty.Value }{ // Nothing happens when there are no default values and no children. "no defaults": { defaults: &Defaults{ Type: cty.Map(cty.String), }, value: cty.MapVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.StringVal("bar"), }), want: cty.MapVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.StringVal("bar"), }), }, // Passing a map which does not include one of the attributes with a // default results in the default being applied to the output. Output // is always an object. "simple object with defaults applied": { defaults: &Defaults{ Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.True, }, }, value: cty.MapVal(map[string]cty.Value{ "a": cty.StringVal("foo"), }), want: cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.True, }), }, // Unknown values may be assigned to root modules during validation, // and we cannot apply defaults at that time. "simple object with defaults but unknown value": { defaults: &Defaults{ Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.True, }, }, value: cty.UnknownVal(cty.Map(cty.String)), want: cty.UnknownVal(cty.Map(cty.String)), }, // Defaults do not override attributes which are present in the given // value. "simple object with optional attributes specified": { defaults: &Defaults{ Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.True, }, }, value: cty.MapVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.StringVal("false"), }), want: cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.StringVal("false"), }), }, // Defaults will replace explicit nulls. "object with explicit null for attribute with default": { defaults: &Defaults{ Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.True, }, }, value: cty.MapVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.NullVal(cty.String), }), want: cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.True, }), }, // Defaults can be specified at any level of depth and will be applied // so long as there is a parent value to populate. "nested object with defaults applied": { defaults: &Defaults{ Type: nestedObject, Children: map[string]*Defaults{ "c": { Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.False, }, }, }, }, value: cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), }), "d": cty.NumberIntVal(5), }), want: cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.False, }), "d": cty.NumberIntVal(5), }), }, // Testing traversal of collections. 
"map of objects with defaults applied": { defaults: &Defaults{ Type: cty.Map(simpleObject), Children: map[string]*Defaults{ "": { Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.True, }, }, }, }, value: cty.MapVal(map[string]cty.Value{ "f": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), }), "b": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("bar"), }), }), want: cty.MapVal(map[string]cty.Value{ "f": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.True, }), "b": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("bar"), "b": cty.True, }), }), }, // A map variable value specified in a tfvars file will be an object, // in which case we must still traverse the defaults structure // correctly. "map of objects with defaults applied, given object instead of map": { defaults: &Defaults{ Type: cty.Map(simpleObject), Children: map[string]*Defaults{ "": { Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.True, }, }, }, }, value: cty.ObjectVal(map[string]cty.Value{ "f": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), }), "b": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("bar"), }), }), want: cty.ObjectVal(map[string]cty.Value{ "f": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.True, }), "b": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("bar"), "b": cty.True, }), }), }, // Another example of a collection type, this time exercising the code // processing a tuple input. "list of objects with defaults applied": { defaults: &Defaults{ Type: cty.List(simpleObject), Children: map[string]*Defaults{ "": { Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.True, }, }, }, }, value: cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), }), cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("bar"), }), }), want: cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.True, }), cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("bar"), "b": cty.True, }), }), }, // Unlike collections, tuple variable types can have defaults for // multiple element types. "tuple of objects with defaults applied": { defaults: &Defaults{ Type: cty.Tuple([]cty.Type{simpleObject, nestedObject}), Children: map[string]*Defaults{ "0": { Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.False, }, }, "1": { Type: nestedObject, DefaultValues: map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("default"), "b": cty.True, }), }, }, }, }, value: cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), }), cty.ObjectVal(map[string]cty.Value{ "d": cty.NumberIntVal(5), }), }), want: cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.False, }), cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("default"), "b": cty.True, }), "d": cty.NumberIntVal(5), }), }), }, // More complex cases with deeply nested defaults, testing the "default // within a default" edges. 
"set of nested objects, no default sub-object": { defaults: &Defaults{ Type: cty.Set(nestedObject), Children: map[string]*Defaults{ "": { Type: nestedObject, Children: map[string]*Defaults{ "c": { Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.True, }, }, }, }, }, }, value: cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), }), "d": cty.NumberIntVal(5), }), cty.ObjectVal(map[string]cty.Value{ "d": cty.NumberIntVal(7), }), }), want: cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.True, }), "d": cty.NumberIntVal(5), }), cty.ObjectVal(map[string]cty.Value{ // No default value for "c" specified, so none applied. The // convert stage will fill in a null. "d": cty.NumberIntVal(7), }), }), }, "set of nested objects, empty default sub-object": { defaults: &Defaults{ Type: cty.Set(nestedObject), Children: map[string]*Defaults{ "": { Type: nestedObject, DefaultValues: map[string]cty.Value{ // This is a convenient shorthand which causes a // missing sub-object to be filled with an object // with all of the default values specified in the // sub-object's type. "c": cty.EmptyObjectVal, }, Children: map[string]*Defaults{ "c": { Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.True, }, }, }, }, }, }, value: cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), }), "d": cty.NumberIntVal(5), }), cty.ObjectVal(map[string]cty.Value{ "d": cty.NumberIntVal(7), }), }), want: cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.True, }), "d": cty.NumberIntVal(5), }), cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ // Default value for "b" is applied to the empty object // specified as the default for "c" "b": cty.True, }), "d": cty.NumberIntVal(7), }), }), }, "set of nested objects, overriding default sub-object": { defaults: &Defaults{ Type: cty.Set(nestedObject), Children: map[string]*Defaults{ "": { Type: nestedObject, DefaultValues: map[string]cty.Value{ // If no value is given for "c", we use this object // of non-default values instead. These take // precedence over the default values specified in // the child type. "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("fallback"), "b": cty.False, }), }, Children: map[string]*Defaults{ "c": { Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.True, }, }, }, }, }, }, value: cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), }), "d": cty.NumberIntVal(5), }), cty.ObjectVal(map[string]cty.Value{ "d": cty.NumberIntVal(7), }), }), want: cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.True, }), "d": cty.NumberIntVal(5), }), cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ // The default value for "b" is not applied, as the // default value for "c" includes a non-default value // already. 
"a": cty.StringVal("fallback"), "b": cty.False, }), "d": cty.NumberIntVal(7), }), }), }, "set of nested objects, nulls in default sub-object overridden": { defaults: &Defaults{ Type: cty.Set(nestedObject), Children: map[string]*Defaults{ "": { Type: nestedObject, DefaultValues: map[string]cty.Value{ // The default value for "c" is used to prepopulate // the nested object's value if not specified, but // the null default for its "b" attribute will be // overridden by the default specified in the child // type. "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("fallback"), "b": cty.NullVal(cty.Bool), }), }, Children: map[string]*Defaults{ "c": { Type: simpleObject, DefaultValues: map[string]cty.Value{ "b": cty.True, }, }, }, }, }, }, value: cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), }), "d": cty.NumberIntVal(5), }), cty.ObjectVal(map[string]cty.Value{ "d": cty.NumberIntVal(7), }), }), want: cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ "a": cty.StringVal("foo"), "b": cty.True, }), "d": cty.NumberIntVal(5), }), cty.ObjectVal(map[string]cty.Value{ "c": cty.ObjectVal(map[string]cty.Value{ // The default value for "b" overrides the explicit // null in the default value for "c". "a": cty.StringVal("fallback"), "b": cty.True, }), "d": cty.NumberIntVal(7), }), }), }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { got := tc.defaults.Apply(tc.value) if !cmp.Equal(tc.want, got, valueComparer) { t.Errorf("wrong result\n%s", cmp.Diff(tc.want, got, valueComparer)) } }) } } hcl-2.14.1/ext/typeexpr/doc.go000066400000000000000000000006201431334125700161220ustar00rootroot00000000000000// Package typeexpr extends HCL with a convention for describing HCL types // within configuration files. // // The type syntax is processed statically from a hcl.Expression, so it cannot // use any of the usual language operators. This is similar to type expressions // in statically-typed programming languages. // // variable "example" { // type = list(string) // } package typeexpr hcl-2.14.1/ext/typeexpr/get_type.go000066400000000000000000000267631431334125700172150ustar00rootroot00000000000000package typeexpr import ( "fmt" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" ) const invalidTypeSummary = "Invalid type specification" // getType is the internal implementation of Type, TypeConstraint, and // TypeConstraintWithDefaults, using the passed flags to distinguish. When // `constraint` is true, the "any" keyword can be used in place of a concrete // type. When `withDefaults` is true, the "optional" call expression supports // an additional argument describing a default value. 
func getType(expr hcl.Expression, constraint, withDefaults bool) (cty.Type, *Defaults, hcl.Diagnostics) { // First we'll try for one of our keywords kw := hcl.ExprAsKeyword(expr) switch kw { case "bool": return cty.Bool, nil, nil case "string": return cty.String, nil, nil case "number": return cty.Number, nil, nil case "any": if constraint { return cty.DynamicPseudoType, nil, nil } return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: fmt.Sprintf("The keyword %q cannot be used in this type specification: an exact type is required.", kw), Subject: expr.Range().Ptr(), }} case "list", "map", "set": return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: fmt.Sprintf("The %s type constructor requires one argument specifying the element type.", kw), Subject: expr.Range().Ptr(), }} case "object": return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: "The object type constructor requires one argument specifying the attribute types and values as a map.", Subject: expr.Range().Ptr(), }} case "tuple": return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: "The tuple type constructor requires one argument specifying the element types as a list.", Subject: expr.Range().Ptr(), }} case "": // okay! we'll fall through and try processing as a call, then. default: return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: fmt.Sprintf("The keyword %q is not a valid type specification.", kw), Subject: expr.Range().Ptr(), }} } // If we get down here then our expression isn't just a keyword, so we'll // try to process it as a call instead. call, diags := hcl.ExprCall(expr) if diags.HasErrors() { return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: "A type specification is either a primitive type keyword (bool, number, string) or a complex type constructor call, like list(string).", Subject: expr.Range().Ptr(), }} } switch call.Name { case "bool", "string", "number": return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: fmt.Sprintf("Primitive type keyword %q does not expect arguments.", call.Name), Subject: &call.ArgsRange, }} case "any": return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: fmt.Sprintf("Type constraint keyword %q does not expect arguments.", call.Name), Subject: &call.ArgsRange, }} } if len(call.Arguments) != 1 { contextRange := call.ArgsRange subjectRange := call.ArgsRange if len(call.Arguments) > 1 { // If we have too many arguments (as opposed to too _few_) then // we'll highlight the extraneous arguments as the diagnostic // subject. 
subjectRange = hcl.RangeBetween(call.Arguments[1].Range(), call.Arguments[len(call.Arguments)-1].Range()) } switch call.Name { case "list", "set", "map": return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: fmt.Sprintf("The %s type constructor requires one argument specifying the element type.", call.Name), Subject: &subjectRange, Context: &contextRange, }} case "object": return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: "The object type constructor requires one argument specifying the attribute types and values as a map.", Subject: &subjectRange, Context: &contextRange, }} case "tuple": return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: "The tuple type constructor requires one argument specifying the element types as a list.", Subject: &subjectRange, Context: &contextRange, }} } } switch call.Name { case "list": ety, defaults, diags := getType(call.Arguments[0], constraint, withDefaults) ty := cty.List(ety) return ty, collectionDefaults(ty, defaults), diags case "set": ety, defaults, diags := getType(call.Arguments[0], constraint, withDefaults) ty := cty.Set(ety) return ty, collectionDefaults(ty, defaults), diags case "map": ety, defaults, diags := getType(call.Arguments[0], constraint, withDefaults) ty := cty.Map(ety) return ty, collectionDefaults(ty, defaults), diags case "object": attrDefs, diags := hcl.ExprMap(call.Arguments[0]) if diags.HasErrors() { return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: "Object type constructor requires a map whose keys are attribute names and whose values are the corresponding attribute types.", Subject: call.Arguments[0].Range().Ptr(), Context: expr.Range().Ptr(), }} } atys := make(map[string]cty.Type) defaultValues := make(map[string]cty.Value) children := make(map[string]*Defaults) var optAttrs []string for _, attrDef := range attrDefs { attrName := hcl.ExprAsKeyword(attrDef.Key) if attrName == "" { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: "Object constructor map keys must be attribute names.", Subject: attrDef.Key.Range().Ptr(), Context: expr.Range().Ptr(), }) continue } atyExpr := attrDef.Value // the attribute type expression might be wrapped in the special // modifier optional(...) to indicate an optional attribute. If // so, we'll unwrap that first and make a note about it being // optional for when we construct the type below. var defaultExpr hcl.Expression if call, callDiags := hcl.ExprCall(atyExpr); !callDiags.HasErrors() { if call.Name == "optional" { if len(call.Arguments) < 1 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: "Optional attribute modifier requires the attribute type as its argument.", Subject: call.ArgsRange.Ptr(), Context: atyExpr.Range().Ptr(), }) continue } if constraint { if withDefaults { switch len(call.Arguments) { case 2: defaultExpr = call.Arguments[1] defaultVal, defaultDiags := defaultExpr.Value(nil) diags = append(diags, defaultDiags...) 
if !defaultDiags.HasErrors() { optAttrs = append(optAttrs, attrName) defaultValues[attrName] = defaultVal } case 1: optAttrs = append(optAttrs, attrName) default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: "Optional attribute modifier expects at most two arguments: the attribute type, and a default value.", Subject: call.ArgsRange.Ptr(), Context: atyExpr.Range().Ptr(), }) } } else { if len(call.Arguments) == 1 { optAttrs = append(optAttrs, attrName) } else { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: "Optional attribute modifier expects only one argument: the attribute type.", Subject: call.ArgsRange.Ptr(), Context: atyExpr.Range().Ptr(), }) } } } else { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: "Optional attribute modifier is only for type constraints, not for exact types.", Subject: call.NameRange.Ptr(), Context: atyExpr.Range().Ptr(), }) } atyExpr = call.Arguments[0] } } aty, aDefaults, attrDiags := getType(atyExpr, constraint, withDefaults) diags = append(diags, attrDiags...) // If a default is set for an optional attribute, verify that it is // convertible to the attribute type. if defaultVal, ok := defaultValues[attrName]; ok { convertedDefaultVal, err := convert.Convert(defaultVal, aty) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid default value for optional attribute", Detail: fmt.Sprintf("This default value is not compatible with the attribute's type constraint: %s.", err), Subject: defaultExpr.Range().Ptr(), }) delete(defaultValues, attrName) } else { defaultValues[attrName] = convertedDefaultVal } } atys[attrName] = aty if aDefaults != nil { children[attrName] = aDefaults } } ty := cty.ObjectWithOptionalAttrs(atys, optAttrs) return ty, structuredDefaults(ty, defaultValues, children), diags case "tuple": elemDefs, diags := hcl.ExprList(call.Arguments[0]) if diags.HasErrors() { return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: "Tuple type constructor requires a list of element types.", Subject: call.Arguments[0].Range().Ptr(), Context: expr.Range().Ptr(), }} } etys := make([]cty.Type, len(elemDefs)) children := make(map[string]*Defaults, len(elemDefs)) for i, defExpr := range elemDefs { ety, elemDefaults, elemDiags := getType(defExpr, constraint, withDefaults) diags = append(diags, elemDiags...) etys[i] = ety if elemDefaults != nil { children[fmt.Sprintf("%d", i)] = elemDefaults } } ty := cty.Tuple(etys) return ty, structuredDefaults(ty, nil, children), diags case "optional": return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: fmt.Sprintf("Keyword %q is valid only as a modifier for object type attributes.", call.Name), Subject: call.NameRange.Ptr(), }} default: // Can't access call.Arguments in this path because we've not validated // that it contains exactly one expression here. 
return cty.DynamicPseudoType, nil, hcl.Diagnostics{{ Severity: hcl.DiagError, Summary: invalidTypeSummary, Detail: fmt.Sprintf("Keyword %q is not a valid type constructor.", call.Name), Subject: expr.Range().Ptr(), }} } } func collectionDefaults(ty cty.Type, defaults *Defaults) *Defaults { if defaults == nil { return nil } return &Defaults{ Type: ty, Children: map[string]*Defaults{ "": defaults, }, } } func structuredDefaults(ty cty.Type, defaultValues map[string]cty.Value, children map[string]*Defaults) *Defaults { if len(defaultValues) == 0 && len(children) == 0 { return nil } defaults := &Defaults{ Type: ty, } if len(defaultValues) > 0 { defaults.DefaultValues = defaultValues } if len(children) > 0 { defaults.Children = children } return defaults } hcl-2.14.1/ext/typeexpr/get_type_test.go000066400000000000000000000343301431334125700202410ustar00rootroot00000000000000package typeexpr import ( "fmt" "testing" "github.com/hashicorp/hcl/v2/gohcl" "github.com/google/go-cmp/cmp" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/hashicorp/hcl/v2/json" "github.com/zclconf/go-cty/cty" ) var ( typeComparer = cmp.Comparer(cty.Type.Equals) ) func TestGetType(t *testing.T) { tests := []struct { Source string Constraint bool Want cty.Type WantError string }{ // keywords { `bool`, false, cty.Bool, "", }, { `number`, false, cty.Number, "", }, { `string`, false, cty.String, "", }, { `any`, false, cty.DynamicPseudoType, `The keyword "any" cannot be used in this type specification: an exact type is required.`, }, { `any`, true, cty.DynamicPseudoType, "", }, { `list`, false, cty.DynamicPseudoType, "The list type constructor requires one argument specifying the element type.", }, { `map`, false, cty.DynamicPseudoType, "The map type constructor requires one argument specifying the element type.", }, { `set`, false, cty.DynamicPseudoType, "The set type constructor requires one argument specifying the element type.", }, { `object`, false, cty.DynamicPseudoType, "The object type constructor requires one argument specifying the attribute types and values as a map.", }, { `tuple`, false, cty.DynamicPseudoType, "The tuple type constructor requires one argument specifying the element types as a list.", }, // constructors { `bool()`, false, cty.DynamicPseudoType, `Primitive type keyword "bool" does not expect arguments.`, }, { `number()`, false, cty.DynamicPseudoType, `Primitive type keyword "number" does not expect arguments.`, }, { `string()`, false, cty.DynamicPseudoType, `Primitive type keyword "string" does not expect arguments.`, }, { `any()`, false, cty.DynamicPseudoType, `Type constraint keyword "any" does not expect arguments.`, }, { `any()`, true, cty.DynamicPseudoType, `Type constraint keyword "any" does not expect arguments.`, }, { `list(string)`, false, cty.List(cty.String), ``, }, { `set(string)`, false, cty.Set(cty.String), ``, }, { `map(string)`, false, cty.Map(cty.String), ``, }, { `list()`, false, cty.DynamicPseudoType, `The list type constructor requires one argument specifying the element type.`, }, { `list(string, string)`, false, cty.DynamicPseudoType, `The list type constructor requires one argument specifying the element type.`, }, { `list(any)`, false, cty.List(cty.DynamicPseudoType), `The keyword "any" cannot be used in this type specification: an exact type is required.`, }, { `list(any)`, true, cty.List(cty.DynamicPseudoType), ``, }, { `object({})`, false, cty.EmptyObject, ``, }, { `object({name=string})`, false, cty.Object(map[string]cty.Type{"name": 
cty.String}), ``, }, { `object({"name"=string})`, false, cty.EmptyObject, `Object constructor map keys must be attribute names.`, }, { `object({name=nope})`, false, cty.Object(map[string]cty.Type{"name": cty.DynamicPseudoType}), `The keyword "nope" is not a valid type specification.`, }, { `object()`, false, cty.DynamicPseudoType, `The object type constructor requires one argument specifying the attribute types and values as a map.`, }, { `object(string)`, false, cty.DynamicPseudoType, `Object type constructor requires a map whose keys are attribute names and whose values are the corresponding attribute types.`, }, { `tuple([])`, false, cty.EmptyTuple, ``, }, { `tuple([string, bool])`, false, cty.Tuple([]cty.Type{cty.String, cty.Bool}), ``, }, { `tuple([nope])`, false, cty.Tuple([]cty.Type{cty.DynamicPseudoType}), `The keyword "nope" is not a valid type specification.`, }, { `tuple()`, false, cty.DynamicPseudoType, `The tuple type constructor requires one argument specifying the element types as a list.`, }, { `tuple(string)`, false, cty.DynamicPseudoType, `Tuple type constructor requires a list of element types.`, }, { `shwoop(string)`, false, cty.DynamicPseudoType, `Keyword "shwoop" is not a valid type constructor.`, }, { `list("string")`, false, cty.List(cty.DynamicPseudoType), `A type specification is either a primitive type keyword (bool, number, string) or a complex type constructor call, like list(string).`, }, // More interesting combinations { `list(object({}))`, false, cty.List(cty.EmptyObject), ``, }, { `list(map(tuple([])))`, false, cty.List(cty.Map(cty.EmptyTuple)), ``, }, // Optional modifier { `object({name=string,age=optional(number)})`, true, cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "name": cty.String, "age": cty.Number, }, []string{"age"}), ``, }, { `object({name=string,meta=optional(any)})`, true, cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "name": cty.String, "meta": cty.DynamicPseudoType, }, []string{"meta"}), ``, }, { `object({name=string,age=optional(number)})`, false, cty.Object(map[string]cty.Type{ "name": cty.String, "age": cty.Number, }), `Optional attribute modifier is only for type constraints, not for exact types.`, }, { `object({name=string,meta=optional(any)})`, false, cty.Object(map[string]cty.Type{ "name": cty.String, "meta": cty.DynamicPseudoType, }), `Optional attribute modifier is only for type constraints, not for exact types.`, }, { `object({name=string,meta=optional()})`, true, cty.Object(map[string]cty.Type{ "name": cty.String, }), `Optional attribute modifier requires the attribute type as its argument.`, }, { `object({name=string,meta=optional(string, "hello")})`, true, cty.Object(map[string]cty.Type{ "name": cty.String, "meta": cty.String, }), `Optional attribute modifier expects only one argument: the attribute type.`, }, { `optional(string)`, false, cty.DynamicPseudoType, `Keyword "optional" is valid only as a modifier for object type attributes.`, }, { `optional`, false, cty.DynamicPseudoType, `The keyword "optional" is not a valid type specification.`, }, } for _, test := range tests { t.Run(fmt.Sprintf("%s (constraint=%v)", test.Source, test.Constraint), func(t *testing.T) { expr, diags := hclsyntax.ParseExpression([]byte(test.Source), "", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Fatalf("failed to parse: %s", diags) } got, _, diags := getType(expr, test.Constraint, false) if test.WantError == "" { for _, diag := range diags { t.Error(diag) } } else { found := false for _, diag := range diags { t.Log(diag) if 
diag.Severity == hcl.DiagError && diag.Detail == test.WantError { found = true } } if !found { t.Errorf("missing expected error detail message: %s", test.WantError) } } if !got.Equals(test.Want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, test.Want) } }) } } func TestGetTypeJSON(t *testing.T) { // We have fewer test cases here because we're mainly exercising the // extra indirection in the JSON syntax package, which ultimately calls // into the native syntax parser (which we tested extensively in // TestGetType). tests := []struct { Source string Constraint bool Want cty.Type WantError string }{ { `{"expr":"bool"}`, false, cty.Bool, "", }, { `{"expr":"list(bool)"}`, false, cty.List(cty.Bool), "", }, { `{"expr":"list"}`, false, cty.DynamicPseudoType, "The list type constructor requires one argument specifying the element type.", }, } for _, test := range tests { t.Run(test.Source, func(t *testing.T) { file, diags := json.Parse([]byte(test.Source), "") if diags.HasErrors() { t.Fatalf("failed to parse: %s", diags) } type TestContent struct { Expr hcl.Expression `hcl:"expr"` } var content TestContent diags = gohcl.DecodeBody(file.Body, nil, &content) if diags.HasErrors() { t.Fatalf("failed to decode: %s", diags) } got, _, diags := getType(content.Expr, test.Constraint, false) if test.WantError == "" { for _, diag := range diags { t.Error(diag) } } else { found := false for _, diag := range diags { t.Log(diag) if diag.Severity == hcl.DiagError && diag.Detail == test.WantError { found = true } } if !found { t.Errorf("missing expected error detail message: %s", test.WantError) } } if !got.Equals(test.Want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, test.Want) } }) } } func TestGetTypeDefaults(t *testing.T) { tests := []struct { Source string Want *Defaults WantError string }{ // primitive types have nil defaults { `bool`, nil, "", }, { `number`, nil, "", }, { `string`, nil, "", }, { `any`, nil, "", }, // complex structures with no defaults have nil defaults { `map(string)`, nil, "", }, { `set(number)`, nil, "", }, { `tuple([number, string])`, nil, "", }, { `object({ a = string, b = number })`, nil, "", }, { `map(list(object({ a = string, b = optional(number) })))`, nil, "", }, // object optional attribute with defaults { `object({ a = string, b = optional(number, 5) })`, &Defaults{ Type: cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "a": cty.String, "b": cty.Number, }, []string{"b"}), DefaultValues: map[string]cty.Value{ "b": cty.NumberIntVal(5), }, }, "", }, // nested defaults { `object({ a = optional(object({ b = optional(number, 5) }), {}) })`, &Defaults{ Type: cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "a": cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "b": cty.Number, }, []string{"b"}), }, []string{"a"}), DefaultValues: map[string]cty.Value{ "a": cty.ObjectVal(map[string]cty.Value{ "b": cty.NullVal(cty.Number), }), }, Children: map[string]*Defaults{ "a": { Type: cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "b": cty.Number, }, []string{"b"}), DefaultValues: map[string]cty.Value{ "b": cty.NumberIntVal(5), }, }, }, }, "", }, // collections of objects with defaults { `map(object({ a = string, b = optional(number, 5) }))`, &Defaults{ Type: cty.Map(cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "a": cty.String, "b": cty.Number, }, []string{"b"})), Children: map[string]*Defaults{ "": { Type: cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "a": cty.String, "b": cty.Number, }, []string{"b"}), DefaultValues: map[string]cty.Value{ "b": cty.NumberIntVal(5), }, }, }, 
}, "", }, { `list(object({ a = string, b = optional(number, 5) }))`, &Defaults{ Type: cty.List(cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "a": cty.String, "b": cty.Number, }, []string{"b"})), Children: map[string]*Defaults{ "": { Type: cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "a": cty.String, "b": cty.Number, }, []string{"b"}), DefaultValues: map[string]cty.Value{ "b": cty.NumberIntVal(5), }, }, }, }, "", }, { `set(object({ a = string, b = optional(number, 5) }))`, &Defaults{ Type: cty.Set(cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "a": cty.String, "b": cty.Number, }, []string{"b"})), Children: map[string]*Defaults{ "": { Type: cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "a": cty.String, "b": cty.Number, }, []string{"b"}), DefaultValues: map[string]cty.Value{ "b": cty.NumberIntVal(5), }, }, }, }, "", }, // tuples containing objects with defaults work differently from // collections { `tuple([string, bool, object({ a = string, b = optional(number, 5) })])`, &Defaults{ Type: cty.Tuple([]cty.Type{ cty.String, cty.Bool, cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "a": cty.String, "b": cty.Number, }, []string{"b"}), }), Children: map[string]*Defaults{ "2": { Type: cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "a": cty.String, "b": cty.Number, }, []string{"b"}), DefaultValues: map[string]cty.Value{ "b": cty.NumberIntVal(5), }, }, }, }, "", }, // incompatible default value causes an error { `object({ a = optional(string, "hello"), b = optional(number, true) })`, &Defaults{ Type: cty.ObjectWithOptionalAttrs(map[string]cty.Type{ "a": cty.String, "b": cty.Number, }, []string{"a", "b"}), DefaultValues: map[string]cty.Value{ "a": cty.StringVal("hello"), }, }, "This default value is not compatible with the attribute's type constraint: number required.", }, // Too many arguments { `object({name=string,meta=optional(string, "hello", "world")})`, nil, `Optional attribute modifier expects at most two arguments: the attribute type, and a default value.`, }, } for _, test := range tests { t.Run(test.Source, func(t *testing.T) { expr, diags := hclsyntax.ParseExpression([]byte(test.Source), "", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Fatalf("failed to parse: %s", diags) } _, got, diags := getType(expr, true, true) if test.WantError == "" { for _, diag := range diags { t.Error(diag) } } else { found := false for _, diag := range diags { t.Log(diag) if diag.Severity == hcl.DiagError && diag.Detail == test.WantError { found = true } } if !found { t.Errorf("missing expected error detail message: %s", test.WantError) } } if !cmp.Equal(test.Want, got, valueComparer, typeComparer) { t.Errorf("wrong result\n%s", cmp.Diff(test.Want, got, valueComparer, typeComparer)) } }) } } hcl-2.14.1/ext/typeexpr/public.go000066400000000000000000000104171431334125700166400ustar00rootroot00000000000000package typeexpr import ( "bytes" "fmt" "sort" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) // Type attempts to process the given expression as a type expression and, if // successful, returns the resulting type. If unsuccessful, error diagnostics // are returned. func Type(expr hcl.Expression) (cty.Type, hcl.Diagnostics) { ty, _, diags := getType(expr, false, false) return ty, diags } // TypeConstraint attempts to parse the given expression as a type constraint // and, if successful, returns the resulting type. If unsuccessful, error // diagnostics are returned. 
// // A type constraint has the same structure as a type, but it additionally // allows the keyword "any" to represent cty.DynamicPseudoType, which is often // used as a wildcard in type checking and type conversion operations. func TypeConstraint(expr hcl.Expression) (cty.Type, hcl.Diagnostics) { ty, _, diags := getType(expr, true, false) return ty, diags } // TypeConstraintWithDefaults attempts to parse the given expression as a type // constraint which may include default values for object attributes. If // successful both the resulting type and corresponding defaults are returned. // If unsuccessful, error diagnostics are returned. // // When using this function, defaults should be applied to the input value // before type conversion, to ensure that objects with missing attributes have // default values populated. func TypeConstraintWithDefaults(expr hcl.Expression) (cty.Type, *Defaults, hcl.Diagnostics) { return getType(expr, true, true) } // TypeString returns a string rendering of the given type as it would be // expected to appear in the HCL native syntax. // // This is primarily intended for showing types to the user in an application // that uses typexpr, where the user can be assumed to be familiar with the // type expression syntax. In applications that do not use typeexpr these // results may be confusing to the user and so type.FriendlyName may be // preferable, even though it's less precise. // // TypeString produces reasonable results only for types like what would be // produced by the Type and TypeConstraint functions. In particular, it cannot // support capsule types. func TypeString(ty cty.Type) string { // Easy cases first switch ty { case cty.String: return "string" case cty.Bool: return "bool" case cty.Number: return "number" case cty.DynamicPseudoType: return "any" } if ty.IsCapsuleType() { panic("TypeString does not support capsule types") } if ty.IsCollectionType() { ety := ty.ElementType() etyString := TypeString(ety) switch { case ty.IsListType(): return fmt.Sprintf("list(%s)", etyString) case ty.IsSetType(): return fmt.Sprintf("set(%s)", etyString) case ty.IsMapType(): return fmt.Sprintf("map(%s)", etyString) default: // Should never happen because the above is exhaustive panic("unsupported collection type") } } if ty.IsObjectType() { var buf bytes.Buffer buf.WriteString("object({") atys := ty.AttributeTypes() names := make([]string, 0, len(atys)) for name := range atys { names = append(names, name) } sort.Strings(names) first := true for _, name := range names { aty := atys[name] if !first { buf.WriteByte(',') } if !hclsyntax.ValidIdentifier(name) { // Should never happen for any type produced by this package, // but we'll do something reasonable here just so we don't // produce garbage if someone gives us a hand-assembled object // type that has weird attribute names. // Using Go-style quoting here isn't perfect, since it doesn't // exactly match HCL syntax, but it's fine for an edge-case. buf.WriteString(fmt.Sprintf("%q", name)) } else { buf.WriteString(name) } buf.WriteByte('=') buf.WriteString(TypeString(aty)) first = false } buf.WriteString("})") return buf.String() } if ty.IsTupleType() { var buf bytes.Buffer buf.WriteString("tuple([") etys := ty.TupleElementTypes() first := true for _, ety := range etys { if !first { buf.WriteByte(',') } buf.WriteString(TypeString(ety)) first = false } buf.WriteString("])") return buf.String() } // Should never happen because we covered all cases above. 
panic(fmt.Errorf("unsupported type %#v", ty)) } hcl-2.14.1/ext/typeexpr/type_string_test.go000066400000000000000000000037641431334125700207770ustar00rootroot00000000000000package typeexpr import ( "testing" "github.com/zclconf/go-cty/cty" ) func TestTypeString(t *testing.T) { tests := []struct { Type cty.Type Want string }{ { cty.DynamicPseudoType, "any", }, { cty.String, "string", }, { cty.Number, "number", }, { cty.Bool, "bool", }, { cty.List(cty.Number), "list(number)", }, { cty.Set(cty.Bool), "set(bool)", }, { cty.Map(cty.String), "map(string)", }, { cty.EmptyObject, "object({})", }, { cty.Object(map[string]cty.Type{"foo": cty.Bool}), "object({foo=bool})", }, { cty.Object(map[string]cty.Type{"foo": cty.Bool, "bar": cty.String}), "object({bar=string,foo=bool})", }, { cty.EmptyTuple, "tuple([])", }, { cty.Tuple([]cty.Type{cty.Bool}), "tuple([bool])", }, { cty.Tuple([]cty.Type{cty.Bool, cty.String}), "tuple([bool,string])", }, { cty.List(cty.DynamicPseudoType), "list(any)", }, { cty.Tuple([]cty.Type{cty.DynamicPseudoType}), "tuple([any])", }, { cty.Object(map[string]cty.Type{"foo": cty.DynamicPseudoType}), "object({foo=any})", }, { // We don't expect to find attributes that aren't valid identifiers // because we only promise to support types that this package // would've created, but we allow this situation during rendering // just because it's convenient for applications trying to produce // error messages about mismatched types. Note that the quoted // attribute name is not actually accepted by our Type and // TypeConstraint functions, so this is one situation where the // TypeString result cannot be re-parsed by those functions. cty.Object(map[string]cty.Type{"foo bar baz": cty.String}), `object({"foo bar baz"=string})`, }, } for _, test := range tests { t.Run(test.Type.GoString(), func(t *testing.T) { got := TypeString(test.Type) if got != test.Want { t.Errorf("wrong result\ntype: %#v\ngot: %s\nwant: %s", test.Type, got, test.Want) } }) } } hcl-2.14.1/ext/typeexpr/type_type.go000066400000000000000000000071061431334125700174050ustar00rootroot00000000000000package typeexpr import ( "fmt" "reflect" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/customdecode" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" "github.com/zclconf/go-cty/cty/function" ) // TypeConstraintType is a cty capsule type that allows cty type constraints to // be used as values. // // If TypeConstraintType is used in a context supporting the // customdecode.CustomExpressionDecoder extension then it will implement // expression decoding using the TypeConstraint function, thus allowing // type expressions to be used in contexts where value expressions might // normally be expected, such as in arguments to function calls. var TypeConstraintType cty.Type // TypeConstraintVal constructs a cty.Value whose type is // TypeConstraintType. func TypeConstraintVal(ty cty.Type) cty.Value { return cty.CapsuleVal(TypeConstraintType, &ty) } // TypeConstraintFromVal extracts the type from a cty.Value of // TypeConstraintType that was previously constructed using TypeConstraintVal. // // If the given value isn't a known, non-null value of TypeConstraintType // then this function will panic. func TypeConstraintFromVal(v cty.Value) cty.Type { if !v.Type().Equals(TypeConstraintType) { panic("value is not of TypeConstraintType") } ptr := v.EncapsulatedValue().(*cty.Type) return *ptr } // ConvertFunc is a cty function that implements type conversions. 
// // Its signature is as follows: // convert(value, type_constraint) // // ...where type_constraint is a type constraint expression as defined by // typeexpr.TypeConstraint. // // It relies on HCL's customdecode extension and so it's not suitable for use // in non-HCL contexts or if you are using a HCL syntax implementation that // does not support customdecode for function arguments. However, it _is_ // supported for function calls in the HCL native expression syntax. var ConvertFunc function.Function func init() { TypeConstraintType = cty.CapsuleWithOps("type constraint", reflect.TypeOf(cty.Type{}), &cty.CapsuleOps{ ExtensionData: func(key interface{}) interface{} { switch key { case customdecode.CustomExpressionDecoder: return customdecode.CustomExpressionDecoderFunc( func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { ty, diags := TypeConstraint(expr) if diags.HasErrors() { return cty.NilVal, diags } return TypeConstraintVal(ty), nil }, ) default: return nil } }, TypeGoString: func(_ reflect.Type) string { return "typeexpr.TypeConstraintType" }, GoString: func(raw interface{}) string { tyPtr := raw.(*cty.Type) return fmt.Sprintf("typeexpr.TypeConstraintVal(%#v)", *tyPtr) }, RawEquals: func(a, b interface{}) bool { aPtr := a.(*cty.Type) bPtr := b.(*cty.Type) return (*aPtr).Equals(*bPtr) }, }) ConvertFunc = function.New(&function.Spec{ Params: []function.Parameter{ { Name: "value", Type: cty.DynamicPseudoType, AllowNull: true, AllowDynamicType: true, }, { Name: "type", Type: TypeConstraintType, }, }, Type: func(args []cty.Value) (cty.Type, error) { wantTypePtr := args[1].EncapsulatedValue().(*cty.Type) got, err := convert.Convert(args[0], *wantTypePtr) if err != nil { return cty.NilType, function.NewArgError(0, err) } return got.Type(), nil }, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { v, err := convert.Convert(args[0], retType) if err != nil { return cty.NilVal, function.NewArgError(0, err) } return v, nil }, }) } hcl-2.14.1/ext/typeexpr/type_type_test.go000066400000000000000000000057021431334125700204440ustar00rootroot00000000000000package typeexpr import ( "fmt" "testing" "github.com/zclconf/go-cty/cty" ) func TestTypeConstraintType(t *testing.T) { tyVal1 := TypeConstraintVal(cty.String) tyVal2 := TypeConstraintVal(cty.String) tyVal3 := TypeConstraintVal(cty.Number) if !tyVal1.RawEquals(tyVal2) { t.Errorf("tyVal1 not equal to tyVal2\ntyVal1: %#v\ntyVal2: %#v", tyVal1, tyVal2) } if tyVal1.RawEquals(tyVal3) { t.Errorf("tyVal1 equal to tyVal2, but should not be\ntyVal1: %#v\ntyVal3: %#v", tyVal1, tyVal3) } if got, want := TypeConstraintFromVal(tyVal1), cty.String; !got.Equals(want) { t.Errorf("wrong type extracted from tyVal1\ngot: %#v\nwant: %#v", got, want) } if got, want := TypeConstraintFromVal(tyVal3), cty.Number; !got.Equals(want) { t.Errorf("wrong type extracted from tyVal3\ngot: %#v\nwant: %#v", got, want) } } func TestConvertFunc(t *testing.T) { // This is testing the convert function directly, skipping over the HCL // parsing and evaluation steps that would normally lead there. There is // another test in the "integrationtest" package called TestTypeConvertFunc // that exercises the full path to this function via the hclsyntax parser. 
tests := []struct { val, ty cty.Value want cty.Value wantErr string }{ // The goal here is not an exhaustive set of conversions, since that's // already covered in cty/convert, but rather exercising different // permutations of success and failure to make sure the function // handles all of the results in a reasonable way. { cty.StringVal("hello"), TypeConstraintVal(cty.String), cty.StringVal("hello"), ``, }, { cty.True, TypeConstraintVal(cty.String), cty.StringVal("true"), ``, }, { cty.StringVal("hello"), TypeConstraintVal(cty.Bool), cty.NilVal, `a bool is required`, }, { cty.UnknownVal(cty.Bool), TypeConstraintVal(cty.Bool), cty.UnknownVal(cty.Bool), ``, }, { cty.DynamicVal, TypeConstraintVal(cty.Bool), cty.UnknownVal(cty.Bool), ``, }, { cty.NullVal(cty.Bool), TypeConstraintVal(cty.Bool), cty.NullVal(cty.Bool), ``, }, { cty.NullVal(cty.DynamicPseudoType), TypeConstraintVal(cty.Bool), cty.NullVal(cty.Bool), ``, }, { cty.StringVal("hello").Mark(1), TypeConstraintVal(cty.String), cty.StringVal("hello").Mark(1), ``, }, } for _, test := range tests { t.Run(fmt.Sprintf("%#v to %#v", test.val, test.ty), func(t *testing.T) { got, err := ConvertFunc.Call([]cty.Value{test.val, test.ty}) if err != nil { if test.wantErr != "" { if got, want := err.Error(), test.wantErr; got != want { t.Errorf("wrong error\ngot: %s\nwant: %s", got, want) } } else { t.Errorf("unexpected error\ngot: %s\nwant: ", err) } return } if test.wantErr != "" { t.Errorf("wrong error\ngot: \nwant: %s", test.wantErr) } if !test.want.RawEquals(got) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, test.want) } }) } } hcl-2.14.1/ext/userfunc/000077500000000000000000000000001431334125700150025ustar00rootroot00000000000000hcl-2.14.1/ext/userfunc/README.md000066400000000000000000000015471431334125700162700ustar00rootroot00000000000000# HCL User Functions Extension This HCL extension allows a calling application to support user-defined functions. Functions are defined via a specific block type, like this: ```hcl function "add" { params = [a, b] result = a + b } function "list" { params = [] variadic_param = items result = items } ``` The extension is implemented as a pre-processor for `cty.Body` objects. Given a body that may contain functions, the `DecodeUserFunctions` function searches for blocks that define functions and returns a functions map suitable for inclusion in a `hcl.EvalContext`. It also returns a new `cty.Body` that contains the remainder of the content from the given body, allowing for further processing of remaining content. For more information, see [the godoc reference](https://pkg.go.dev/github.com/hashicorp/hcl/v2/ext/userfunc?tab=doc). 
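
As a non-authoritative sketch of how a calling application might wire this extension together (the `function` block type, the `sum` attribute, and the file name below are illustrative assumptions, not part of the extension itself):

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/ext/userfunc"
	"github.com/hashicorp/hcl/v2/hclsyntax"
)

func main() {
	src := `
function "add" {
  params = [a, b]
  result = a + b
}

sum = add(1, 2)
`
	f, diags := hclsyntax.ParseConfig([]byte(src), "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		log.Fatal(diags)
	}

	// Extract the user-defined functions; "remain" holds the rest of the body.
	funcs, remain, diags := userfunc.DecodeUserFunctions(f.Body, "function", nil)
	if diags.HasErrors() {
		log.Fatal(diags)
	}

	// Offer the collected functions to expressions in the remaining content.
	ctx := &hcl.EvalContext{Functions: funcs}
	attrs, diags := remain.JustAttributes()
	if diags.HasErrors() {
		log.Fatal(diags)
	}
	val, diags := attrs["sum"].Expr.Value(ctx)
	if diags.HasErrors() {
		log.Fatal(diags)
	}
	fmt.Println(val.AsBigFloat()) // 3
}
```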
hcl-2.14.1/ext/userfunc/decode.go000066400000000000000000000077661431334125700165740ustar00rootroot00000000000000package userfunc import ( "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" ) var funcBodySchema = &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "params", Required: true, }, { Name: "variadic_param", Required: false, }, { Name: "result", Required: true, }, }, } func decodeUserFunctions(body hcl.Body, blockType string, contextFunc ContextFunc) (funcs map[string]function.Function, remain hcl.Body, diags hcl.Diagnostics) { schema := &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: blockType, LabelNames: []string{"name"}, }, }, } content, remain, diags := body.PartialContent(schema) if diags.HasErrors() { return nil, remain, diags } // first call to getBaseCtx will populate context, and then the same // context will be used for all subsequent calls. It's assumed that // all functions in a given body should see an identical context. var baseCtx *hcl.EvalContext getBaseCtx := func() *hcl.EvalContext { if baseCtx == nil { if contextFunc != nil { baseCtx = contextFunc() } } // baseCtx might still be nil here, and that's okay return baseCtx } funcs = make(map[string]function.Function) Blocks: for _, block := range content.Blocks { name := block.Labels[0] funcContent, funcDiags := block.Body.Content(funcBodySchema) diags = append(diags, funcDiags...) if funcDiags.HasErrors() { continue } paramsExpr := funcContent.Attributes["params"].Expr resultExpr := funcContent.Attributes["result"].Expr var varParamExpr hcl.Expression if funcContent.Attributes["variadic_param"] != nil { varParamExpr = funcContent.Attributes["variadic_param"].Expr } var params []string var varParam string paramExprs, paramsDiags := hcl.ExprList(paramsExpr) diags = append(diags, paramsDiags...) if paramsDiags.HasErrors() { continue } for _, paramExpr := range paramExprs { param := hcl.ExprAsKeyword(paramExpr) if param == "" { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid param element", Detail: "Each parameter name must be an identifier.", Subject: paramExpr.Range().Ptr(), }) continue Blocks } params = append(params, param) } if varParamExpr != nil { varParam = hcl.ExprAsKeyword(varParamExpr) if varParam == "" { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid variadic_param", Detail: "The variadic parameter name must be an identifier.", Subject: varParamExpr.Range().Ptr(), }) continue } } spec := &function.Spec{} for _, paramName := range params { spec.Params = append(spec.Params, function.Parameter{ Name: paramName, Type: cty.DynamicPseudoType, }) } if varParamExpr != nil { spec.VarParam = &function.Parameter{ Name: varParam, Type: cty.DynamicPseudoType, } } impl := func(args []cty.Value) (cty.Value, error) { ctx := getBaseCtx() ctx = ctx.NewChild() ctx.Variables = make(map[string]cty.Value) // The cty function machinery guarantees that we have at least // enough args to fill all of our params. for i, paramName := range params { ctx.Variables[paramName] = args[i] } if spec.VarParam != nil { varArgs := args[len(params):] ctx.Variables[varParam] = cty.TupleVal(varArgs) } result, diags := resultExpr.Value(ctx) if diags.HasErrors() { // Smuggle the diagnostics out via the error channel, since // a diagnostics sequence implements error. Caller can // type-assert this to recover the individual diagnostics // if desired. 
return cty.DynamicVal, diags } return result, nil } spec.Type = func(args []cty.Value) (cty.Type, error) { val, err := impl(args) return val.Type(), err } spec.Impl = func(args []cty.Value, retType cty.Type) (cty.Value, error) { return impl(args) } funcs[name] = function.New(spec) } return funcs, remain, diags } hcl-2.14.1/ext/userfunc/decode_test.go000066400000000000000000000056751431334125700176300ustar00rootroot00000000000000package userfunc import ( "fmt" "testing" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/zclconf/go-cty/cty" ) func TestDecodeUserFunctions(t *testing.T) { tests := []struct { src string testExpr string baseCtx *hcl.EvalContext want cty.Value diagCount int }{ { ` function "greet" { params = [name] result = "Hello, ${name}." } `, `greet("Ermintrude")`, nil, cty.StringVal("Hello, Ermintrude."), 0, }, { ` function "greet" { params = [name] result = "Hello, ${name}." } `, `greet()`, nil, cty.DynamicVal, 1, // missing value for "name" }, { ` function "greet" { params = [name] result = "Hello, ${name}." } `, `greet("Ermintrude", "extra")`, nil, cty.DynamicVal, 1, // too many arguments }, { ` function "add" { params = [a, b] result = a + b } `, `add(1, 5)`, nil, cty.NumberIntVal(6), 0, }, { ` function "argstuple" { params = [] variadic_param = args result = args } `, `argstuple("a", true, 1)`, nil, cty.TupleVal([]cty.Value{cty.StringVal("a"), cty.True, cty.NumberIntVal(1)}), 0, }, { ` function "missing_var" { params = [] result = nonexist } `, `missing_var()`, nil, cty.DynamicVal, 1, // no variable named "nonexist" }, { ` function "closure" { params = [] result = upvalue } `, `closure()`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "upvalue": cty.True, }, }, cty.True, 0, }, { ` function "neg" { params = [val] result = -val } function "add" { params = [a, b] result = a + b } `, `neg(add(1, 3))`, nil, cty.NumberIntVal(-4), 0, }, { ` function "neg" { parrams = [val] result = -val } `, `null`, nil, cty.NullVal(cty.DynamicPseudoType), 2, // missing attribute "params", and unknown attribute "parrams" }, } for i, test := range tests { t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) { f, diags := hclsyntax.ParseConfig([]byte(test.src), "config", hcl.Pos{Line: 1, Column: 1}) if f == nil || f.Body == nil { t.Fatalf("got nil file or body") } funcs, _, funcsDiags := decodeUserFunctions(f.Body, "function", func() *hcl.EvalContext { return test.baseCtx }) diags = append(diags, funcsDiags...) expr, exprParseDiags := hclsyntax.ParseExpression([]byte(test.testExpr), "testexpr", hcl.Pos{Line: 1, Column: 1}) diags = append(diags, exprParseDiags...) if expr == nil { t.Fatalf("parsing test expr returned nil") } got, exprDiags := expr.Value(&hcl.EvalContext{ Functions: funcs, }) diags = append(diags, exprDiags...) if len(diags) != test.diagCount { t.Errorf("wrong number of diagnostics %d; want %d", len(diags), test.diagCount) for _, diag := range diags { t.Logf("- %s", diag) } } if !got.RawEquals(test.want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, test.want) } }) } } hcl-2.14.1/ext/userfunc/doc.go000066400000000000000000000015201431334125700160740ustar00rootroot00000000000000// Package userfunc implements a HCL extension that allows user-defined // functions in HCL configuration. // // Using this extension requires some integration effort on the part of the // calling application, to pass any declared functions into a HCL evaluation // context after processing. 
// // The function declaration syntax looks like this: // // function "foo" { // params = ["name"] // result = "Hello, ${name}!" // } // // When a user-defined function is called, the expression given for the "result" // attribute is evaluated in an isolated evaluation context that defines variables // named after the given parameter names. // // The block name "function" may be overridden by the calling application, if // that default name conflicts with an existing block or attribute name in // the application. package userfunc hcl-2.14.1/ext/userfunc/public.go000066400000000000000000000037631431334125700166200ustar00rootroot00000000000000package userfunc import ( "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty/function" ) // A ContextFunc is a callback used to produce the base EvalContext for // running a particular set of functions. // // This is a function rather than an EvalContext directly to allow functions // to be decoded before their context is complete. This will be true, for // example, for applications that wish to allow functions to refer to themselves. // // The simplest use of a ContextFunc is to give user functions access to the // same global variables and functions available elsewhere in an application's // configuration language, but more complex applications may use different // contexts to support lexical scoping depending on where in a configuration // structure a function declaration is found, etc. type ContextFunc func() *hcl.EvalContext // DecodeUserFunctions looks for blocks of the given type in the given body // and, for each one found, interprets it as a custom function definition. // // On success, the result is a mapping of function names to implementations, // along with a new body that represents the remaining content of the given // body which can be used for further processing. // // The result expression of each function is parsed during decoding but not // evaluated until the function is called. // // If the given ContextFunc is non-nil, it will be called to obtain the // context in which the function result expressions will be evaluated. If nil, // or if it returns nil, the result expression will have access only to // variables named after the declared parameters. A non-nil context turns // the returned functions into closures, bound to the given context. // // If the returned diagnostics set has errors then the function map and // remain body may be nil or incomplete. 
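//
// A minimal usage sketch (non-authoritative; the block type "function" and
// the parsed file "f" are assumptions for illustration only):
//
//	funcs, remain, diags := userfunc.DecodeUserFunctions(f.Body, "function", nil)
//	if diags.HasErrors() {
//		// ...handle diagnostics...
//	}
//	evalCtx := &hcl.EvalContext{Functions: funcs}
//	// Evaluate the remaining content in "remain" with evalCtx as usual.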
func DecodeUserFunctions(body hcl.Body, blockType string, context ContextFunc) (funcs map[string]function.Function, remain hcl.Body, diags hcl.Diagnostics) { return decodeUserFunctions(body, blockType, context) } hcl-2.14.1/go.mod000066400000000000000000000017731431334125700134660ustar00rootroot00000000000000module github.com/hashicorp/hcl/v2 go 1.18 require ( github.com/agext/levenshtein v1.2.1 github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3 github.com/apparentlymart/go-textseg/v13 v13.0.0 github.com/davecgh/go-spew v1.1.1 github.com/go-test/deep v1.0.3 github.com/google/go-cmp v0.3.1 github.com/kr/pretty v0.1.0 github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 github.com/sergi/go-diff v1.0.0 github.com/spf13/pflag v1.0.2 github.com/zclconf/go-cty v1.8.0 github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b golang.org/x/crypto v0.0.0-20220517005047-85d78b3ac167 ) require ( github.com/kr/text v0.1.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/stretchr/testify v1.2.2 // indirect golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1 // indirect golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 // indirect golang.org/x/text v0.3.6 // indirect ) hcl-2.14.1/go.sum000066400000000000000000000142361431334125700135110ustar00rootroot00000000000000github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3 h1:ZSTrOEhiM5J5RFxEaFvMZVEAM1KvT1YzbEOwB2EAGjA= github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2wFoYVvnCs0= github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= github.com/kylelemons/godebug 
v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM= github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/spf13/pflag v1.0.2 h1:Fy0orTDgHdbnzHcsOgfCN4LtHf0ec3wwtiwJqwvf3Gc= github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= github.com/zclconf/go-cty v1.8.0 h1:s4AvqaeQzJIu3ndv4gVIhplVD0krU+bgrcLSVUnaWuA= github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b h1:FosyBZYxY34Wul7O/MSKey3txpPYyCqVO5ZyceuQJEI= github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20220517005047-85d78b3ac167 h1:O8uGbHCqlTp2P6QJSLmCojM4mN6UemYv8K+dCnmHmu0= golang.org/x/crypto v0.0.0-20220517005047-85d78b3ac167/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1 h1:SrN+KX8Art/Sf4HNj6Zcz06G7VEz+7w9tdXTPOZ7+l4= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod 
h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= hcl-2.14.1/gohcl/000077500000000000000000000000001431334125700134445ustar00rootroot00000000000000hcl-2.14.1/gohcl/decode.go000066400000000000000000000214651431334125700152260ustar00rootroot00000000000000package gohcl import ( "fmt" "reflect" "github.com/zclconf/go-cty/cty" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty/convert" "github.com/zclconf/go-cty/cty/gocty" ) // DecodeBody extracts the configuration within the given body into the given // value. This value must be a non-nil pointer to either a struct or // a map, where in the former case the configuration will be decoded using // struct tags and in the latter case only attributes are allowed and their // values are decoded into the map. // // The given EvalContext is used to resolve any variables or functions in // expressions encountered while decoding. This may be nil to require only // constant values, for simple applications that do not support variables or // functions. // // The returned diagnostics should be inspected with its HasErrors method to // determine if the populated value is valid and complete. If error diagnostics // are returned then the given value may have been partially-populated but // may still be accessed by a careful caller for static analysis and editor // integration use-cases. func DecodeBody(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics { rv := reflect.ValueOf(val) if rv.Kind() != reflect.Ptr { panic(fmt.Sprintf("target value must be a pointer, not %s", rv.Type().String())) } return decodeBodyToValue(body, ctx, rv.Elem()) } func decodeBodyToValue(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) hcl.Diagnostics { et := val.Type() switch et.Kind() { case reflect.Struct: return decodeBodyToStruct(body, ctx, val) case reflect.Map: return decodeBodyToMap(body, ctx, val) default: panic(fmt.Sprintf("target value must be pointer to struct or map, not %s", et.String())) } } func decodeBodyToStruct(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) hcl.Diagnostics { schema, partial := ImpliedBodySchema(val.Interface()) var content *hcl.BodyContent var leftovers hcl.Body var diags hcl.Diagnostics if partial { content, leftovers, diags = body.PartialContent(schema) } else { content, diags = body.Content(schema) } if content == nil { return diags } tags := getFieldTags(val.Type()) if tags.Body != nil { fieldIdx := *tags.Body field := val.Type().Field(fieldIdx) fieldV := val.Field(fieldIdx) switch { case bodyType.AssignableTo(field.Type): fieldV.Set(reflect.ValueOf(body)) default: diags = append(diags, decodeBodyToValue(body, ctx, fieldV)...) 
} } if tags.Remain != nil { fieldIdx := *tags.Remain field := val.Type().Field(fieldIdx) fieldV := val.Field(fieldIdx) switch { case bodyType.AssignableTo(field.Type): fieldV.Set(reflect.ValueOf(leftovers)) case attrsType.AssignableTo(field.Type): attrs, attrsDiags := leftovers.JustAttributes() if len(attrsDiags) > 0 { diags = append(diags, attrsDiags...) } fieldV.Set(reflect.ValueOf(attrs)) default: diags = append(diags, decodeBodyToValue(leftovers, ctx, fieldV)...) } } for name, fieldIdx := range tags.Attributes { attr := content.Attributes[name] field := val.Type().Field(fieldIdx) fieldV := val.Field(fieldIdx) if attr == nil { if !exprType.AssignableTo(field.Type) { continue } // As a special case, if the target is of type hcl.Expression then // we'll assign an actual expression that evalues to a cty null, // so the caller can deal with it within the cty realm rather // than within the Go realm. synthExpr := hcl.StaticExpr(cty.NullVal(cty.DynamicPseudoType), body.MissingItemRange()) fieldV.Set(reflect.ValueOf(synthExpr)) continue } switch { case attrType.AssignableTo(field.Type): fieldV.Set(reflect.ValueOf(attr)) case exprType.AssignableTo(field.Type): fieldV.Set(reflect.ValueOf(attr.Expr)) default: diags = append(diags, DecodeExpression( attr.Expr, ctx, fieldV.Addr().Interface(), )...) } } blocksByType := content.Blocks.ByType() for typeName, fieldIdx := range tags.Blocks { blocks := blocksByType[typeName] field := val.Type().Field(fieldIdx) ty := field.Type isSlice := false isPtr := false if ty.Kind() == reflect.Slice { isSlice = true ty = ty.Elem() } if ty.Kind() == reflect.Ptr { isPtr = true ty = ty.Elem() } if len(blocks) > 1 && !isSlice { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Duplicate %s block", typeName), Detail: fmt.Sprintf( "Only one %s block is allowed. Another was defined at %s.", typeName, blocks[0].DefRange.String(), ), Subject: &blocks[1].DefRange, }) continue } if len(blocks) == 0 { if isSlice || isPtr { if val.Field(fieldIdx).IsNil() { val.Field(fieldIdx).Set(reflect.Zero(field.Type)) } } else { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Missing %s block", typeName), Detail: fmt.Sprintf("A %s block is required.", typeName), Subject: body.MissingItemRange().Ptr(), }) } continue } switch { case isSlice: elemType := ty if isPtr { elemType = reflect.PtrTo(ty) } sli := val.Field(fieldIdx) if sli.IsNil() { sli = reflect.MakeSlice(reflect.SliceOf(elemType), len(blocks), len(blocks)) } for i, block := range blocks { if isPtr { if i >= sli.Len() { sli = reflect.Append(sli, reflect.New(ty)) } v := sli.Index(i) if v.IsNil() { v = reflect.New(ty) } diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...) sli.Index(i).Set(v) } else { if i >= sli.Len() { sli = reflect.Append(sli, reflect.Indirect(reflect.New(ty))) } diags = append(diags, decodeBlockToValue(block, ctx, sli.Index(i))...) } } if sli.Len() > len(blocks) { sli.SetLen(len(blocks)) } val.Field(fieldIdx).Set(sli) default: block := blocks[0] if isPtr { v := val.Field(fieldIdx) if v.IsNil() { v = reflect.New(ty) } diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...) val.Field(fieldIdx).Set(v) } else { diags = append(diags, decodeBlockToValue(block, ctx, val.Field(fieldIdx))...) 
} } } return diags } func decodeBodyToMap(body hcl.Body, ctx *hcl.EvalContext, v reflect.Value) hcl.Diagnostics { attrs, diags := body.JustAttributes() if attrs == nil { return diags } mv := reflect.MakeMap(v.Type()) for k, attr := range attrs { switch { case attrType.AssignableTo(v.Type().Elem()): mv.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(attr)) case exprType.AssignableTo(v.Type().Elem()): mv.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(attr.Expr)) default: ev := reflect.New(v.Type().Elem()) diags = append(diags, DecodeExpression(attr.Expr, ctx, ev.Interface())...) mv.SetMapIndex(reflect.ValueOf(k), ev.Elem()) } } v.Set(mv) return diags } func decodeBlockToValue(block *hcl.Block, ctx *hcl.EvalContext, v reflect.Value) hcl.Diagnostics { diags := decodeBodyToValue(block.Body, ctx, v) if len(block.Labels) > 0 { blockTags := getFieldTags(v.Type()) for li, lv := range block.Labels { lfieldIdx := blockTags.Labels[li].FieldIndex v.Field(lfieldIdx).Set(reflect.ValueOf(lv)) } } return diags } // DecodeExpression extracts the value of the given expression into the given // value. This value must be something that gocty is able to decode into, // since the final decoding is delegated to that package. // // The given EvalContext is used to resolve any variables or functions in // expressions encountered while decoding. This may be nil to require only // constant values, for simple applications that do not support variables or // functions. // // The returned diagnostics should be inspected with its HasErrors method to // determine if the populated value is valid and complete. If error diagnostics // are returned then the given value may have been partially-populated but // may still be accessed by a careful caller for static analysis and editor // integration use-cases. 
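//
// A minimal usage sketch (non-authoritative; "expr" and "ctx" are assumed to
// come from earlier decoding, for example an hcl.Expression captured by
// DecodeBody):
//
//	var count int
//	diags := gohcl.DecodeExpression(expr, ctx, &count)
//	if diags.HasErrors() {
//		// ...handle diagnostics...
//	}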
func DecodeExpression(expr hcl.Expression, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics { srcVal, diags := expr.Value(ctx) convTy, err := gocty.ImpliedType(val) if err != nil { panic(fmt.Sprintf("unsuitable DecodeExpression target: %s", err)) } srcVal, err = convert.Convert(srcVal, convTy) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unsuitable value type", Detail: fmt.Sprintf("Unsuitable value: %s", err.Error()), Subject: expr.StartRange().Ptr(), Context: expr.Range().Ptr(), }) return diags } err = gocty.FromCtyValue(srcVal, val) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unsuitable value type", Detail: fmt.Sprintf("Unsuitable value: %s", err.Error()), Subject: expr.StartRange().Ptr(), Context: expr.Range().Ptr(), }) } return diags } hcl-2.14.1/gohcl/decode_test.go000066400000000000000000000417131431334125700162630ustar00rootroot00000000000000package gohcl import ( "encoding/json" "fmt" "reflect" "testing" "github.com/davecgh/go-spew/spew" "github.com/hashicorp/hcl/v2" hclJSON "github.com/hashicorp/hcl/v2/json" "github.com/zclconf/go-cty/cty" ) func TestDecodeBody(t *testing.T) { deepEquals := func(other interface{}) func(v interface{}) bool { return func(v interface{}) bool { return reflect.DeepEqual(v, other) } } type withNameExpression struct { Name hcl.Expression `hcl:"name"` } type withTwoAttributes struct { A string `hcl:"a,optional"` B string `hcl:"b,optional"` } type withNestedBlock struct { Plain string `hcl:"plain,optional"` Nested *withTwoAttributes `hcl:"nested,block"` } type withListofNestedBlocks struct { Nested []*withTwoAttributes `hcl:"nested,block"` } type withListofNestedBlocksNoPointers struct { Nested []withTwoAttributes `hcl:"nested,block"` } tests := []struct { Body map[string]interface{} Target func() interface{} Check func(v interface{}) bool DiagCount int }{ { map[string]interface{}{}, makeInstantiateType(struct{}{}), deepEquals(struct{}{}), 0, }, { map[string]interface{}{}, makeInstantiateType(struct { Name string `hcl:"name"` }{}), deepEquals(struct { Name string `hcl:"name"` }{}), 1, // name is required }, { map[string]interface{}{}, makeInstantiateType(struct { Name *string `hcl:"name"` }{}), deepEquals(struct { Name *string `hcl:"name"` }{}), 0, }, // name nil { map[string]interface{}{}, makeInstantiateType(struct { Name string `hcl:"name,optional"` }{}), deepEquals(struct { Name string `hcl:"name,optional"` }{}), 0, }, // name optional { map[string]interface{}{}, makeInstantiateType(withNameExpression{}), func(v interface{}) bool { if v == nil { return false } wne, valid := v.(withNameExpression) if !valid { return false } if wne.Name == nil { return false } nameVal, _ := wne.Name.Value(nil) if !nameVal.IsNull() { return false } return true }, 0, }, { map[string]interface{}{ "name": "Ermintrude", }, makeInstantiateType(withNameExpression{}), func(v interface{}) bool { if v == nil { return false } wne, valid := v.(withNameExpression) if !valid { return false } if wne.Name == nil { return false } nameVal, _ := wne.Name.Value(nil) if !nameVal.Equals(cty.StringVal("Ermintrude")).True() { return false } return true }, 0, }, { map[string]interface{}{ "name": "Ermintrude", }, makeInstantiateType(struct { Name string `hcl:"name"` }{}), deepEquals(struct { Name string `hcl:"name"` }{"Ermintrude"}), 0, }, { map[string]interface{}{ "name": "Ermintrude", "age": 23, }, makeInstantiateType(struct { Name string `hcl:"name"` }{}), deepEquals(struct { Name string 
`hcl:"name"` }{"Ermintrude"}), 1, // Extraneous "age" property }, { map[string]interface{}{ "name": "Ermintrude", "age": 50, }, makeInstantiateType(struct { Name string `hcl:"name"` Attrs hcl.Attributes `hcl:",remain"` }{}), func(gotI interface{}) bool { got := gotI.(struct { Name string `hcl:"name"` Attrs hcl.Attributes `hcl:",remain"` }) return got.Name == "Ermintrude" && len(got.Attrs) == 1 && got.Attrs["age"] != nil }, 0, }, { map[string]interface{}{ "name": "Ermintrude", "age": 50, }, makeInstantiateType(struct { Name string `hcl:"name"` Remain hcl.Body `hcl:",remain"` }{}), func(gotI interface{}) bool { got := gotI.(struct { Name string `hcl:"name"` Remain hcl.Body `hcl:",remain"` }) attrs, _ := got.Remain.JustAttributes() return got.Name == "Ermintrude" && len(attrs) == 1 && attrs["age"] != nil }, 0, }, { map[string]interface{}{ "name": "Ermintrude", "living": true, }, makeInstantiateType(struct { Name string `hcl:"name"` Remain map[string]cty.Value `hcl:",remain"` }{}), deepEquals(struct { Name string `hcl:"name"` Remain map[string]cty.Value `hcl:",remain"` }{ Name: "Ermintrude", Remain: map[string]cty.Value{ "living": cty.True, }, }), 0, }, { map[string]interface{}{ "name": "Ermintrude", "age": 50, }, makeInstantiateType(struct { Name string `hcl:"name"` Body hcl.Body `hcl:",body"` Remain hcl.Body `hcl:",remain"` }{}), func(gotI interface{}) bool { got := gotI.(struct { Name string `hcl:"name"` Body hcl.Body `hcl:",body"` Remain hcl.Body `hcl:",remain"` }) attrs, _ := got.Body.JustAttributes() return got.Name == "Ermintrude" && len(attrs) == 2 && attrs["name"] != nil && attrs["age"] != nil }, 0, }, { map[string]interface{}{ "noodle": map[string]interface{}{}, }, makeInstantiateType(struct { Noodle struct{} `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { // Generating no diagnostics is good enough for this one. return true }, 0, }, { map[string]interface{}{ "noodle": []map[string]interface{}{{}}, }, makeInstantiateType(struct { Noodle struct{} `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { // Generating no diagnostics is good enough for this one. return true }, 0, }, { map[string]interface{}{ "noodle": []map[string]interface{}{{}, {}}, }, makeInstantiateType(struct { Noodle struct{} `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { // Generating one diagnostic is good enough for this one. return true }, 1, }, { map[string]interface{}{}, makeInstantiateType(struct { Noodle struct{} `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { // Generating one diagnostic is good enough for this one. return true }, 1, }, { map[string]interface{}{ "noodle": []map[string]interface{}{}, }, makeInstantiateType(struct { Noodle struct{} `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { // Generating one diagnostic is good enough for this one. 
return true }, 1, }, { map[string]interface{}{ "noodle": map[string]interface{}{}, }, makeInstantiateType(struct { Noodle *struct{} `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { return gotI.(struct { Noodle *struct{} `hcl:"noodle,block"` }).Noodle != nil }, 0, }, { map[string]interface{}{ "noodle": []map[string]interface{}{{}}, }, makeInstantiateType(struct { Noodle *struct{} `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { return gotI.(struct { Noodle *struct{} `hcl:"noodle,block"` }).Noodle != nil }, 0, }, { map[string]interface{}{ "noodle": []map[string]interface{}{}, }, makeInstantiateType(struct { Noodle *struct{} `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { return gotI.(struct { Noodle *struct{} `hcl:"noodle,block"` }).Noodle == nil }, 0, }, { map[string]interface{}{ "noodle": []map[string]interface{}{{}, {}}, }, makeInstantiateType(struct { Noodle *struct{} `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { // Generating one diagnostic is good enough for this one. return true }, 1, }, { map[string]interface{}{ "noodle": []map[string]interface{}{}, }, makeInstantiateType(struct { Noodle []struct{} `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { noodle := gotI.(struct { Noodle []struct{} `hcl:"noodle,block"` }).Noodle return len(noodle) == 0 }, 0, }, { map[string]interface{}{ "noodle": []map[string]interface{}{{}}, }, makeInstantiateType(struct { Noodle []struct{} `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { noodle := gotI.(struct { Noodle []struct{} `hcl:"noodle,block"` }).Noodle return len(noodle) == 1 }, 0, }, { map[string]interface{}{ "noodle": []map[string]interface{}{{}, {}}, }, makeInstantiateType(struct { Noodle []struct{} `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { noodle := gotI.(struct { Noodle []struct{} `hcl:"noodle,block"` }).Noodle return len(noodle) == 2 }, 0, }, { map[string]interface{}{ "noodle": map[string]interface{}{}, }, makeInstantiateType(struct { Noodle struct { Name string `hcl:"name,label"` } `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { // Generating two diagnostics is good enough for this one. // (one for the missing noodle block and the other for // the JSON serialization detecting the missing level of // heirarchy for the label.) return true }, 2, }, { map[string]interface{}{ "noodle": map[string]interface{}{ "foo_foo": map[string]interface{}{}, }, }, makeInstantiateType(struct { Noodle struct { Name string `hcl:"name,label"` } `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { noodle := gotI.(struct { Noodle struct { Name string `hcl:"name,label"` } `hcl:"noodle,block"` }).Noodle return noodle.Name == "foo_foo" }, 0, }, { map[string]interface{}{ "noodle": map[string]interface{}{ "foo_foo": map[string]interface{}{}, "bar_baz": map[string]interface{}{}, }, }, makeInstantiateType(struct { Noodle struct { Name string `hcl:"name,label"` } `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { // One diagnostic is enough for this one. 
return true }, 1, }, { map[string]interface{}{ "noodle": map[string]interface{}{ "foo_foo": map[string]interface{}{}, "bar_baz": map[string]interface{}{}, }, }, makeInstantiateType(struct { Noodles []struct { Name string `hcl:"name,label"` } `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { noodles := gotI.(struct { Noodles []struct { Name string `hcl:"name,label"` } `hcl:"noodle,block"` }).Noodles return len(noodles) == 2 && (noodles[0].Name == "foo_foo" || noodles[0].Name == "bar_baz") && (noodles[1].Name == "foo_foo" || noodles[1].Name == "bar_baz") && noodles[0].Name != noodles[1].Name }, 0, }, { map[string]interface{}{ "noodle": map[string]interface{}{ "foo_foo": map[string]interface{}{ "type": "rice", }, }, }, makeInstantiateType(struct { Noodle struct { Name string `hcl:"name,label"` Type string `hcl:"type"` } `hcl:"noodle,block"` }{}), func(gotI interface{}) bool { noodle := gotI.(struct { Noodle struct { Name string `hcl:"name,label"` Type string `hcl:"type"` } `hcl:"noodle,block"` }).Noodle return noodle.Name == "foo_foo" && noodle.Type == "rice" }, 0, }, { map[string]interface{}{ "name": "Ermintrude", "age": 34, }, makeInstantiateType(map[string]string(nil)), deepEquals(map[string]string{ "name": "Ermintrude", "age": "34", }), 0, }, { map[string]interface{}{ "name": "Ermintrude", "age": 89, }, makeInstantiateType(map[string]*hcl.Attribute(nil)), func(gotI interface{}) bool { got := gotI.(map[string]*hcl.Attribute) return len(got) == 2 && got["name"] != nil && got["age"] != nil }, 0, }, { map[string]interface{}{ "name": "Ermintrude", "age": 13, }, makeInstantiateType(map[string]hcl.Expression(nil)), func(gotI interface{}) bool { got := gotI.(map[string]hcl.Expression) return len(got) == 2 && got["name"] != nil && got["age"] != nil }, 0, }, { map[string]interface{}{ "name": "Ermintrude", "living": true, }, makeInstantiateType(map[string]cty.Value(nil)), deepEquals(map[string]cty.Value{ "name": cty.StringVal("Ermintrude"), "living": cty.True, }), 0, }, { // Retain "nested" block while decoding map[string]interface{}{ "plain": "foo", }, func() interface{} { return &withNestedBlock{ Plain: "bar", Nested: &withTwoAttributes{ A: "bar", }, } }, func(gotI interface{}) bool { foo := gotI.(withNestedBlock) return foo.Plain == "foo" && foo.Nested != nil && foo.Nested.A == "bar" }, 0, }, { // Retain values in "nested" block while decoding map[string]interface{}{ "nested": map[string]interface{}{ "a": "foo", }, }, func() interface{} { return &withNestedBlock{ Nested: &withTwoAttributes{ B: "bar", }, } }, func(gotI interface{}) bool { foo := gotI.(withNestedBlock) return foo.Nested.A == "foo" && foo.Nested.B == "bar" }, 0, }, { // Retain values in "nested" block list while decoding map[string]interface{}{ "nested": []map[string]interface{}{ { "a": "foo", }, }, }, func() interface{} { return &withListofNestedBlocks{ Nested: []*withTwoAttributes{ &withTwoAttributes{ B: "bar", }, }, } }, func(gotI interface{}) bool { n := gotI.(withListofNestedBlocks) return n.Nested[0].A == "foo" && n.Nested[0].B == "bar" }, 0, }, { // Remove additional elements from the list while decoding nested blocks map[string]interface{}{ "nested": []map[string]interface{}{ { "a": "foo", }, }, }, func() interface{} { return &withListofNestedBlocks{ Nested: []*withTwoAttributes{ &withTwoAttributes{ B: "bar", }, &withTwoAttributes{ B: "bar", }, }, } }, func(gotI interface{}) bool { n := gotI.(withListofNestedBlocks) return len(n.Nested) == 1 }, 0, }, { // Make sure decoding value slices works the same as pointer 
slices. map[string]interface{}{ "nested": []map[string]interface{}{ { "b": "bar", }, { "b": "baz", }, }, }, func() interface{} { return &withListofNestedBlocksNoPointers{ Nested: []withTwoAttributes{ { B: "foo", }, }, } }, func(gotI interface{}) bool { n := gotI.(withListofNestedBlocksNoPointers) return n.Nested[0].B == "bar" && len(n.Nested) == 2 }, 0, }, } for i, test := range tests { // For convenience here we're going to use the JSON parser // to process the given body. buf, err := json.Marshal(test.Body) if err != nil { t.Fatalf("error JSON-encoding body for test %d: %s", i, err) } t.Run(string(buf), func(t *testing.T) { file, diags := hclJSON.Parse(buf, "test.json") if len(diags) != 0 { t.Fatalf("diagnostics while parsing: %s", diags.Error()) } targetVal := reflect.ValueOf(test.Target()) diags = DecodeBody(file.Body, nil, targetVal.Interface()) if len(diags) != test.DiagCount { t.Errorf("wrong number of diagnostics %d; want %d", len(diags), test.DiagCount) for _, diag := range diags { t.Logf(" - %s", diag.Error()) } } got := targetVal.Elem().Interface() if !test.Check(got) { t.Errorf("wrong result\ngot: %s", spew.Sdump(got)) } }) } } func TestDecodeExpression(t *testing.T) { tests := []struct { Value cty.Value Target interface{} Want interface{} DiagCount int }{ { cty.StringVal("hello"), "", "hello", 0, }, { cty.StringVal("hello"), cty.NilVal, cty.StringVal("hello"), 0, }, { cty.NumberIntVal(2), "", "2", 0, }, { cty.StringVal("true"), false, true, 0, }, { cty.NullVal(cty.String), "", "", 1, // null value is not allowed }, { cty.UnknownVal(cty.String), "", "", 1, // value must be known }, { cty.ListVal([]cty.Value{cty.True}), false, false, 1, // bool required }, } for i, test := range tests { t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) { expr := &fixedExpression{test.Value} targetVal := reflect.New(reflect.TypeOf(test.Target)) diags := DecodeExpression(expr, nil, targetVal.Interface()) if len(diags) != test.DiagCount { t.Errorf("wrong number of diagnostics %d; want %d", len(diags), test.DiagCount) for _, diag := range diags { t.Logf(" - %s", diag.Error()) } } got := targetVal.Elem().Interface() if !reflect.DeepEqual(got, test.Want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, test.Want) } }) } } type fixedExpression struct { val cty.Value } func (e *fixedExpression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { return e.val, nil } func (e *fixedExpression) Range() (r hcl.Range) { return } func (e *fixedExpression) StartRange() (r hcl.Range) { return } func (e *fixedExpression) Variables() []hcl.Traversal { return nil } func makeInstantiateType(target interface{}) func() interface{} { return func() interface{} { return reflect.New(reflect.TypeOf(target)).Interface() } } hcl-2.14.1/gohcl/doc.go000066400000000000000000000064351431334125700145500ustar00rootroot00000000000000// Package gohcl allows decoding HCL configurations into Go data structures. // // It provides a convenient and concise way of describing the schema for // configuration and then accessing the resulting data via native Go // types. // // A struct field tag scheme is used, similar to other decoding and // unmarshalling libraries. The tags are formatted as in the following example: // // ThingType string `hcl:"thing_type,attr"` // // Within each tag there are two comma-separated tokens. The first is the // name of the corresponding construct in configuration, while the second // is a keyword giving the kind of construct expected. 
The following // kind keywords are supported: // // attr (the default) indicates that the value is to be populated from an attribute // block indicates that the value is to populated from a block // label indicates that the value is to populated from a block label // optional is the same as attr, but the field is optional // remain indicates that the value is to be populated from the remaining body after populating other fields // // "attr" fields may either be of type *hcl.Expression, in which case the raw // expression is assigned, or of any type accepted by gocty, in which case // gocty will be used to assign the value to a native Go type. // // "block" fields may be a struct that recursively uses the same tags, or a // slice of such structs, in which case multiple blocks of the corresponding // type are decoded into the slice. // // "body" can be placed on a single field of type hcl.Body to capture // the full hcl.Body that was decoded for a block. This does not allow leftover // values like "remain", so a decoding error will still be returned if leftover // fields are given. If you want to capture the decoding body PLUS leftover // fields, you must specify a "remain" field as well to prevent errors. The // body field and the remain field will both contain the leftover fields. // // "label" fields are considered only in a struct used as the type of a field // marked as "block", and are used sequentially to capture the labels of // the blocks being decoded. In this case, the name token is used only as // an identifier for the label in diagnostic messages. // // "optional" fields behave like "attr" fields, but they are optional // and will not give parsing errors if they are missing. // // "remain" can be placed on a single field that may be either of type // hcl.Body or hcl.Attributes, in which case any remaining body content is // placed into this field for delayed processing. If no "remain" field is // present then any attributes or blocks not matched by another valid tag // will cause an error diagnostic. // // Only a subset of this tagging/typing vocabulary is supported for the // "Encode" family of functions. See the EncodeIntoBody docs for full details // on the constraints there. // // Broadly-speaking this package deals with two types of error. The first is // errors in the configuration itself, which are returned as diagnostics // written with the configuration author as the target audience. The second // is bugs in the calling program, such as invalid struct tags, which are // surfaced via panics since there can be no useful runtime handling of such // errors and they should certainly not be returned to the user as diagnostics. package gohcl hcl-2.14.1/gohcl/encode.go000066400000000000000000000133321431334125700152320ustar00rootroot00000000000000package gohcl import ( "fmt" "reflect" "sort" "github.com/hashicorp/hcl/v2/hclwrite" "github.com/zclconf/go-cty/cty/gocty" ) // EncodeIntoBody replaces the contents of the given hclwrite Body with // attributes and blocks derived from the given value, which must be a // struct value or a pointer to a struct value with the struct tags defined // in this package. // // This function can work only with fully-decoded data. It will ignore any // fields tagged as "remain", any fields that decode attributes into either // hcl.Attribute or hcl.Expression values, and any fields that decode blocks // into hcl.Attributes values. This function does not have enough information // to complete the decoding of these types. 
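//
// For example, a caller might encode a simple struct like the following.
// This is only an illustrative sketch: the Service type and the values
// assigned to it are hypothetical and not part of this package.
//
//     type Service struct {
//         Name       string `hcl:"name"`
//         ListenAddr string `hcl:"listen_addr"`
//     }
//
//     f := hclwrite.NewEmptyFile()
//     EncodeIntoBody(&Service{Name: "web", ListenAddr: ":8080"}, f.Body())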
// // Any fields tagged as "label" are ignored by this function. Use EncodeAsBlock // to produce a whole hclwrite.Block including block labels. // // As long as a suitable value is given to encode and the destination body // is non-nil, this function will always complete. It will panic in case of // any errors in the calling program, such as passing an inappropriate type // or a nil body. // // The layout of the resulting HCL source is derived from the ordering of // the struct fields, with blank lines around nested blocks of different types. // Fields representing attributes should usually precede those representing // blocks so that the attributes can group togather in the result. For more // control, use the hclwrite API directly. func EncodeIntoBody(val interface{}, dst *hclwrite.Body) { rv := reflect.ValueOf(val) ty := rv.Type() if ty.Kind() == reflect.Ptr { rv = rv.Elem() ty = rv.Type() } if ty.Kind() != reflect.Struct { panic(fmt.Sprintf("value is %s, not struct", ty.Kind())) } tags := getFieldTags(ty) populateBody(rv, ty, tags, dst) } // EncodeAsBlock creates a new hclwrite.Block populated with the data from // the given value, which must be a struct or pointer to struct with the // struct tags defined in this package. // // If the given struct type has fields tagged with "label" tags then they // will be used in order to annotate the created block with labels. // // This function has the same constraints as EncodeIntoBody and will panic // if they are violated. func EncodeAsBlock(val interface{}, blockType string) *hclwrite.Block { rv := reflect.ValueOf(val) ty := rv.Type() if ty.Kind() == reflect.Ptr { rv = rv.Elem() ty = rv.Type() } if ty.Kind() != reflect.Struct { panic(fmt.Sprintf("value is %s, not struct", ty.Kind())) } tags := getFieldTags(ty) labels := make([]string, len(tags.Labels)) for i, lf := range tags.Labels { lv := rv.Field(lf.FieldIndex) // We just stringify whatever we find. It should always be a string // but if not then we'll still do something reasonable. 
labels[i] = fmt.Sprintf("%s", lv.Interface()) } block := hclwrite.NewBlock(blockType, labels) populateBody(rv, ty, tags, block.Body()) return block } func populateBody(rv reflect.Value, ty reflect.Type, tags *fieldTags, dst *hclwrite.Body) { nameIdxs := make(map[string]int, len(tags.Attributes)+len(tags.Blocks)) namesOrder := make([]string, 0, len(tags.Attributes)+len(tags.Blocks)) for n, i := range tags.Attributes { nameIdxs[n] = i namesOrder = append(namesOrder, n) } for n, i := range tags.Blocks { nameIdxs[n] = i namesOrder = append(namesOrder, n) } sort.SliceStable(namesOrder, func(i, j int) bool { ni, nj := namesOrder[i], namesOrder[j] return nameIdxs[ni] < nameIdxs[nj] }) dst.Clear() prevWasBlock := false for _, name := range namesOrder { fieldIdx := nameIdxs[name] field := ty.Field(fieldIdx) fieldTy := field.Type fieldVal := rv.Field(fieldIdx) if fieldTy.Kind() == reflect.Ptr { fieldTy = fieldTy.Elem() fieldVal = fieldVal.Elem() } if _, isAttr := tags.Attributes[name]; isAttr { if exprType.AssignableTo(fieldTy) || attrType.AssignableTo(fieldTy) { continue // ignore undecoded fields } if !fieldVal.IsValid() { continue // ignore (field value is nil pointer) } if fieldTy.Kind() == reflect.Ptr && fieldVal.IsNil() { continue // ignore } if prevWasBlock { dst.AppendNewline() prevWasBlock = false } valTy, err := gocty.ImpliedType(fieldVal.Interface()) if err != nil { panic(fmt.Sprintf("cannot encode %T as HCL expression: %s", fieldVal.Interface(), err)) } val, err := gocty.ToCtyValue(fieldVal.Interface(), valTy) if err != nil { // This should never happen, since we should always be able // to decode into the implied type. panic(fmt.Sprintf("failed to encode %T as %#v: %s", fieldVal.Interface(), valTy, err)) } dst.SetAttributeValue(name, val) } else { // must be a block, then elemTy := fieldTy isSeq := false if elemTy.Kind() == reflect.Slice || elemTy.Kind() == reflect.Array { isSeq = true elemTy = elemTy.Elem() } if bodyType.AssignableTo(elemTy) || attrsType.AssignableTo(elemTy) { continue // ignore undecoded fields } prevWasBlock = false if isSeq { l := fieldVal.Len() for i := 0; i < l; i++ { elemVal := fieldVal.Index(i) if !elemVal.IsValid() { continue // ignore (elem value is nil pointer) } if elemTy.Kind() == reflect.Ptr && elemVal.IsNil() { continue // ignore } block := EncodeAsBlock(elemVal.Interface(), name) if !prevWasBlock { dst.AppendNewline() prevWasBlock = true } dst.AppendBlock(block) } } else { if !fieldVal.IsValid() { continue // ignore (field value is nil pointer) } if elemTy.Kind() == reflect.Ptr && fieldVal.IsNil() { continue // ignore } block := EncodeAsBlock(fieldVal.Interface(), name) if !prevWasBlock { dst.AppendNewline() prevWasBlock = true } dst.AppendBlock(block) } } } } hcl-2.14.1/gohcl/encode_test.go000066400000000000000000000023451431334125700162730ustar00rootroot00000000000000package gohcl_test import ( "fmt" "github.com/hashicorp/hcl/v2/gohcl" "github.com/hashicorp/hcl/v2/hclwrite" ) func ExampleEncodeIntoBody() { type Service struct { Name string `hcl:"name,label"` Exe []string `hcl:"executable"` } type Constraints struct { OS string `hcl:"os"` Arch string `hcl:"arch"` } type App struct { Name string `hcl:"name"` Desc string `hcl:"description"` Constraints *Constraints `hcl:"constraints,block"` Services []Service `hcl:"service,block"` } app := App{ Name: "awesome-app", Desc: "Such an awesome application", Constraints: &Constraints{ OS: "linux", Arch: "amd64", }, Services: []Service{ { Name: "web", Exe: []string{"./web", "--listen=:8080"}, }, { Name: "worker", 
Exe: []string{"./worker"}, }, }, } f := hclwrite.NewEmptyFile() gohcl.EncodeIntoBody(&app, f.Body()) fmt.Printf("%s", f.Bytes()) // Output: // name = "awesome-app" // description = "Such an awesome application" // // constraints { // os = "linux" // arch = "amd64" // } // // service "web" { // executable = ["./web", "--listen=:8080"] // } // service "worker" { // executable = ["./worker"] // } } hcl-2.14.1/gohcl/schema.go000066400000000000000000000102411431334125700152310ustar00rootroot00000000000000package gohcl import ( "fmt" "reflect" "sort" "strings" "github.com/hashicorp/hcl/v2" ) // ImpliedBodySchema produces a hcl.BodySchema derived from the type of the // given value, which must be a struct value or a pointer to one. If an // inappropriate value is passed, this function will panic. // // The second return argument indicates whether the given struct includes // a "remain" field, and thus the returned schema is non-exhaustive. // // This uses the tags on the fields of the struct to discover how each // field's value should be expressed within configuration. If an invalid // mapping is attempted, this function will panic. func ImpliedBodySchema(val interface{}) (schema *hcl.BodySchema, partial bool) { ty := reflect.TypeOf(val) if ty.Kind() == reflect.Ptr { ty = ty.Elem() } if ty.Kind() != reflect.Struct { panic(fmt.Sprintf("given value must be struct, not %T", val)) } var attrSchemas []hcl.AttributeSchema var blockSchemas []hcl.BlockHeaderSchema tags := getFieldTags(ty) attrNames := make([]string, 0, len(tags.Attributes)) for n := range tags.Attributes { attrNames = append(attrNames, n) } sort.Strings(attrNames) for _, n := range attrNames { idx := tags.Attributes[n] optional := tags.Optional[n] field := ty.Field(idx) var required bool switch { case field.Type.AssignableTo(exprType): // If we're decoding to hcl.Expression then absense can be // indicated via a null value, so we don't specify that // the field is required during decoding. 
required = false case field.Type.Kind() != reflect.Ptr && !optional: required = true default: required = false } attrSchemas = append(attrSchemas, hcl.AttributeSchema{ Name: n, Required: required, }) } blockNames := make([]string, 0, len(tags.Blocks)) for n := range tags.Blocks { blockNames = append(blockNames, n) } sort.Strings(blockNames) for _, n := range blockNames { idx := tags.Blocks[n] field := ty.Field(idx) fty := field.Type if fty.Kind() == reflect.Slice { fty = fty.Elem() } if fty.Kind() == reflect.Ptr { fty = fty.Elem() } if fty.Kind() != reflect.Struct { panic(fmt.Sprintf( "hcl 'block' tag kind cannot be applied to %s field %s: struct required", field.Type.String(), field.Name, )) } ftags := getFieldTags(fty) var labelNames []string if len(ftags.Labels) > 0 { labelNames = make([]string, len(ftags.Labels)) for i, l := range ftags.Labels { labelNames[i] = l.Name } } blockSchemas = append(blockSchemas, hcl.BlockHeaderSchema{ Type: n, LabelNames: labelNames, }) } partial = tags.Remain != nil schema = &hcl.BodySchema{ Attributes: attrSchemas, Blocks: blockSchemas, } return schema, partial } type fieldTags struct { Attributes map[string]int Blocks map[string]int Labels []labelField Remain *int Body *int Optional map[string]bool } type labelField struct { FieldIndex int Name string } func getFieldTags(ty reflect.Type) *fieldTags { ret := &fieldTags{ Attributes: map[string]int{}, Blocks: map[string]int{}, Optional: map[string]bool{}, } ct := ty.NumField() for i := 0; i < ct; i++ { field := ty.Field(i) tag := field.Tag.Get("hcl") if tag == "" { continue } comma := strings.Index(tag, ",") var name, kind string if comma != -1 { name = tag[:comma] kind = tag[comma+1:] } else { name = tag kind = "attr" } switch kind { case "attr": ret.Attributes[name] = i case "block": ret.Blocks[name] = i case "label": ret.Labels = append(ret.Labels, labelField{ FieldIndex: i, Name: name, }) case "remain": if ret.Remain != nil { panic("only one 'remain' tag is permitted") } idx := i // copy, because this loop will continue assigning to i ret.Remain = &idx case "body": if ret.Body != nil { panic("only one 'body' tag is permitted") } idx := i // copy, because this loop will continue assigning to i ret.Body = &idx case "optional": ret.Attributes[name] = i ret.Optional[name] = true default: panic(fmt.Sprintf("invalid hcl field tag kind %q on %s %q", kind, field.Type.String(), field.Name)) } } return ret } hcl-2.14.1/gohcl/schema_test.go000066400000000000000000000074401431334125700162770ustar00rootroot00000000000000package gohcl import ( "fmt" "reflect" "testing" "github.com/davecgh/go-spew/spew" "github.com/hashicorp/hcl/v2" ) func TestImpliedBodySchema(t *testing.T) { tests := []struct { val interface{} wantSchema *hcl.BodySchema wantPartial bool }{ { struct{}{}, &hcl.BodySchema{}, false, }, { struct { Ignored bool }{}, &hcl.BodySchema{}, false, }, { struct { Attr1 bool `hcl:"attr1"` Attr2 bool `hcl:"attr2"` }{}, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "attr1", Required: true, }, { Name: "attr2", Required: true, }, }, }, false, }, { struct { Attr *bool `hcl:"attr,attr"` }{}, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "attr", Required: false, }, }, }, false, }, { struct { Thing struct{} `hcl:"thing,block"` }{}, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "thing", }, }, }, false, }, { struct { Thing struct { Type string `hcl:"type,label"` Name string `hcl:"name,label"` } `hcl:"thing,block"` }{}, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "thing", 
LabelNames: []string{"type", "name"}, }, }, }, false, }, { struct { Thing []struct { Type string `hcl:"type,label"` Name string `hcl:"name,label"` } `hcl:"thing,block"` }{}, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "thing", LabelNames: []string{"type", "name"}, }, }, }, false, }, { struct { Thing *struct { Type string `hcl:"type,label"` Name string `hcl:"name,label"` } `hcl:"thing,block"` }{}, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "thing", LabelNames: []string{"type", "name"}, }, }, }, false, }, { struct { Thing struct { Name string `hcl:"name,label"` Something string `hcl:"something"` } `hcl:"thing,block"` }{}, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "thing", LabelNames: []string{"name"}, }, }, }, false, }, { struct { Doodad string `hcl:"doodad"` Thing struct { Name string `hcl:"name,label"` } `hcl:"thing,block"` }{}, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "doodad", Required: true, }, }, Blocks: []hcl.BlockHeaderSchema{ { Type: "thing", LabelNames: []string{"name"}, }, }, }, false, }, { struct { Doodad string `hcl:"doodad"` Config string `hcl:",remain"` }{}, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "doodad", Required: true, }, }, }, true, }, { struct { Expr hcl.Expression `hcl:"expr"` }{}, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "expr", Required: false, }, }, }, false, }, { struct { Meh string `hcl:"meh,optional"` }{}, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "meh", Required: false, }, }, }, false, }, } for _, test := range tests { t.Run(fmt.Sprintf("%#v", test.val), func(t *testing.T) { schema, partial := ImpliedBodySchema(test.val) if !reflect.DeepEqual(schema, test.wantSchema) { t.Errorf( "wrong schema\ngot: %s\nwant: %s", spew.Sdump(schema), spew.Sdump(test.wantSchema), ) } if partial != test.wantPartial { t.Errorf( "wrong partial flag\ngot: %#v\nwant: %#v", partial, test.wantPartial, ) } }) } } hcl-2.14.1/gohcl/types.go000066400000000000000000000005741431334125700151450ustar00rootroot00000000000000package gohcl import ( "reflect" "github.com/hashicorp/hcl/v2" ) var victimExpr hcl.Expression var victimBody hcl.Body var exprType = reflect.TypeOf(&victimExpr).Elem() var bodyType = reflect.TypeOf(&victimBody).Elem() var blockType = reflect.TypeOf((*hcl.Block)(nil)) var attrType = reflect.TypeOf((*hcl.Attribute)(nil)) var attrsType = reflect.TypeOf(hcl.Attributes(nil)) hcl-2.14.1/guide/000077500000000000000000000000001431334125700134455ustar00rootroot00000000000000hcl-2.14.1/guide/.gitignore000066400000000000000000000000171431334125700154330ustar00rootroot00000000000000env/* _build/* hcl-2.14.1/guide/Makefile000066400000000000000000000011301431334125700151000ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SPHINXPROJ = HCL SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
%: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)hcl-2.14.1/guide/conf.py000066400000000000000000000111271431334125700147460ustar00rootroot00000000000000import subprocess import os import os.path # -- Project information ----------------------------------------------------- project = u'HCL' copyright = u'2018, HashiCorp' author = u'HashiCorp' if 'READTHEDOCS_VERSION' in os.environ: version_str = os.environ['READTHEDOCS_VERSION'] else: version_str = subprocess.check_output(['git', 'describe', '--always']).strip() # The short X.Y version version = unicode(version_str) # The full version, including alpha/beta/rc tags release = unicode(version_str) # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.todo', 'sphinx.ext.githubpages', 'sphinxcontrib.golangdomain', 'sphinx.ext.autodoc', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path . exclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store', 'env'] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'alabaster' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = 'HCLdoc' # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. 
# # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'HCL.tex', u'HCL Documentation', u'HashiCorp', 'manual'), ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'hcl', u'HCL Documentation', [author], 1) ] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'HCL', u'HCL Documentation', author, 'HCL', 'One line description of project.', 'Miscellaneous'), ] # -- Extension configuration ------------------------------------------------- # -- Options for todo extension ---------------------------------------------- # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True hcl-2.14.1/guide/go.rst000066400000000000000000000017321431334125700146070ustar00rootroot00000000000000Using HCL in a Go application ============================= HCL is itself written in Go_ and currently it is primarily intended for use as a library within other Go programs. This section describes a number of different ways HCL can be used to define and process a configuration language within a Go program. For simple situations, HCL can decode directly into Go ``struct`` values in a similar way as encoding packages such as ``encoding/json`` and ``encoding/xml``. The HCL Go API also offers some alternative approaches however, for processing languages that may be more complex or that include portions whose expected structure cannot be determined until runtime. The following sections give an overview of different ways HCL can be used in a Go program. .. toctree:: :maxdepth: 1 :caption: Sub-sections: go_parsing go_diagnostics go_decoding_gohcl go_decoding_hcldec go_expression_eval go_decoding_lowlevel go_patterns .. _Go: https://golang.org/ hcl-2.14.1/guide/go_decoding_gohcl.rst000066400000000000000000000117571431334125700176270ustar00rootroot00000000000000.. go:package:: gohcl .. _go-decoding-gohcl: Decoding Into Native Go Values ============================== The most straightforward way to access the content of an HCL file is to decode into native Go values using ``reflect``, similar to the technique used by packages like ``encoding/json`` and ``encoding/xml``. Package ``gohcl`` provides functions for this sort of decoding. Function ``DecodeBody`` attempts to extract values from an HCL *body* and write them into a Go value given as a pointer: .. code-block:: go type ServiceConfig struct { Type string `hcl:"type,label"` Name string `hcl:"name,label"` ListenAddr string `hcl:"listen_addr"` } type Config struct { IOMode string `hcl:"io_mode"` Services []ServiceConfig `hcl:"service,block"` } var c Config moreDiags := gohcl.DecodeBody(f.Body, nil, &c) diags = append(diags, moreDiags...) The above example decodes the *root body* of a file ``f``, presumably loaded previously using a parser, into the variable ``c``. The field labels within the struct types imply the schema of the expected language, which is a cut-down version of the hypothetical language we showed in :ref:`intro`. 
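As a brief illustration, a configuration file like the following would decode
into that ``Config`` value, producing one ``ServiceConfig`` element per
``service`` block. (The file contents here are hypothetical, invented only to
show the shape implied by the struct tags.)

.. code-block:: hcl

    io_mode = "async"

    service "http" "web_proxy" {
      listen_addr = "127.0.0.1:8080"
    }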
The struct field labels consist of two comma-separated values. The first is the name of the corresponding argument or block type as it will appear in the input file, and the second is the type of element being named. If the second value is omitted, it defaults to ``attr``, requesting an attribute. Nested blocks are represented by a struct or a slice of that struct, and the special element type ``label`` within that struct declares that each instance of that block type must be followed by one or more block labels. In the above example, the ``service`` block type is defined to require two labels, named ``type`` and ``name``. For label fields in particular, the given name is used only to refer to the particular label in error messages when the wrong number of labels is used. By default, all declared attributes and blocks are considered to be required. An optional value is indicated by making its field have a pointer type, in which case ``nil`` is written to indicate the absense of the argument. The sections below discuss some additional decoding use-cases. For full details on the `gohcl` package, see `the godoc reference `_. .. _go-decoding-gohcl-evalcontext: Variables and Functions ----------------------- By default, arguments given in the configuration may use only literal values and the built in expression language operators, such as arithmetic. The second argument to ``gohcl.DecodeBody``, shown as ``nil`` in the previous example, allows the calling application to additionally offer variables and functions for use in expressions. Its value is a pointer to an ``hcl.EvalContext``, which will be covered in more detail in the later section :ref:`go-expression-eval`. For now, a simple example of making the id of the current process available as a single variable called ``pid``: .. code-block:: go type Context struct { Pid string } ctx := gohcl.EvalContext(&Context{ Pid: os.Getpid() }) var c Config moreDiags := gohcl.DecodeBody(f.Body, ctx, &c) diags = append(diags, moreDiags...) ``gohcl.EvalContext`` constructs an expression evaluation context from a Go struct value, making the fields available as variables and the methods available as functions, after transforming the field and method names such that each word (starting with an uppercase letter) is all lowercase and separated by underscores. .. code-block:: hcl name = "example-program (${pid})" Partial Decoding ---------------- In the examples so far, we've extracted the content from the entire input file in a single call to ``DecodeBody``. This is sufficient for many simple situations, but sometimes different parts of the file must be evaluated separately. For example: * If different parts of the file must be evaluated with different variables or functions available. * If the result of evaluating one part of the file is used to set variables or functions in another part of the file. There are several ways to perform partial decoding with ``gohcl``, all of which involve decoding into HCL's own types, such as ``hcl.Body``. The most general approach is to declare an additional struct field of type ``hcl.Body``, with the special field tag type ``remain``: .. 
code-block:: go type ServiceConfig struct { Type string `hcl:"type,label"` Name string `hcl:"name,label"` ListenAddr string `hcl:"listen_addr"` Remain hcl.Body `hcl:",remain"` } When a ``remain`` field is present, any element of the input body that is not matched is retained in a body saved into that field, which can then be decoded in a later call, potentially with a different evaluation context. Another option is to decode an attribute into a value of type `hcl.Expression`, which can then be evaluated separately as described in :ref:`expression-eval`. hcl-2.14.1/guide/go_decoding_hcldec.rst000066400000000000000000000221731431334125700177470ustar00rootroot00000000000000.. go:package:: hcldec .. _go-decoding-hcldec: Decoding With Dynamic Schema ============================ In section :ref:`go-decoding-gohcl`, we saw the most straightforward way to access the content from an HCL file, decoding directly into a Go value whose type is known at application compile time. For some applications, it is not possible to know the schema of the entire configuration when the application is built. For example, `HashiCorp Terraform`_ uses HCL as the foundation of its configuration language, but parts of the configuration are handled by plugins loaded dynamically at runtime, and so the schemas for these portions cannot be encoded directly in the Terraform source code. HCL's ``hcldec`` package offers a different approach to decoding that allows schemas to be created at runtime, and the result to be decoded into dynamically-typed data structures. The sections below are an overview of the main parts of package ``hcldec``. For full details, see `the package godoc `_. .. _`HashiCorp Terraform`: https://www.terraform.io/ Decoder Specification --------------------- Whereas :go:pkg:`gohcl` infers the expected schema by using reflection against the given value, ``hcldec`` obtains schema through a decoding *specification*, which is a set of instructions for mapping HCL constructs onto a dynamic data structure. The ``hcldec`` package contains a number of different specifications, each implementing :go:type:`hcldec.Spec` and having a ``Spec`` suffix on its name. Each spec has two distinct functions: * Adding zero or more validation constraints on the input configuration file. * Producing a result value based on some elements from the input file. The most common pattern is for the top-level spec to be a :go:type:`hcldec.ObjectSpec` with nested specifications defining either blocks or attributes, depending on whether the configuration file will be block-structured or flat. .. code-block:: go spec := hcldec.ObjectSpec{ "io_mode": &hcldec.AttrSpec{ Name: "io_mode", Type: cty.String, }, "services": &hcldec.BlockMapSpec{ TypeName: "service", LabelNames: []string{"type", "name"}, Nested: hcldec.ObjectSpec{ "listen_addr": &hcldec.AttrSpec{ Name: "listen_addr", Type: cty.String, Required: true, }, "processes": &hcldec.BlockMapSpec{ TypeName: "service", LabelNames: []string{"name"}, Nested: hcldec.ObjectSpec{ "command": &hcldec.AttrSpec{ Name: "command", Type: cty.List(cty.String), Required: true, }, }, }, }, }, } val, moreDiags := hcldec.Decode(f.Body, spec, nil) diags = append(diags, moreDiags...) The above specification expects a configuration shaped like our example in :ref:`intro`, and calls for it to be decoded into a dynamic data structure that would have the following shape if serialized to JSON: .. 
code-block:: JSON { "io_mode": "async", "services": { "http": { "web_proxy": { "listen_addr": "127.0.0.1:8080", "processes": { "main": { "command": ["/usr/local/bin/awesome-app", "server"] }, "mgmt": { "command": ["/usr/local/bin/awesome-app", "mgmt"] } } } } } } .. go:package:: cty Types and Values With ``cty`` ----------------------------- HCL's expression interpreter is implemented in terms of another library called :go:pkg:`cty`, which provides a type system which HCL builds on and a robust representation of dynamic values in that type system. You could think of :go:pkg:`cty` as being a bit like Go's own :go:pkg:`reflect`, but for the results of HCL expressions rather than Go programs. The full details of this system can be found in `its own repository `_, but this section will cover the most important highlights, because ``hcldec`` specifications include :go:pkg:`cty` types (as seen in the above example) and its results are :go:pkg:`cty` values. ``hcldec`` works directly with :go:pkg:`cty` — as opposed to converting values directly into Go native types — because the functionality of the :go:pkg:`cty` packages then allows further processing of those values without any loss of fidelity or range. For example, :go:pkg:`cty` defines a JSON encoding of its values that can be decoded losslessly as long as both sides agree on the value type that is expected, which is a useful capability in systems where some sort of RPC barrier separates the main program from its plugins. Types are instances of :go:type:`cty.Type`, and are constructed from functions and variables in :go:pkg:`cty` as shown in the above example, where the string attributes are typed as ``cty.String``, which is a primitive type, and the list attribute is typed as ``cty.List(cty.String)``, which constructs a new list type with string elements. Values are instances of :go:type:`cty.Value`, and can also be constructed from functions in :go:pkg:`cty`, using the functions that include ``Val`` in their names or using the operation methods available on :go:type:`cty.Value`. In most cases you will eventually want to use the resulting data as native Go types, to pass it to non-:go:pkg:`cty`-aware code. To do this, see the guides on `Converting between types `_ (staying within :go:pkg:`cty`) and `Converting to and from native Go values `_. Partial Decoding ---------------- Because the ``hcldec`` result is always a value, the input is always entirely processed in a single call, unlike with :go:pkg:`gohcl`. However, both :go:pkg:`gohcl` and :go:pkg:`hcldec` take :go:type:`hcl.Body` as the representation of input, and so it is possible and common to mix them both in the same program. A common situation is that :go:pkg:`gohcl` is used in the main program to decode the top level of configuration, which then allows the main program to determine which plugins need to be loaded to process the leaf portions of configuration. In this case, the portions that will be interpreted by plugins are retained as opaque :go:type:`hcl.Body` until the plugins have been loaded, and then each plugin provides its :go:type:`hcldec.Spec` to allow decoding the plugin-specific configuration into a :go:type:`cty.Value` which be transmitted to the plugin for further processing. In our example from :ref:`intro`, perhaps each of the different service types is managed by a plugin, and so the main program would decode the block headers to learn which plugins are needed, but process the block bodies dynamically: .. 
code-block:: go type ServiceConfig struct { Type string `hcl:"type,label"` Name string `hcl:"name,label"` PluginConfig hcl.Body `hcl:",remain"` } type Config struct { IOMode string `hcl:"io_mode"` Services []ServiceConfig `hcl:"service,block"` } var c Config moreDiags := gohcl.DecodeBody(f.Body, nil, &c) diags = append(diags, moreDiags...) if moreDiags.HasErrors() { // (show diags in the UI) return } for _, sc := range c.Services { pluginName := block.Type // Totally-hypothetical plugin manager (not part of HCL) plugin, err := pluginMgr.GetPlugin(pluginName) if err != nil { diags = diags.Append(&hcl.Diagnostic{ /* ... */ }) continue } spec := plugin.ConfigSpec() // returns hcldec.Spec // Decode the block body using the plugin's given specification configVal, moreDiags := hcldec.Decode(sc.PluginConfig, spec, nil) diags = append(diags, moreDiags...) if moreDiags.HasErrors() { continue } // Again, hypothetical API within your application itself, and not // part of HCL. Perhaps plugin system serializes configVal as JSON // and sends it over to the plugin. svc := plugin.NewService(configVal) serviceMgr.AddService(sc.Name, svc) } Variables and Functions ----------------------- The final argument to ``hcldec.Decode`` is an expression evaluation context, just as with ``gohcl.DecodeBlock``. This object can be constructed using :ref:`the gohcl helper function ` as before if desired, but you can also choose to work directly with :go:type:`hcl.EvalContext` as discussed in :ref:`go-expression-eval`: .. code-block:: go ctx := &hcl.EvalContext{ Variables: map[string]cty.Value{ "pid": cty.NumberIntVal(int64(os.Getpid())), }, } val, moreDiags := hcldec.Decode(f.Body, spec, ctx) diags = append(diags, moreDiags...) As you can see, this lower-level API also uses :go:pkg:`cty`, so it can be particularly convenient in situations where the result of dynamically decoding one block must be available to expressions in another block. hcl-2.14.1/guide/go_decoding_lowlevel.rst000066400000000000000000000207161431334125700203570ustar00rootroot00000000000000.. _go-decoding-lowlevel: Advanced Decoding With The Low-level API ======================================== In previous sections we've discussed :go:pkg:`gohcl` and :go:pkg:`hcldec`, which both deal with decoding of HCL bodies and the expressions within them using a high-level description of the expected configuration schema. Both of these packages are implemented in terms of HCL's low-level decoding interfaces, which we will explore in this section. HCL decoding in the low-level API has two distinct phases: * Structural decoding: analyzing the arguments and nested blocks present in a particular body. * Expression evaluation: obtaining final values for each argument expression found during structural decoding. The low-level API gives the calling application full control over when each body is decoded and when each expression is evaluated, allowing for more complex configuration formats where e.g. different variables are available in different contexts, or perhaps expressions within one block can refer to values defined in another block. The low-level API also gives more detailed access to source location information for decoded elements, and so may be desirable for applications that do a lot of additional validation of decoded data where more specific source locations lead to better diagnostic messages. 
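As a quick preview of how the two phases fit together, a minimal sketch
follows. It assumes that ``body`` is an :go:type:`hcl.Body`, and that
``schema`` and ``ctx`` are a body schema and evaluation context like the
ones constructed in the subsections below:

.. code-block:: go

    // Phase 1: structural decoding of the body against a schema.
    content, moreDiags := body.Content(schema)
    diags = append(diags, moreDiags...)

    // Phase 2: expression evaluation for an individual argument.
    if attr, ok := content.Attributes["io_mode"]; ok {
        val, moreDiags := attr.Expr.Value(ctx)
        diags = append(diags, moreDiags...)
        _ = val // a cty.Value, ready for conversion or further checks
    }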
Since all of the decoding mechanisms work with the same :go:type:`hcl.Body` type, it is fine and expected to mix them within an application to get access to the more detailed information where needed while using the higher-level APIs for the more straightforward portions of a configuration language. The following subsections will give an overview of the low-level API. For full details, see `the godoc reference `_. Structural Decoding ------------------- As seen in prior sections, :go:type:`hcl.Body` is an opaque representation of the arguments and child blocks at a particular nesting level. An HCL file has a root body containing the top-level elements, and then each nested block has its own body presenting its own content. :go:type:`hcl.Body` is a Go interface whose methods serve as the structural decoding API: .. go:currentpackage:: hcl .. go:type:: Body Represents the structural elements at a particular nesting level. .. go:function:: func (b Body) Content(schema *BodySchema) (*BodyContent, Diagnostics) Decode the content from the receiving body using the given schema. The schema is considered exhaustive of all content within the body, and so any elements not covered by the schema will generate error diagnostics. .. go:function:: func (b Body) PartialContent(schema *BodySchema) (*BodyContent, Body, Diagnostics) Similar to `Content`, but allows for additional arguments and block types that are not described in the given schema. The additional body return value is a special body that contains only the *remaining* elements, after extraction of the ones covered by the schema. This returned body can be used to decode the remaining content elsewhere in the calling program. .. go:function:: func (b Body) JustAttributes() (Attributes, Diagnostics) Decode the content from the receving body in a special *attributes-only* mode, allowing the calling application to enumerate the arguments given inside the body without needing to predict them in schema. When this method is used, a body can be treated somewhat like a map expression, but it still has a rigid structure where the arguments must be given directly with no expression evaluation. This is an advantage for declarations that must themselves be resolved before expression evaluation is possible. If the body contains any blocks, error diagnostics are returned. JSON syntax relies on schema to distinguish arguments from nested blocks, and so a JSON body in attributes-only mode will treat all JSON object properties as arguments. .. go:function:: func (b Body) MissingItemRange() Range Returns a source range that points to where an absent required item in the body might be placed. This is a "best effort" sort of thing, required only to be somewhere inside the receving body, as a way to give source location information for a "missing required argument" sort of error. The main content-decoding methods each require a :go:type:`hcl.BodySchema` object describing the expected content. The fields of this type describe the expected arguments and nested block types respectively: .. code-block:: go schema := &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "io_mode", Required: false, }, }, Blocks: []hcl.BlockHeaderSchema{ { Type: "service", LabelNames: []string{"type", "name"}, }, }, } content, moreDiags := body.Content(schema) diags = append(diags, moreDiags...) :go:type:`hcl.BodyContent` is the result of both ``Content`` and ``PartialContent``, giving the actual attributes and nested blocks that were found. 
Since arguments are uniquely named within a body and unordered, they are returned as a map. Nested blocks are ordered and may have many instances of a given type, so they are returned all together in a single slice for further interpretation by the caller. Unlike the two higher-level approaches, the low-level API *always* works only with one nesting level at a time. Decoding a nested block returns the "header" for that block, giving its type and label values, but its body remains an :go:type:`hcl.Body` for later decoding. Each returned attribute corresponds to one of the arguments in the body, and it has an :go:type:`hcl.Expression` object that can be used to obtain a value for the argument during expression evaluation, as described in the next section. Expression Evaluation --------------------- Expression evaluation *in general* has its own section, imaginitively titled :ref:`go-expression-eval`, so this section will focus only on how it is achieved in the low-level API. All expression evaluation in the low-level API starts with an :go:type:`hcl.Expression` object. This is another interface type, with various implementations depending on the expression type and the syntax it was parsed from. .. go:currentpackage:: hcl .. go:type:: Expression Represents a unevaluated single expression. .. go:function:: func (e Expression) Value(ctx *EvalContext) (cty.Value, Diagnostics) Evaluates the receiving expression in the given evaluation context. The result is a :go:type:`cty.Value` representing the result value, along with any diagnostics that were raised during evaluation. If the diagnostics contains errors, the value may be incomplete or invalid and should either be discarded altogether or used with care for analysis. .. go:function:: func (e Expression) Variables() []Traversal Returns information about any nested expressions that access variables from the *global* evaluation context. Does not include references to temporary local variables, such as those generated by a "``for`` expression". .. go:function:: func (e Expression) Range() Range Returns the source range for the entire expression. This can be useful when generating application-specific diagnostic messages, such as value validation errors. .. go:function:: func (e Expression) StartRange() Range Similar to ``Range``, but if the expression is complex, such as a tuple or object constructor, may indicate only the opening tokens for the construct to avoid creating an overwhelming source code snippet. This should be used in diagnostic messages only in situations where the error is clearly with the construct itself and not with the overall expression. For example, a type error indicating that a tuple was not expected might use ``StartRange`` to draw attention to the beginning of a tuple constructor, without highlighting the entire expression. Method ``Value`` is the primary API for expressions, and takes the same kind of evaluation context object described in :ref:`go-expression-eval`. .. code-block:: go ctx := &hcl.EvalContext{ Variables: map[string]cty.Value{ "name": cty.StringVal("Ermintrude"), "age": cty.NumberIntVal(32), }, } val, moreDiags := expr.Value(ctx) diags = append(diags, moreDiags...) hcl-2.14.1/guide/go_diagnostics.rst000066400000000000000000000073731431334125700172050ustar00rootroot00000000000000.. _go-diagnostics: Diagnostic Messages =================== An important concern for any machine language intended for human authoring is to produce good error messages when the input is somehow invalid, or has other problems. 
HCL uses *diagnostics* to describe problems in an end-user-oriented manner, such that the calling application can render helpful error or warning messages. The word "diagnostic" is a general term that covers both errors and warnings, where errors are problems that prevent complete processing while warnings are possible concerns that do not block processing. HCL deviates from usual Go API practice by returning its own ``hcl.Diagnostics`` type, instead of Go's own ``error`` type. This allows functions to return warnings without accompanying errors while not violating the usual expectation that the absense of errors is indicated by a nil ``error``. In order to easily accumulate and return multiple diagnostics at once, the usual pattern for functions returning diagnostics is to gather them in a local variable and then return it at the end of the function, or possibly earlier if the function cannot continue due to the problems. .. code-block:: go func returningDiagnosticsExample() hcl.Diagnostics { var diags hcl.Diagnostics // ... // Call a function that may itself produce diagnostics. f, moreDiags := parser.LoadHCLFile("example.conf") // always append, in case warnings are present diags = append(diags, moreDiags...) if diags.HasErrors() { // If we can't safely continue in the presence of errors here, we // can optionally return early. return diags } // ... return diags } A common variant of the above pattern is calling another diagnostics-generating function in a loop, using ``continue`` to begin the next iteration when errors are detected, but still completing all iterations and returning the union of all of the problems encountered along the way. In :ref:`go-parsing`, we saw that the parser can generate diagnostics which are related to syntax problems within the loaded file. Further steps to decode content from the loaded file can also generate diagnostics related to *semantic* problems within the file, such as invalid expressions or type mismatches, and so a program using HCL will generally need to accumulate diagnostics across these various steps and then render them in the application UI somehow. Rendering Diagnostics in the UI ------------------------------- The best way to render diagnostics to an end-user will depend a lot on the type of application: they might be printed into a terminal, written into a log for later review, or even shown in a GUI. HCL leaves the responsibility for rendering diagnostics to the calling application, but since rendering to a terminal is a common case for command-line tools, the `hcl` package contains a default implementation of this in the form of a "diagnostic text writer": .. code-block:: go wr := hcl.NewDiagnosticTextWriter( os.Stdout, // writer to send messages to parser.Files(), // the parser's file cache, for source snippets 78, // wrapping width true, // generate colored/highlighted output ) wr.WriteDiagnostics(diags) This default implementation of diagnostic rendering includes relevant lines of source code for context, like this: :: Error: Unsupported block type on example.tf line 4, in resource "aws_instance" "example": 2: provisionr "local-exec" { Blocks of type "provisionr" are not expected here. Did you mean "provisioner"? If the "color" flag is enabled, the severity will be additionally indicated by a text color and the relevant portion of the source code snippet will be underlined to draw further attention. hcl-2.14.1/guide/go_expression_eval.rst000066400000000000000000000135621431334125700201010ustar00rootroot00000000000000.. 
_go-expression-eval: Expression Evaluation ===================== Each argument attribute in a configuration file is interpreted as an expression. In the HCL native syntax, certain basic expression functionality is always available, such as arithmetic and template strings, and the calling application can extend this by making available specific variables and/or functions via an *evaluation context*. We saw in :ref:`go-decoding-gohcl` and :ref:`go-decoding-hcldec` some basic examples of populating an evaluation context to make a variable available. This section will look more closely at the ``hcl.EvalContext`` type and how HCL expression evaluation behaves in different cases. This section does not discuss in detail the expression syntax itself. For more information on that, see the HCL Native Syntax specification. .. go:currentpackage:: hcl .. go:type:: EvalContext ``hcl.EvalContext`` is the type used to describe the variables and functions available during expression evaluation, if any. Its usage is described in the following sections. Defining Variables ------------------ As we saw in :ref:`go-decoding-hcldec`, HCL represents values using an underlying library called :go:pkg:`cty`. When defining variables, their values must be given as :go:type:`cty.Value` values. A full description of the types and value constructors in :go:pkg:`cty` is in `the reference documentation `_. Variables in HCL are defined by assigning values into a map from string names to :go:type:`cty.Value`: .. code-block:: go ctx := &hcl.EvalContext{ Variables: map[string]cty.Value{ "name": cty.StringVal("Ermintrude"), "age": cty.NumberIntVal(32), }, } If this evaluation context were passed to one of the evaluation functions we saw in previous sections, the user would be able to refer to these variable names in any argument expression appearing in the evaluated portion of configuration: .. code-block:: hcl message = "${name} is ${age} ${age == 1 ? "year" : "years"} old!" If you place ``cty``'s *object* values in the evaluation context, then their attributes can be referenced using the HCL attribute syntax, allowing for more complex structures: .. code-block:: go ctx := &hcl.EvalContext{ Variables: map[string]cty.Value{ "path": cty.ObjectVal(map[string]cty.Value{ "root": cty.StringVal(rootDir), "module": cty.StringVal(moduleDir), "current": cty.StringVal(currentDir), }), }, } .. code-block:: hcl source_file = "${path.module}/foo.txt" .. _go-expression-funcs: Defining Functions ------------------ Custom functions can be defined by you application to allow users of its language to transform data in application-specific ways. The underlying function mechanism is also provided by :go:pkg:`cty`, allowing you to define the arguments a given function expects, what value type it will return for given argument types, etc. The full functions model is described in the ``cty`` documentation section `Functions System `_. There are `a number of "standard library" functions `_ available in a ``stdlib`` package within the :go:pkg:`cty` repository, avoiding the need for each application to re-implement basic functions for string manipulation, list manipulation, etc. It also includes function-shaped versions of several operations that are native operators in HCL, which should generally *not* be exposed as functions in HCL-based configuration formats to avoid user confusion. You can define functions in the ``Functions`` field of :go:type:`hcl.EvalContext`: .. 
code-block:: go ctx := &hcl.EvalContext{ Variables: map[string]cty.Value{ "name": cty.StringVal("Ermintrude"), }, Functions: map[string]function.Function{ "upper": stdlib.UpperFunc, "lower": stdlib.LowerFunc, "min": stdlib.MinFunc, "max": stdlib.MaxFunc, "strlen": stdlib.StrlenFunc, "substr": stdlib.SubstrFunc, }, } If this evaluation context were passed to one of the evaluation functions we saw in previous sections, the user would be able to call any of these functions in any argument expression appearing in the evaluated portion of configuration: .. code-block:: hcl message = "HELLO, ${upper(name)}!" Expression Evaluation Modes --------------------------- HCL uses a different expression evaluation mode depending on the evaluation context provided. In HCL native syntax, evaluation modes are used to provide more relevant error messages. In JSON syntax, which embeds the native expression syntax in strings using "template" syntax, the evaluation mode determines whether strings are evaluated as templates at all. If the given :go:type:`hcl.EvalContext` is ``nil``, native syntax expressions will react to users attempting to refer to variables or functions by producing errors indicating that these features are not available at all, rather than by saying that the specific variable or function does not exist. JSON syntax strings will not be evaluated as templates *at all* in this mode, making them function as literal strings. If the evaluation context is non-``nil`` but either ``Variables`` or ``Functions`` within it is ``nil``, native syntax will similarly produce "not supported" error messages. JSON syntax strings *will* parse templates in this case, but can also generate "not supported" messages if e.g. the user accesses a variable when the variables map is ``nil``. If neither map is ``nil``, HCL assumes that both variables and functions are supported and will instead produce error messages stating that the specific variable or function accessed by the user is not defined. hcl-2.14.1/guide/go_parsing.rst000066400000000000000000000046721431334125700163400ustar00rootroot00000000000000.. _go-parsing: Parsing HCL Input ================= The first step in processing HCL input provided by a user is to parse it. Parsing turns the raw bytes from an input file into a higher-level representation of the arguments and blocks, ready to be *decoded* into an application-specific form. The main entry point into HCL parsing is :go:pkg:`hclparse`, which provides :go:type:`hclparse.Parser`: .. code-block:: go parser := hclparse.NewParser() f, diags := parser.ParseHCLFile("server.conf") Variable ``f`` is then a pointer to an :go:type:`hcl.File`, which is an opaque abstract representation of the file, ready to be decoded. Variable ``diags`` describes any errors or warnings that were encountered during processing; HCL conventionally uses this in place of the usual ``error`` return value in Go, to allow returning a mixture of multiple errors and warnings together with enough information to present good error messages to the user. We'll cover this in more detail in the next section, :ref:`go-diagnostics`. .. go:package:: hclparse Package ``hclparse`` -------------------- .. go:type:: Parser .. go:function:: func NewParser() *Parser Constructs a new parser object. Each parser contains a cache of files that have already been read, so repeated calls to load the same file will return the same object. .. 
go:function:: func (*Parser) ParseHCL(src []byte, filename string) (*hcl.File, hcl.Diagnostics) Parse the given source code as HCL native syntax, saving the result into the parser's file cache under the given filename. .. go:function:: func (*Parser) ParseHCLFile(filename string) (*hcl.File, hcl.Diagnostics) Parse the contents of the given file as HCL native syntax. This is a convenience wrapper around ParseHCL that first reads the file into memory. .. go:function:: func (*Parser) ParseJSON(src []byte, filename string) (*hcl.File, hcl.Diagnostics) Parse the given source code as JSON syntax, saving the result into the parser's file cache under the given filename. .. go:function:: func (*Parser) ParseJSONFile(filename string) (*hcl.File, hcl.Diagnostics) Parse the contents of the given file as JSON syntax. This is a convenience wrapper around ParseJSON that first reads the file into memory. The above list just highlights the main functions in this package. For full documentation, see `the hclparse godoc `_. hcl-2.14.1/guide/go_patterns.rst000066400000000000000000000331121431334125700165240ustar00rootroot00000000000000Design Patterns for Complex Systems =================================== In previous sections we've seen an overview of some different ways an application can decode a language its has defined in terms of the HCL grammar. For many applications, those mechanisms are sufficient. However, there are some more complex situations that can benefit from some additional techniques. This section lists a few of these situations and ways to use the HCL API to accommodate them. .. _go-interdep-blocks: Interdependent Blocks --------------------- In some configuration languages, the variables available for use in one configuration block depend on values defined in other blocks. For example, in Terraform many of the top-level constructs are also implicitly definitions of values that are available for use in expressions elsewhere: .. code-block:: hcl variable "network_numbers" { type = list(number) } variable "base_network_addr" { type = string default = "10.0.0.0/8" } locals { network_blocks = { for x in var.number: x => cidrsubnet(var.base_network_addr, 8, x) } } resource "cloud_subnet" "example" { for_each = local.network_blocks cidr_block = each.value } output "subnet_ids" { value = cloud_subnet.example[*].id } In this example, the ``variable "network_numbers"`` block makes ``var.network_numbers`` available to expressions, the ``resource "cloud_subnet" "example"`` block makes ``cloud_subnet.example`` available, etc. Terraform achieves this by decoding the top-level structure in isolation to start. You can do this either using the low-level API or using :go:pkg:`gohcl` with :go:type:`hcl.Body` fields tagged as "remain". Once you have a separate body for each top-level block, you can inspect each of the attribute expressions inside using the ``Variables`` method on :go:type:`hcl.Expression`, or the ``Variables`` function from package :go:pkg:`hcldec` if you will eventually use its higher-level API to decode as Terraform does. The detected variable references can then be used to construct a dependency graph between the blocks, and then perform a `topological sort `_ to determine the correct order to evaluate each block's contents so that values will always be available before they are needed. 
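The sketch below shows one way such a dependency-collection step might look.
Here ``blocks`` is a hypothetical map from a block's address to its
:go:type:`hcl.Body`, and ``spec`` is the :go:type:`hcldec.Spec` that will
later be used to decode each body; neither name is part of the HCL API
itself:

.. code-block:: go

    deps := map[string][]string{}
    for addr, body := range blocks {
        for _, traversal := range hcldec.Variables(body, spec) {
            // RootName is the first symbol in the reference, such as
            // "var" or "local", identifying the block that must be
            // evaluated before this one.
            deps[addr] = append(deps[addr], traversal.RootName())
        }
    }
    // deps can now drive a topological sort of the evaluation order.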
Since :go:pkg:`cty` values are immutable, it is not convenient to directly change values in a :go:type:`hcl.EvalContext` during this gradual evaluation, so instead construct a specialized data structure that has a separate value per object and construct an evaluation context from that each time a new value becomes available. Using :go:pkg:`hcldec` to evaluate block bodies is particularly convenient in this scenario because it produces :go:type:`cty.Value` results which can then just be directly incorporated into the evaluation context. Distributed Systems ------------------- Distributed systems cause a number of extra challenges, and configuration management is rarely the worst of these. However, there are some specific considerations for using HCL-based configuration in distributed systems. For the sake of this section, we are concerned with distributed systems where at least two separate components both depend on the content of HCL-based configuration files. Real-world examples include the following: * **HashiCorp Nomad** loads configuration (job specifications) in its servers but also needs these results in its clients and in its various driver plugins. * **HashiCorp Terraform** parses configuration in Terraform Core but can write a partially-evaluated execution plan to disk and continue evaluation in a separate process later. It must also pass configuration values into provider plugins. Broadly speaking, there are two approaches to allowing configuration to be accessed in multiple subsystems, which the following subsections will discuss separately. Ahead-of-time Evaluation ^^^^^^^^^^^^^^^^^^^^^^^^ Ahead-of-time evaluation is the simplest path, with the configuration files being entirely evaluated on entry to the system, and then only the resulting *constant values* being passed between subsystems. This approach is relatively straightforward because the resulting :go:type:`cty.Value` results can be losslessly serialized as either JSON or msgpack as long as all system components agree on the expected value types. Aside from passing these values around "on the wire", parsing and decoding of configuration proceeds as normal. Both Nomad and Terraform use this approach for interacting with *plugins*, because the plugins themselves are written by various different teams that do not coordinate closely, and so doing all expression evaluation in the core subsystems ensures consistency between plugins and simplifies plugin development. In both applications, the plugin is expected to describe (using an application-specific protocol) the schema it expects for each element of configuration it is responsible for, allowing the core subsystems to perform decoding on the plugin's behalf and pass a value that is guaranteed to conform to the schema. Gradual Evaluation ^^^^^^^^^^^^^^^^^^ Although ahead-of-time evaluation is relatively straightforward, it has the significant disadvantage that all data available for access via variables or functions must be known by whichever subsystem performs that initial evaluation. For example, in Terraform, the "plan" subcommand is responsible for evaluating the configuration and presenting to the user an execution plan for approval, but certain values in that plan cannot be determined until the plan is already being applied, since the specific values used depend on remote API decisions such as the allocation of opaque id strings for objects. 
In Terraform's case, the creation of the plan and the eventual apply of that plan *both* entail evaluating configuration, with the apply step having a more complete set of input values and thus producing a more complete result. However, this means that Terraform must somehow make the expressions from the original input configuration available to the separate process that applies the generated plan. Good usability requires error and warning messages that are able to refer back to specific sections of the input configuration as context for the reported problem, and the best way to achieve this in a distributed system doing gradual evaluation is to send the configuration *source code* between subsystems. This is generally the most compact representation that retains source location information, and will avoid any inconsistency caused by introducing another intermediate serialization. In Terraform's case, for example, the serialized plan incorporates both the data structure describing the partial evaluation results from the plan phase and the original configuration files that produced those results, which can then be re-evaluated during the apply step. In a gradual evaluation scenario, the application should verify correctness of the input configuration as completely as possible at each stage. To help with this, :go:pkg:`cty` has the concept of `unknown values `_, which can stand in for values the application does not yet know while still retaining correct type information. HCL expression evaluation reacts to unknown values by performing type checking but then returning another unknown value, causing the unknowns to propagate through expressions automatically. .. code-block:: go ctx := &hcl.EvalContext{ Variables: map[string]cty.Value{ "name": cty.UnknownVal(cty.String), "age": cty.UnknownVal(cty.Number), }, } val, moreDiags := expr.Value(ctx) diags = append(diags, moreDiags...) Each time an expression is re-evaluated with additional information, fewer of the input values will be unknown and thus more of the result will be known. Eventually the application should evaluate the expressions with no unknown values at all, which then guarantees that the result will also be wholly-known. Static References, Calls, Lists, and Maps ----------------------------------------- In most cases, we care more about the final result value of an expression than how that value was obtained. A particular list argument, for example, might be defined by the user via a tuple constructor, by a `for` expression, or by assigning the value of a variable that has a suitable list type. In some special cases, the structure of the expression is more important than the result value, or an expression may not *have* a reasonable result value. For example, in Terraform there are a few arguments that call for the user to name another object by reference, rather than provide an object value: .. code-block:: hcl resource "cloud_network" "example" { # ... } resource "cloud_subnet" "example" { cidr_block = "10.1.2.0/24" depends_on = [ cloud_network.example, ] } The ``depends_on`` argument in the second ``resource`` block *appears* as an expression that would construct a single-element tuple containing an object representation of the first resource block. However, Terraform uses this expression to construct its dependency graph, and so it needs to see specifically that this expression refers to ``cloud_network.example``, rather than determine a result value for it.
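As a sketch of how an application can analyze such an argument without evaluating it, the following helper (not part of HCL itself) extracts the referenced names using the static-analysis functions documented below; the ``depends_on`` convention follows the example above:

.. code-block:: go

    // dependencyRoots returns the root names referred to by a depends_on-style
    // attribute, without evaluating any of the expressions.
    func dependencyRoots(attr *hcl.Attribute) ([]string, hcl.Diagnostics) {
        exprs, diags := hcl.ExprList(attr.Expr) // requires a tuple constructor
        var roots []string
        for _, expr := range exprs {
            traversal, moreDiags := hcl.AbsTraversalForExpr(expr)
            diags = append(diags, moreDiags...)
            if moreDiags.HasErrors() {
                continue
            }
            roots = append(roots, traversal.RootName()) // e.g. "cloud_network"
        }
        return roots, diags
    }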
HCL offers a number of "static analysis" functions to help with this sort of situation. These all live in the :go:pkg:`hcl` package, and each one imposes a particular requirement on the syntax tree of the expression it is given, and returns a result derived from that if the expression conforms to that requirement. .. go:currentpackage:: hcl .. go:function:: func ExprAsKeyword(expr Expression) string This function attempts to interpret the given expression as a single keyword, returning that keyword as a string if possible. A "keyword" for the purposes of this function is an expression that can be understood as a valid single identifier. For example, the simple variable reference ``foo`` can be interpreted as a keyword, while ``foo.bar`` cannot. As a special case, the language-level keywords ``true``, ``false``, and ``null`` are also considered to be valid keywords, allowing the calling application to disregard their usual meaning. If the given expression cannot be reduced to a single keyword, the result is an empty string. Since an empty string is never a valid keyword, this result unambiguously signals failure. .. go:function:: func AbsTraversalForExpr(expr Expression) (Traversal, Diagnostics) This is a generalization of ``ExprAsKeyword`` that will accept anything that can be interpreted as a *traversal*, which is a variable name followed by zero or more attribute access or index operators with constant operands. For example, all of ``foo``, ``foo.bar`` and ``foo[0]`` are valid traversals, but ``foo[bar]`` is not, because the ``bar`` index is not constant. This is the function that Terraform uses to interpret the items within the ``depends_on`` sequence in our example above. As with ``ExprAsKeyword``, this function has a special case that the keywords ``true``, ``false``, and ``null`` will be accepted as if they were variable names by this function, allowing ``null.foo`` to be interpreted as a traversal even though it would be invalid if evaluated. If error diagnostics are returned, the traversal result is invalid and should not be used. .. go:function:: func RelTraversalForExpr(expr Expression) (Traversal, Diagnostics) This is very similar to ``AbsTraversalForExpr``, but the result is a *relative* traversal, which is one whose first name is considered to be an attribute of some other (implied) object. The processing rules are identical to ``AbsTraversalForExpr``, with the only exception being that the first element of the returned traversal is marked as being an attribute, rather than as a root variable. .. go:function:: func ExprList(expr Expression) ([]Expression, Diagnostics) This function requires that the given expression be a tuple constructor, and if so returns a slice of the element expressions in that constructor. Applications can then perform further static analysis on these, or evaluate them as normal. If error diagnostics are returned, the result is invalid and should not be used. This is the function that Terraform uses to interpret the expression assigned to ``depends_on`` in our example above, then in turn using ``AbsTraversalForExpr`` on each enclosed expression. .. go:function:: func ExprMap(expr Expression) ([]KeyValuePair, Diagnostics) This function requires that the given expression be an object constructor, and if so returns a slice of the element key/value pairs in that constructor. Applications can then perform further static analysis on these, or evaluate them as normal. If error diagnostics are returned, the result is invalid and should not be used. ..
go:function:: func ExprCall(expr Expression) (*StaticCall, Diagnostics) This function requires that the given expression be a function call, and if so returns an object describing the name of the called function and expression objects representing the call arguments. If error diagnostics are returned, the result is invalid and should not be used. The ``Variables`` method on :go:type:`hcl.Expression` is also considered to be a "static analysis" helper, but is built in as a fundamental feature because analysis of referenced variables is often important for static validation and for implementing interdependent blocks as we saw in the section above. hcl-2.14.1/guide/index.rst000066400000000000000000000025301431334125700153060ustar00rootroot00000000000000HCL Config Language Toolkit =========================== HCL is a toolkit for creating structured configuration languages that are both human- and machine-friendly, for use with command-line tools, servers, etc. HCL has both a native syntax, intended to be pleasant to read and write for humans, and a JSON-based variant that is easier for machines to generate and parse. The native syntax is inspired by libucl_, `nginx configuration`_, and others. It includes an expression syntax that allows basic inline computation and, with support from the calling application, use of variables and functions for more dynamic configuration languages. HCL provides a set of constructs that can be used by a calling application to construct a configuration language. The application defines which argument names and nested block types are expected, and HCL parses the configuration file, verifies that it conforms to the expected structure, and returns high-level objects that the application can use for further processing. At present, HCL is primarily intended for use in applications written in Go_, via its library API. .. toctree:: :maxdepth: 1 :caption: Contents: intro go language_design .. _libucl: https://github.com/vstakhov/libucl .. _`nginx configuration`: http://nginx.org/en/docs/beginners_guide.html#conf_structure .. _Go: https://golang.org/ hcl-2.14.1/guide/intro.rst000066400000000000000000000077021431334125700153400ustar00rootroot00000000000000.. _intro: Introduction to HCL =================== HCL-based configuration is built from two main constructs: arguments and blocks. The following is an example of a configuration language for a hypothetical application: .. code-block:: hcl io_mode = "async" service "http" "web_proxy" { listen_addr = "127.0.0.1:8080" process "main" { command = ["/usr/local/bin/awesome-app", "server"] } process "mgmt" { command = ["/usr/local/bin/awesome-app", "mgmt"] } } In the above example, ``io_mode`` is a top-level argument, while ``service`` introduces a block. Within the body of a block, further arguments and nested blocks are allowed. A block type may also expect a number of *labels*, which are the quoted names following the ``service`` keyword in the above example. The specific keywords ``io_mode``, ``service``, ``process``, etc here are application-defined. HCL provides the general block structure syntax, and can validate and decode configuration based on the application's provided schema. HCL is a structured configuration language rather than a data structure serialization language. This means that unlike languages such as JSON, YAML, or TOML, HCL is always decoded using an application-defined schema. 
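Using the Go API described later in this guide, for example, the schema for the top-level structure shown above might look something like the following sketch; the nested ``process`` blocks and the individual arguments would be described by further schemas in the same way:

.. code-block:: go

    schema := &hcl.BodySchema{
        Attributes: []hcl.AttributeSchema{
            {Name: "io_mode"},
        },
        Blocks: []hcl.BlockHeaderSchema{
            {Type: "service", LabelNames: []string{"type", "name"}},
        },
    }

    // "body" is the root body of a parsed configuration file.
    content, diags := body.Content(schema)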
However, HCL does have a JSON-based alternative syntax, which allows the same structure above to be generated using a standard JSON serializer when users wish to generate configuration programmatically rather than hand-write it: .. code-block:: json { "io_mode": "async", "service": { "http": { "web_proxy": { "listen_addr": "127.0.0.1:8080", "process": { "main": { "command": ["/usr/local/bin/awesome-app", "server"] }, "mgmt": { "command": ["/usr/local/bin/awesome-app", "mgmt"] }, } } } } } The calling application can choose which syntaxes to support. JSON syntax may not be important or desirable for certain applications, but it is available for applications that need it. The schema provided by the calling application allows JSON input to be properly decoded even though JSON syntax is ambiguous in various ways, such as whether a JSON object is representing a nested block or an object expression. The collection of arguments and blocks at a particular nesting level is called a *body*. A file always has a root body containing the top-level elements, and each block also has its own body representing the elements within it. The term "attribute" can also be used to refer to what we've called an "argument" so far. The term "attribute" is also used for the fields of an object value in argument expressions, and so "argument" is used to refer specifically to the type of attribute that appears directly within a body. The above examples show the general "texture" of HCL-based configuration. The full details of the syntax are covered in the language specifications. .. todo:: Once the language specification documents have settled into a final location, link them from above. Argument Expressions -------------------- The value of an argument can be a literal value shown above, or it may be an expression to allow arithmetic, deriving one value from another, etc. .. code-block:: hcl listen_addr = env.LISTEN_ADDR Built-in arithmetic and comparison operators are automatically available in all HCL-based configuration languages. A calling application may optionally provide variables that users can reference, like ``env`` in the above example, and custom functions to transform values in application-specific ways. Full details of the expression syntax are in the HCL native syntax specification. Since JSON does not have an expression syntax, JSON-based configuration files use the native syntax expression language embedded inside JSON strings. .. todo:: Once the language specification documents have settled into a final location, link to the native syntax specification from above. hcl-2.14.1/guide/language_design.rst000066400000000000000000000365051431334125700173240ustar00rootroot00000000000000Configuration Language Design ============================= In this section we will cover some conventions for HCL-based configuration languages that can help make them feel consistent with other HCL-based languages, and make the best use of HCL's building blocks. HCL's native and JSON syntaxes both define a mapping from input bytes to a higher-level information model. In designing a configuration language based on HCL, your building blocks are the components in that information model: blocks, arguments, and expressions. Each calling application of HCL, then, effectively defines its own language. Just as Atom and RSS are higher-level languages built on XML, HashiCorp Terraform has a higher-level language built on HCL, while HashiCorp Nomad has its own distinct language that is *also* built on HCL. 
From an end-user perspective, these are distinct languages but have a common underlying texture. Users of both are therefore likely to bring some expectations from one to the other, and so this section is an attempt to codify some of these shared expectations to reduce user surprise. These are subjective guidelines however, and so applications may choose to ignore them entirely or ignore them in certain specialized cases. An application providing a configuration language for a pre-existing system, for example, may choose to eschew the identifier naming conventions in this section in order to exactly match the existing names in that underlying system. Language Keywords and Identifiers --------------------------------- Much of the work in defining an HCL-based language is in selecting good names for arguments, block types, variables, and functions. The standard for naming in HCL is to use all-lowercase identifiers with underscores separating words, like ``service`` or ``io_mode``. HCL identifiers do allow uppercase letters and dashes, but this primarily for natural interfacing with external systems that may have other identifier conventions, and so these should generally be avoided for the identifiers native to your own language. The distinction between "keywords" and other identifiers is really just a convention. In your own language documentation, you may use the word "keyword" to refer to names that are presented as an intrinsic part of your language, such as important top-level block type names. Block type names are usually singular, since each block defines a single object. Use a plural block name only if the block is serving only as a namespacing container for a number of other objects. A block with a plural type name will generally contain only nested blocks, and no arguments of its own. Argument names are also singular unless they expect a collection value, in which case they should be plural. For example, ``name = "foo"`` but ``subnet_ids = ["abc", "123"]``. Function names will generally *not* use underscores and will instead just run words together, as is common in the C standard library. This is a result of the fact that several of the standard library functions offered in ``cty`` (covered in a later section) have names that follow C library function names like ``substr``. This is not a strong rule, and applications that use longer names may choose to use underscores for them to improve readability. Blocks vs. Object Values ------------------------ HCL blocks and argument values of object type have quite a similar appearance in the native syntax, and are identical in JSON syntax: .. code-block:: hcl block { foo = bar } # argument with object constructor expression argument = { foo = bar } In spite of this superficial similarity, there are some important differences between these two forms. The most significant difference is that a child block can contain nested blocks of its own, while an object constructor expression can define only attributes of the object it is creating. The user-facing model for blocks is that they generally form the more "rigid" structure of the language itself, while argument values can be more free-form. An application will generally define in its schema and documentation all of the arguments that are valid for a particular block type, while arguments accepting object constructors are more appropriate for situations where the arguments themselves are freely selected by the user, such as when the expression will be converted by the application to a map type. 
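One way to see this distinction in practice is in how the two forms above might be described to the :go:pkg:`hcldec` package covered elsewhere in this guide. This is only a sketch, with arbitrary names and types: the nested block gets a rigid schema of its own, while the object-typed argument is simply converted to a map of whatever keys the user writes.

.. code-block:: go

    spec := hcldec.ObjectSpec{
        // The nested block has an application-defined structure of its own.
        "block": &hcldec.BlockSpec{
            TypeName: "block",
            Nested: hcldec.ObjectSpec{
                "foo": &hcldec.AttrSpec{Name: "foo", Type: cty.String},
            },
        },
        // The argument accepts whatever keys the user chooses, as a map.
        "argument": &hcldec.AttrSpec{
            Name: "argument",
            Type: cty.Map(cty.String),
        },
    }

    val, diags := hcldec.Decode(body, spec, nil)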
As a less contrived example, consider the ``resource`` block type in Terraform and its use with a particular resource type ``aws_instance``: .. code-block:: hcl resource "aws_instance" "example" { ami = "ami-abc123" instance_type = "t2.micro" tags = { Name = "example instance" } ebs_block_device { device_name = "hda1" volume_size = 8 volume_type = "standard" } } The top-level block type ``resource`` is fundamental to Terraform itself and so an obvious candidate for block syntax: it maps directly onto an object in Terraform's own domain model. Within this block we see a mixture of arguments and nested blocks, all defined as part of the schema of the ``aws_instance`` resource type. The ``tags`` map here is specified as an argument because its keys are free-form, chosen by the user and mapped directly onto a map in the underlying system. ``ebs_block_device`` is specified as a nested block, because it is a separate domain object within the remote system and has a rigid schema of its own. As a special case, block syntax may sometimes be used with free-form keys if those keys each serve as a separate declaration of some first-class object in the language. For example, Terraform has a top-level block type ``locals`` which behaves in this way: .. code-block:: hcl locals { instance_type = "t2.micro" instance_id = aws_instance.example.id } Although the argument names in this block are arbitrarily selected by the user, each one defines a distinct top-level object. In other words, this approach is used to create a more ergonomic syntax for defining these simple single-expression objects, as a pragmatic alternative to more verbose and redundant declarations using blocks: .. code-block:: hcl local "instance_type" { value = "t2.micro" } local "instance_id" { value = aws_instance.example.id } The distinction between domain objects, language constructs and user data will always be subjective, so the final decision is up to you as the language designer. Standard Functions ------------------ HCL itself does not define a common set of functions available in all HCL-based languages; the built-in language operators give a baseline of functionality that is always available, but applications are free to define functions as they see fit. With that said, there's a number of generally-useful functions that don't belong to the domain of any one application: string manipulation, sequence manipulation, date formatting, JSON serialization and parsing, etc. Given the general need such functions serve, it's helpful if a similar set of functions is available with compatible behavior across multiple HCL-based languages, assuming the language is for an application where function calls make sense at all. The Go implementation of HCL is built on an underlying type and function system :go:pkg:`cty`, whose usage was introduced in :ref:`go-expression-funcs`. That library also has a package of "standard library" functions which we encourage applications to offer with consistent names and compatible behavior, either by using the standard implementations directly or offering compatible implementations under the same name. The "standard" functions that new configuration formats should consider offering are: * ``abs(number)`` - returns the absolute (positive) value of the given number. * ``coalesce(vals...)`` - returns the value of the first argument that isn't null. Useful only in formats where null values may appear. * ``compact(vals...)`` - returns a new tuple with the non-null values given as arguments, preserving order. 
* ``concat(seqs...)`` - builds a tuple value by concatenating together all of the given sequence (list or tuple) arguments. * ``format(fmt, args...)`` - performs simple string formatting similar to the C library function ``printf``. * ``hasindex(coll, idx)`` - returns true if the given collection has the given index. ``coll`` may be of list, tuple, map, or object type. * ``int(number)`` - returns the integer component of the given number, rounding towards zero. * ``jsondecode(str)`` - interprets the given string as JSON format and returns the corresponding decoded value. * ``jsonencode(val)`` - encodes the given value as a JSON string. * ``length(coll)`` - returns the length of the given collection. * ``lower(str)`` - converts the letters in the given string to lowercase, using Unicode case folding rules. * ``max(numbers...)`` - returns the highest of the given number values. * ``min(numbers...)`` - returns the lowest of the given number values. * ``sethas(set, val)`` - returns true only if the given set has the given value as an element. * ``setintersection(sets...)`` - returns the intersection of the given sets. * ``setsubtract(set1, set2)`` - returns a set with the elements from ``set1`` that are not also in ``set2``. * ``setsymdiff(sets...)`` - returns the symmetric difference of the given sets. * ``setunion(sets...)`` - returns the union of the given sets. * ``strlen(str)`` - returns the length of the given string in Unicode grapheme clusters. * ``substr(str, offset, length)`` - returns a substring from the given string by splitting it between Unicode grapheme clusters. * ``timeadd(time, duration)`` - takes a timestamp in RFC3339 format and a possibly-negative duration given as a string like ``"1h"`` (for "one hour") and returns a new RFC3339 timestamp after adding the duration to the given timestamp. * ``upper(str)`` - converts the letters in the given string to uppercase, using Unicode case folding rules. Not all of these functions will make sense in all applications. For example, an application that doesn't use set types at all would have no reason to provide the set-manipulation functions here. Some languages will not provide functions at all, since they are primarily for assigning values to arguments and thus do not need nor want any custom computations of those values. Block Results as Expression Variables ------------------------------------- In some applications, top-level blocks serve also as declarations of variables (or of attributes of object variables) available during expression evaluation, as discussed in :ref:`go-interdep-blocks`. In this case, it's most intuitive for the variables map in the evaluation context to contain a value named after each valid top-level block type and for these values to be object-typed or map-typed and reflect the structure implied by block type labels. For example, an application may have a top-level ``service`` block type used like this: .. code-block:: hcl service "http" "web_proxy" { listen_addr = "127.0.0.1:8080" process "main" { command = ["/usr/local/bin/awesome-app", "server"] } process "mgmt" { command = ["/usr/local/bin/awesome-app", "mgmt"] } } If the result of decoding this block were available for use in expressions elsewhere in configuration, the above convention would call for it to be available to expressions as an object at ``service.http.web_proxy``.
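A sketch of how an application might assemble that structure after decoding is shown below; here ``content`` is assumed to be the result of a ``Content`` or ``PartialContent`` call, ``svcSpec`` is an application-defined :go:pkg:`hcldec` spec for the block body, and grouping the results into objects keyed by the two labels is just one possible choice of representation:

.. code-block:: go

    // Group the decoded result of each "service" block by its two labels,
    // producing a value that can be referenced as service.http.web_proxy.
    byType := map[string]map[string]cty.Value{}
    for _, block := range content.Blocks {
        svcType, svcName := block.Labels[0], block.Labels[1]
        objVal, _ := hcldec.Decode(block.Body, svcSpec, nil) // diagnostics elided
        if byType[svcType] == nil {
            byType[svcType] = map[string]cty.Value{}
        }
        byType[svcType][svcName] = objVal
    }

    serviceVals := map[string]cty.Value{}
    for svcType, byName := range byType {
        serviceVals[svcType] = cty.ObjectVal(byName)
    }

    ctx := &hcl.EvalContext{
        Variables: map[string]cty.Value{
            "service": cty.ObjectVal(serviceVals),
        },
    }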
If it is the contents of the block itself that are offered to evaluation -- or a superset object *derived* from the block contents -- then the block arguments can map directly to object attributes, but it is up to the application to decide which value type is most appropriate for each block type, since this depends on how multiple blocks of the same type relate to one another, or if multiple blocks of that type are even allowed. In the above example, an application would probably expose the ``listen_addr`` argument value as ``service.http.web_proxy.listen_addr``, and may choose to expose the ``process`` blocks as a map of objects using the labels as keys, which would allow an expression like ``service.http.web_proxy.process["main"].command``. If multiple blocks of a given type do not have a significant order relative to one another, as seems to be the case with these ``process`` blocks, representation as a map is often the most intuitive. If the ordering of the blocks *is* significant then a list may be more appropriate, allowing the use of HCL's "splat operators" for convenient access to child arguments. However, there is no one-size-fits-all solution here and language designers must instead consider the likely usage patterns of each value and select the value representation that best accommodates those patterns. Some applications may choose to offer variables with slightly different names than the top-level blocks in order to allow for more concise references, such as abbreviating ``service`` to ``svc`` in the above examples. This should be done with care since it may make the relationship between the two less obvious, but this may be a good tradeoff for names that are accessed frequently and might otherwise hurt the readability of the expressions they are embedded in. Familiarity permits brevity. Many applications will not make block results available for use in other expressions at all, in which case they are free to select whichever variable names make sense for what is being exposed. For example, a format may make environment variable values available for use in expressions, and may do so either as top-level variables (if no other variables are needed) or as an object named ``env``, which can be used as in ``env.HOME``. Text Editor and IDE Integrations -------------------------------- Since HCL defines only low-level syntax, a text editor or IDE integration for HCL itself can only really provide basic syntax highlighting. For non-trivial HCL-based languages, a more specialized editor integration may be warranted. For example, users writing configuration for HashiCorp Terraform must recall the argument names for numerous different provider plugins, and so auto-completion and documentation hovertips can be a great help, and configurations are commonly spread over multiple files making "Go to Definition" functionality useful. None of this functionality can be implemented generically for all HCL-based languages since it relies on knowledge of the structure of Terraform's own language. Writing such text editor integrations is out of the scope of this guide. The Go implementation of HCL does have some building blocks to help with this, but it will always be an application-specific effort. However, in order to *enable* such integrations, it is best to establish a conventional file extension *other than* `.hcl` for each non-trivial HCL-based language, thus allowing text editors to recognize it and enable the suitable integration.
For example, Terraform requires ``.tf`` and ``.tf.json`` filenames for its main configuration, and the ``hcldec`` utility in the HCL repository accepts spec files that should conventionally be named with an ``.hcldec`` extension. For simple languages that are unlikely to benefit from specific editor integrations, using the ``.hcl`` extension is fine and may cause an editor to enable basic syntax highlighting, absent any other deeper features. An editor extension for a specific HCL-based language should *not* match generically the ``.hcl`` extension, since this can cause confusing results for users attempting to write configuration files targeting other applications. hcl-2.14.1/guide/make.bat000066400000000000000000000014471431334125700150600ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build set SPHINXPROJ=HCL if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% :end popd hcl-2.14.1/guide/requirements.txt000066400000000000000000000000611431334125700167260ustar00rootroot00000000000000sphinx sphinxcontrib-golangdomain sphinx-autoapi hcl-2.14.1/hcldec/000077500000000000000000000000001431334125700135725ustar00rootroot00000000000000hcl-2.14.1/hcldec/block_labels.go000066400000000000000000000005371431334125700165420ustar00rootroot00000000000000package hcldec import ( "github.com/hashicorp/hcl/v2" ) type blockLabel struct { Value string Range hcl.Range } func labelsForBlock(block *hcl.Block) []blockLabel { ret := make([]blockLabel, len(block.Labels)) for i := range block.Labels { ret[i] = blockLabel{ Value: block.Labels[i], Range: block.LabelRanges[i], } } return ret } hcl-2.14.1/hcldec/decode.go000066400000000000000000000015551431334125700153520ustar00rootroot00000000000000package hcldec import ( "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) func decode(body hcl.Body, blockLabels []blockLabel, ctx *hcl.EvalContext, spec Spec, partial bool) (cty.Value, hcl.Body, hcl.Diagnostics) { schema := ImpliedSchema(spec) var content *hcl.BodyContent var diags hcl.Diagnostics var leftovers hcl.Body if partial { content, leftovers, diags = body.PartialContent(schema) } else { content, diags = body.Content(schema) } val, valDiags := spec.decode(content, blockLabels, ctx) diags = append(diags, valDiags...) return val, leftovers, diags } func impliedType(spec Spec) cty.Type { return spec.impliedType() } func sourceRange(body hcl.Body, blockLabels []blockLabel, spec Spec) hcl.Range { schema := ImpliedSchema(spec) content, _, _ := body.PartialContent(schema) return spec.sourceRange(content, blockLabels) } hcl-2.14.1/hcldec/doc.go000066400000000000000000000011501431334125700146630ustar00rootroot00000000000000// Package hcldec provides a higher-level API for unpacking the content of // HCL bodies, implemented in terms of the low-level "Content" API exposed // by the bodies themselves. 
// // It allows decoding an entire nested configuration in a single operation // by providing a description of the intended structure. // // For some applications it may be more convenient to use the "gohcl" // package, which has a similar purpose but decodes directly into native // Go data types. hcldec instead targets the cty type system, and thus allows // a cty-driven application to remain within that type system. package hcldec hcl-2.14.1/hcldec/gob.go000066400000000000000000000012511431334125700146670ustar00rootroot00000000000000package hcldec import ( "encoding/gob" ) func init() { // Every Spec implementation should be registered with gob, so that // specs can be sent over gob channels, such as using // github.com/hashicorp/go-plugin with plugins that need to describe // what shape of configuration they are expecting. gob.Register(ObjectSpec(nil)) gob.Register(TupleSpec(nil)) gob.Register((*AttrSpec)(nil)) gob.Register((*LiteralSpec)(nil)) gob.Register((*ExprSpec)(nil)) gob.Register((*BlockSpec)(nil)) gob.Register((*BlockListSpec)(nil)) gob.Register((*BlockSetSpec)(nil)) gob.Register((*BlockMapSpec)(nil)) gob.Register((*BlockLabelSpec)(nil)) gob.Register((*DefaultSpec)(nil)) } hcl-2.14.1/hcldec/public.go000066400000000000000000000060211431334125700153760ustar00rootroot00000000000000package hcldec import ( "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) // Decode interprets the given body using the given specification and returns // the resulting value. If the given body is not valid per the spec, error // diagnostics are returned and the returned value is likely to be incomplete. // // The ctx argument may be nil, in which case any references to variables or // functions will produce error diagnostics. func Decode(body hcl.Body, spec Spec, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { val, _, diags := decode(body, nil, ctx, spec, false) return val, diags } // PartialDecode is like Decode except that it permits "leftover" items in // the top-level body, which are returned as a new body to allow for // further processing. // // Any descendent block bodies are _not_ decoded partially and thus must // be fully described by the given specification. func PartialDecode(body hcl.Body, spec Spec, ctx *hcl.EvalContext) (cty.Value, hcl.Body, hcl.Diagnostics) { return decode(body, nil, ctx, spec, true) } // ImpliedType returns the value type that should result from decoding the // given spec. func ImpliedType(spec Spec) cty.Type { return impliedType(spec) } // SourceRange interprets the given body using the given specification and // then returns the source range of the value that would be used to // fulfill the spec. // // This can be used if application-level validation detects value errors, to // obtain a reasonable SourceRange to use for generated diagnostics. It works // best when applied to specific body items (e.g. using AttrSpec, BlockSpec, ...) // as opposed to entire bodies using ObjectSpec, TupleSpec. The result will // be less useful the broader the specification, so e.g. a spec that returns // the entirety of all of the blocks of a given type is likely to be // _particularly_ arbitrary and useless. // // If the given body is not valid per the given spec, the result is best-effort // and may not actually be something ideal. It's expected that an application // will already have used Decode or PartialDecode earlier and thus had an // opportunity to detect and report spec violations. 
func SourceRange(body hcl.Body, spec Spec) hcl.Range { return sourceRange(body, nil, spec) } // ChildBlockTypes returns a map of all of the child block types declared // by the given spec, with block type names as keys and the associated // nested body specs as values. func ChildBlockTypes(spec Spec) map[string]Spec { ret := map[string]Spec{} // visitSameBodyChildren walks through the spec structure, calling // the given callback for each descendent spec encountered. We are // interested in the specs that reference attributes and blocks. var visit visitFunc visit = func(s Spec) { if bs, ok := s.(blockSpec); ok { for _, blockS := range bs.blockHeaderSchemata() { nested := bs.nestedSpec() if nested != nil { // nil can be returned to dynamically opt out of this interface ret[blockS.Type] = nested } } } s.visitSameBodyChildren(visit) } visit(spec) return ret } hcl-2.14.1/hcldec/public_test.go000066400000000000000000000425041431334125700164430ustar00rootroot00000000000000package hcldec import ( "fmt" "reflect" "testing" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/zclconf/go-cty/cty" ) func TestDecode(t *testing.T) { tests := []struct { config string spec Spec ctx *hcl.EvalContext want cty.Value diagCount int }{ { ``, &ObjectSpec{}, nil, cty.EmptyObjectVal, 0, }, { "a = 1\n", &ObjectSpec{}, nil, cty.EmptyObjectVal, 1, // attribute named "a" is not expected here }, { "a = 1\n", &ObjectSpec{ "a": &AttrSpec{ Name: "a", Type: cty.Number, }, }, nil, cty.ObjectVal(map[string]cty.Value{ "a": cty.NumberIntVal(1), }), 0, }, { "a = 1\n", &AttrSpec{ Name: "a", Type: cty.Number, }, nil, cty.NumberIntVal(1), 0, }, { "a = 1\n", &DefaultSpec{ Primary: &AttrSpec{ Name: "a", Type: cty.Number, }, Default: &LiteralSpec{ Value: cty.NumberIntVal(10), }, }, nil, cty.NumberIntVal(1), 0, }, { "", &DefaultSpec{ Primary: &AttrSpec{ Name: "a", Type: cty.Number, }, Default: &LiteralSpec{ Value: cty.NumberIntVal(10), }, }, nil, cty.NumberIntVal(10), 0, }, { "a = 1\n", ObjectSpec{ "foo": &DefaultSpec{ Primary: &AttrSpec{ Name: "a", Type: cty.Number, }, Default: &LiteralSpec{ Value: cty.NumberIntVal(10), }, }, }, nil, cty.ObjectVal(map[string]cty.Value{"foo": cty.NumberIntVal(1)}), 0, }, { "a = \"1\"\n", &AttrSpec{ Name: "a", Type: cty.Number, }, nil, cty.NumberIntVal(1), 0, }, { "a = true\n", &AttrSpec{ Name: "a", Type: cty.Number, }, nil, cty.UnknownVal(cty.Number), 1, // incorrect type - number required. 
}, { ``, &AttrSpec{ Name: "a", Type: cty.Number, Required: true, }, nil, cty.NullVal(cty.Number), 1, // attribute "a" is required }, { ` b { } `, &BlockSpec{ TypeName: "b", Nested: ObjectSpec{}, }, nil, cty.EmptyObjectVal, 0, }, { ` b "baz" { } `, &BlockSpec{ TypeName: "b", Nested: &BlockLabelSpec{ Index: 0, Name: "name", }, }, nil, cty.StringVal("baz"), 0, }, { ` b "baz" {} b "foo" {} `, &BlockSpec{ TypeName: "b", Nested: &BlockLabelSpec{ Index: 0, Name: "name", }, }, nil, cty.StringVal("baz"), 1, // duplicate "b" block }, { ` b { } `, &BlockSpec{ TypeName: "b", Nested: &BlockLabelSpec{ Index: 0, Name: "name", }, }, nil, cty.NullVal(cty.String), 1, // missing name label }, { ``, &BlockSpec{ TypeName: "b", Nested: ObjectSpec{}, }, nil, cty.NullVal(cty.EmptyObject), 0, }, { "a {}\n", &BlockSpec{ TypeName: "b", Nested: ObjectSpec{}, }, nil, cty.NullVal(cty.EmptyObject), 1, // blocks of type "a" are not supported }, { ``, &BlockSpec{ TypeName: "b", Nested: ObjectSpec{}, Required: true, }, nil, cty.NullVal(cty.EmptyObject), 1, // a block of type "b" is required }, { ` b {} b {} `, &BlockSpec{ TypeName: "b", Nested: ObjectSpec{}, Required: true, }, nil, cty.EmptyObjectVal, 1, // only one "b" block is allowed }, { ` b { } `, &BlockAttrsSpec{ TypeName: "b", ElementType: cty.String, }, nil, cty.MapValEmpty(cty.String), 0, }, { ` b { hello = "world" } `, &BlockAttrsSpec{ TypeName: "b", ElementType: cty.String, }, nil, cty.MapVal(map[string]cty.Value{ "hello": cty.StringVal("world"), }), 0, }, { ` b { hello = true } `, &BlockAttrsSpec{ TypeName: "b", ElementType: cty.String, }, nil, cty.MapVal(map[string]cty.Value{ "hello": cty.StringVal("true"), }), 0, }, { ` b { hello = true goodbye = 5 } `, &BlockAttrsSpec{ TypeName: "b", ElementType: cty.String, }, nil, cty.MapVal(map[string]cty.Value{ "hello": cty.StringVal("true"), "goodbye": cty.StringVal("5"), }), 0, }, { ``, &BlockAttrsSpec{ TypeName: "b", ElementType: cty.String, }, nil, cty.NullVal(cty.Map(cty.String)), 0, }, { ``, &BlockAttrsSpec{ TypeName: "b", ElementType: cty.String, Required: true, }, nil, cty.NullVal(cty.Map(cty.String)), 1, // missing b block }, { ` b { } b { } `, &BlockAttrsSpec{ TypeName: "b", ElementType: cty.String, }, nil, cty.MapValEmpty(cty.String), 1, // duplicate b block }, { ` b { } b { } `, &BlockAttrsSpec{ TypeName: "b", ElementType: cty.String, Required: true, }, nil, cty.MapValEmpty(cty.String), 1, // duplicate b block }, { ` b {} b {} `, &BlockListSpec{ TypeName: "b", Nested: ObjectSpec{}, }, nil, cty.ListVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal}), 0, }, { ``, &BlockListSpec{ TypeName: "b", Nested: ObjectSpec{}, }, nil, cty.ListValEmpty(cty.EmptyObject), 0, }, { ` b "foo" {} b "bar" {} `, &BlockListSpec{ TypeName: "b", Nested: &BlockLabelSpec{ Name: "name", Index: 0, }, }, nil, cty.ListVal([]cty.Value{cty.StringVal("foo"), cty.StringVal("bar")}), 0, }, { ` b {} b {} b {} `, &BlockListSpec{ TypeName: "b", Nested: ObjectSpec{}, MaxItems: 2, }, nil, cty.ListVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal, cty.EmptyObjectVal}), 1, // too many b blocks }, { ` b {} b {} `, &BlockListSpec{ TypeName: "b", Nested: ObjectSpec{}, MinItems: 10, }, nil, cty.ListVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal}), 1, // insufficient b blocks }, { ` b { a = true } b { a = 1 } `, &BlockListSpec{ TypeName: "b", Nested: &AttrSpec{ Name: "a", Type: cty.DynamicPseudoType, }, }, nil, cty.DynamicVal, 1, // Unconsistent argument types in b blocks }, { ` b { a = true } b { a = "not a bool" } `, &BlockListSpec{ 
TypeName: "b", Nested: &AttrSpec{ Name: "a", Type: cty.DynamicPseudoType, }, }, nil, cty.ListVal([]cty.Value{ cty.StringVal("true"), // type unification generalizes all the values to strings cty.StringVal("not a bool"), }), 0, }, { ` b {} b {} `, &BlockSetSpec{ TypeName: "b", Nested: ObjectSpec{}, MaxItems: 2, }, nil, cty.SetVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal}), 0, }, { ` b "foo" "bar" {} b "bar" "baz" {} `, &BlockSetSpec{ TypeName: "b", Nested: TupleSpec{ &BlockLabelSpec{ Name: "name", Index: 1, }, &BlockLabelSpec{ Name: "type", Index: 0, }, }, }, nil, cty.SetVal([]cty.Value{ cty.TupleVal([]cty.Value{cty.StringVal("bar"), cty.StringVal("foo")}), cty.TupleVal([]cty.Value{cty.StringVal("baz"), cty.StringVal("bar")}), }), 0, }, { ` b { a = true } b { a = 1 } `, &BlockSetSpec{ TypeName: "b", Nested: &AttrSpec{ Name: "a", Type: cty.DynamicPseudoType, }, }, nil, cty.DynamicVal, 1, // Unconsistent argument types in b blocks }, { ` b { a = true } b { a = "not a bool" } `, &BlockSetSpec{ TypeName: "b", Nested: &AttrSpec{ Name: "a", Type: cty.DynamicPseudoType, }, }, nil, cty.SetVal([]cty.Value{ cty.StringVal("true"), // type unification generalizes all the values to strings cty.StringVal("not a bool"), }), 0, }, { ` b "foo" {} b "bar" {} `, &BlockMapSpec{ TypeName: "b", LabelNames: []string{"key"}, Nested: ObjectSpec{}, }, nil, cty.MapVal(map[string]cty.Value{"foo": cty.EmptyObjectVal, "bar": cty.EmptyObjectVal}), 0, }, { ` b "foo" "bar" {} b "bar" "baz" {} `, &BlockMapSpec{ TypeName: "b", LabelNames: []string{"key1", "key2"}, Nested: ObjectSpec{}, }, nil, cty.MapVal(map[string]cty.Value{ "foo": cty.MapVal(map[string]cty.Value{ "bar": cty.EmptyObjectVal, }), "bar": cty.MapVal(map[string]cty.Value{ "baz": cty.EmptyObjectVal, }), }), 0, }, { ` b "foo" "bar" {} b "bar" "bar" {} `, &BlockMapSpec{ TypeName: "b", LabelNames: []string{"key1", "key2"}, Nested: ObjectSpec{}, }, nil, cty.MapVal(map[string]cty.Value{ "foo": cty.MapVal(map[string]cty.Value{ "bar": cty.EmptyObjectVal, }), "bar": cty.MapVal(map[string]cty.Value{ "bar": cty.EmptyObjectVal, }), }), 0, }, { ` b "foo" "bar" {} b "foo" "baz" {} `, &BlockMapSpec{ TypeName: "b", LabelNames: []string{"key1", "key2"}, Nested: ObjectSpec{}, }, nil, cty.MapVal(map[string]cty.Value{ "foo": cty.MapVal(map[string]cty.Value{ "bar": cty.EmptyObjectVal, "baz": cty.EmptyObjectVal, }), }), 0, }, { ` b "foo" "bar" {} `, &BlockMapSpec{ TypeName: "b", LabelNames: []string{"key"}, Nested: ObjectSpec{}, }, nil, cty.MapValEmpty(cty.EmptyObject), 1, // too many labels }, { ` b "bar" {} `, &BlockMapSpec{ TypeName: "b", LabelNames: []string{"key1", "key2"}, Nested: ObjectSpec{}, }, nil, cty.MapValEmpty(cty.EmptyObject), 1, // not enough labels }, { ` b "foo" {} b "foo" {} `, &BlockMapSpec{ TypeName: "b", LabelNames: []string{"key"}, Nested: ObjectSpec{}, }, nil, cty.MapVal(map[string]cty.Value{"foo": cty.EmptyObjectVal}), 1, // duplicate b block }, { ` b "foo" "bar" {} b "foo" "bar" {} `, &BlockMapSpec{ TypeName: "b", LabelNames: []string{"key1", "key2"}, Nested: ObjectSpec{}, }, nil, cty.MapVal(map[string]cty.Value{"foo": cty.MapVal(map[string]cty.Value{"bar": cty.EmptyObjectVal})}), 1, // duplicate b block }, { ` b "foo" "bar" {} b "bar" "baz" {} `, &BlockMapSpec{ TypeName: "b", LabelNames: []string{"type"}, Nested: &BlockLabelSpec{ Name: "name", Index: 0, }, }, nil, cty.MapVal(map[string]cty.Value{ "foo": cty.StringVal("bar"), "bar": cty.StringVal("baz"), }), 0, }, { ` b "foo" {} `, &BlockMapSpec{ TypeName: "b", LabelNames: []string{"type"}, Nested: 
&BlockLabelSpec{ Name: "name", Index: 0, }, }, nil, cty.MapValEmpty(cty.String), 1, // missing name }, { ` b {} b {} `, &BlockTupleSpec{ TypeName: "b", Nested: ObjectSpec{}, }, nil, cty.TupleVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal}), 0, }, { ``, &BlockTupleSpec{ TypeName: "b", Nested: ObjectSpec{}, }, nil, cty.EmptyTupleVal, 0, }, { ` b "foo" {} b "bar" {} `, &BlockTupleSpec{ TypeName: "b", Nested: &BlockLabelSpec{ Name: "name", Index: 0, }, }, nil, cty.TupleVal([]cty.Value{cty.StringVal("foo"), cty.StringVal("bar")}), 0, }, { ` b {} b {} b {} `, &BlockTupleSpec{ TypeName: "b", Nested: ObjectSpec{}, MaxItems: 2, }, nil, cty.TupleVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal, cty.EmptyObjectVal}), 1, // too many b blocks }, { ` b {} b {} `, &BlockTupleSpec{ TypeName: "b", Nested: ObjectSpec{}, MinItems: 10, }, nil, cty.TupleVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal}), 1, // insufficient b blocks }, { ` b { a = true } b { a = 1 } `, &BlockTupleSpec{ TypeName: "b", Nested: &AttrSpec{ Name: "a", Type: cty.DynamicPseudoType, }, }, nil, cty.TupleVal([]cty.Value{ cty.True, cty.NumberIntVal(1), }), 0, }, { ` b { a = true } b { a = "not a bool" } `, &BlockTupleSpec{ TypeName: "b", Nested: &AttrSpec{ Name: "a", Type: cty.DynamicPseudoType, }, }, nil, cty.TupleVal([]cty.Value{ cty.True, cty.StringVal("not a bool"), }), 0, }, { ` b "foo" {} b "bar" {} `, &BlockObjectSpec{ TypeName: "b", LabelNames: []string{"key"}, Nested: ObjectSpec{}, }, nil, cty.ObjectVal(map[string]cty.Value{"foo": cty.EmptyObjectVal, "bar": cty.EmptyObjectVal}), 0, }, { ` b "foo" "bar" {} b "bar" "baz" {} `, &BlockObjectSpec{ TypeName: "b", LabelNames: []string{"key1", "key2"}, Nested: ObjectSpec{}, }, nil, cty.ObjectVal(map[string]cty.Value{ "foo": cty.ObjectVal(map[string]cty.Value{ "bar": cty.EmptyObjectVal, }), "bar": cty.ObjectVal(map[string]cty.Value{ "baz": cty.EmptyObjectVal, }), }), 0, }, { ` b "foo" "bar" {} b "bar" "bar" {} `, &BlockObjectSpec{ TypeName: "b", LabelNames: []string{"key1", "key2"}, Nested: ObjectSpec{}, }, nil, cty.ObjectVal(map[string]cty.Value{ "foo": cty.ObjectVal(map[string]cty.Value{ "bar": cty.EmptyObjectVal, }), "bar": cty.ObjectVal(map[string]cty.Value{ "bar": cty.EmptyObjectVal, }), }), 0, }, { ` b "foo" "bar" {} b "foo" "baz" {} `, &BlockObjectSpec{ TypeName: "b", LabelNames: []string{"key1", "key2"}, Nested: ObjectSpec{}, }, nil, cty.ObjectVal(map[string]cty.Value{ "foo": cty.ObjectVal(map[string]cty.Value{ "bar": cty.EmptyObjectVal, "baz": cty.EmptyObjectVal, }), }), 0, }, { ` b "foo" "bar" {} `, &BlockObjectSpec{ TypeName: "b", LabelNames: []string{"key"}, Nested: ObjectSpec{}, }, nil, cty.EmptyObjectVal, 1, // too many labels }, { ` b "bar" {} `, &BlockObjectSpec{ TypeName: "b", LabelNames: []string{"key1", "key2"}, Nested: ObjectSpec{}, }, nil, cty.EmptyObjectVal, 1, // not enough labels }, { ` b "foo" {} b "foo" {} `, &BlockObjectSpec{ TypeName: "b", LabelNames: []string{"key"}, Nested: ObjectSpec{}, }, nil, cty.ObjectVal(map[string]cty.Value{"foo": cty.EmptyObjectVal}), 1, // duplicate b block }, { ` b "foo" "bar" {} b "foo" "bar" {} `, &BlockObjectSpec{ TypeName: "b", LabelNames: []string{"key1", "key2"}, Nested: ObjectSpec{}, }, nil, cty.ObjectVal(map[string]cty.Value{"foo": cty.ObjectVal(map[string]cty.Value{"bar": cty.EmptyObjectVal})}), 1, // duplicate b block }, { ` b "foo" "bar" {} b "bar" "baz" {} `, &BlockObjectSpec{ TypeName: "b", LabelNames: []string{"type"}, Nested: &BlockLabelSpec{ Name: "name", Index: 0, }, }, nil, 
cty.ObjectVal(map[string]cty.Value{ "foo": cty.StringVal("bar"), "bar": cty.StringVal("baz"), }), 0, }, { ` b "foo" {} `, &BlockObjectSpec{ TypeName: "b", LabelNames: []string{"type"}, Nested: &BlockLabelSpec{ Name: "name", Index: 0, }, }, nil, cty.EmptyObjectVal, 1, // missing name }, { ` b "foo" { arg = true } b "bar" { arg = 1 } `, &BlockObjectSpec{ TypeName: "b", LabelNames: []string{"type"}, Nested: &AttrSpec{ Name: "arg", Type: cty.DynamicPseudoType, }, }, nil, cty.ObjectVal(map[string]cty.Value{ "foo": cty.True, "bar": cty.NumberIntVal(1), }), 0, }, } for i, test := range tests { t.Run(fmt.Sprintf("%02d-%s", i, test.config), func(t *testing.T) { file, parseDiags := hclsyntax.ParseConfig([]byte(test.config), "", hcl.Pos{Line: 1, Column: 1, Byte: 0}) body := file.Body got, valDiags := Decode(body, test.spec, test.ctx) var diags hcl.Diagnostics diags = append(diags, parseDiags...) diags = append(diags, valDiags...) if len(diags) != test.diagCount { t.Errorf("wrong number of diagnostics %d; want %d", len(diags), test.diagCount) for _, diag := range diags { t.Logf(" - %s", diag.Error()) } } if !got.RawEquals(test.want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, test.want) } }) } } func TestSourceRange(t *testing.T) { tests := []struct { config string spec Spec want hcl.Range }{ { "a = 1\n", &AttrSpec{ Name: "a", }, hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, { ` b { a = 1 } `, &BlockSpec{ TypeName: "b", Nested: &AttrSpec{ Name: "a", }, }, hcl.Range{ Start: hcl.Pos{Line: 3, Column: 7, Byte: 11}, End: hcl.Pos{Line: 3, Column: 8, Byte: 12}, }, }, { ` b { c { a = 1 } } `, &BlockSpec{ TypeName: "b", Nested: &BlockSpec{ TypeName: "c", Nested: &AttrSpec{ Name: "a", }, }, }, hcl.Range{ Start: hcl.Pos{Line: 4, Column: 9, Byte: 19}, End: hcl.Pos{Line: 4, Column: 10, Byte: 20}, }, }, } for i, test := range tests { t.Run(fmt.Sprintf("%02d-%s", i, test.config), func(t *testing.T) { file, diags := hclsyntax.ParseConfig([]byte(test.config), "", hcl.Pos{Line: 1, Column: 1, Byte: 0}) if len(diags) != 0 { t.Errorf("wrong number of diagnostics %d; want %d", len(diags), 0) for _, diag := range diags { t.Logf(" - %s", diag.Error()) } } body := file.Body got := SourceRange(body, test.spec) if !reflect.DeepEqual(got, test.want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, test.want) } }) } } hcl-2.14.1/hcldec/schema.go000066400000000000000000000016301431334125700153610ustar00rootroot00000000000000package hcldec import ( "github.com/hashicorp/hcl/v2" ) // ImpliedSchema returns the *hcl.BodySchema implied by the given specification. // This is the schema that the Decode function will use internally to // access the content of a given body. func ImpliedSchema(spec Spec) *hcl.BodySchema { var attrs []hcl.AttributeSchema var blocks []hcl.BlockHeaderSchema // visitSameBodyChildren walks through the spec structure, calling // the given callback for each descendent spec encountered. We are // interested in the specs that reference attributes and blocks. var visit visitFunc visit = func(s Spec) { if as, ok := s.(attrSpec); ok { attrs = append(attrs, as.attrSchemata()...) } if bs, ok := s.(blockSpec); ok { blocks = append(blocks, bs.blockHeaderSchemata()...) 
} s.visitSameBodyChildren(visit) } visit(spec) return &hcl.BodySchema{ Attributes: attrs, Blocks: blocks, } } hcl-2.14.1/hcldec/spec.go000066400000000000000000001376361431334125700150730ustar00rootroot00000000000000package hcldec import ( "bytes" "fmt" "sort" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/customdecode" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" "github.com/zclconf/go-cty/cty/function" ) // A Spec is a description of how to decode a hcl.Body to a cty.Value. // // The various other types in this package whose names end in "Spec" are // the spec implementations. The most common top-level spec is ObjectSpec, // which decodes body content into a cty.Value of an object type. type Spec interface { // Perform the decode operation on the given body, in the context of // the given block (which might be null), using the given eval context. // // "block" is provided only by the nested calls performed by the spec // types that work on block bodies. decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) // Return the cty.Type that should be returned when decoding a body with // this spec. impliedType() cty.Type // Call the given callback once for each of the nested specs that would // get decoded with the same body and block as the receiver. This should // not descend into the nested specs used when decoding blocks. visitSameBodyChildren(cb visitFunc) // Determine the source range of the value that would be returned for the // spec in the given content, in the context of the given block // (which might be null). If the corresponding item is missing, return // a place where it might be inserted. sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range } type visitFunc func(spec Spec) // An ObjectSpec is a Spec that produces a cty.Value of an object type whose // attributes correspond to the keys of the spec map. type ObjectSpec map[string]Spec // attrSpec is implemented by specs that require attributes from the body. type attrSpec interface { attrSchemata() []hcl.AttributeSchema } // blockSpec is implemented by specs that require blocks from the body. type blockSpec interface { blockHeaderSchemata() []hcl.BlockHeaderSchema nestedSpec() Spec } // specNeedingVariables is implemented by specs that can use variables // from the EvalContext, to declare which variables they need. type specNeedingVariables interface { variablesNeeded(content *hcl.BodyContent) []hcl.Traversal } // UnknownBody can be optionally implemented by an hcl.Body instance which may // be entirely unknown. type UnknownBody interface { Unknown() bool } func (s ObjectSpec) visitSameBodyChildren(cb visitFunc) { for _, c := range s { cb(c) } } func (s ObjectSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { vals := make(map[string]cty.Value, len(s)) var diags hcl.Diagnostics for k, spec := range s { var kd hcl.Diagnostics vals[k], kd = spec.decode(content, blockLabels, ctx) diags = append(diags, kd...) } return cty.ObjectVal(vals), diags } func (s ObjectSpec) impliedType() cty.Type { if len(s) == 0 { return cty.EmptyObject } attrTypes := make(map[string]cty.Type) for k, childSpec := range s { attrTypes[k] = childSpec.impliedType() } return cty.Object(attrTypes) } func (s ObjectSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { // This is not great, but the best we can do. 
In practice, it's rather // strange to ask for the source range of an entire top-level body, since // that's already readily available to the caller. return content.MissingItemRange } // A TupleSpec is a Spec that produces a cty.Value of a tuple type whose // elements correspond to the elements of the spec slice. type TupleSpec []Spec func (s TupleSpec) visitSameBodyChildren(cb visitFunc) { for _, c := range s { cb(c) } } func (s TupleSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { vals := make([]cty.Value, len(s)) var diags hcl.Diagnostics for i, spec := range s { var ed hcl.Diagnostics vals[i], ed = spec.decode(content, blockLabels, ctx) diags = append(diags, ed...) } return cty.TupleVal(vals), diags } func (s TupleSpec) impliedType() cty.Type { if len(s) == 0 { return cty.EmptyTuple } attrTypes := make([]cty.Type, len(s)) for i, childSpec := range s { attrTypes[i] = childSpec.impliedType() } return cty.Tuple(attrTypes) } func (s TupleSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { // This is not great, but the best we can do. In practice, it's rather // strange to ask for the source range of an entire top-level body, since // that's already readily available to the caller. return content.MissingItemRange } // An AttrSpec is a Spec that evaluates a particular attribute expression in // the body and returns its resulting value converted to the requested type, // or produces a diagnostic if the type is incorrect. type AttrSpec struct { Name string Type cty.Type Required bool } func (s *AttrSpec) visitSameBodyChildren(cb visitFunc) { // leaf node } // specNeedingVariables implementation func (s *AttrSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal { attr, exists := content.Attributes[s.Name] if !exists { return nil } return attr.Expr.Variables() } // attrSpec implementation func (s *AttrSpec) attrSchemata() []hcl.AttributeSchema { return []hcl.AttributeSchema{ { Name: s.Name, Required: s.Required, }, } } func (s *AttrSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { attr, exists := content.Attributes[s.Name] if !exists { return content.MissingItemRange } return attr.Expr.Range() } func (s *AttrSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { attr, exists := content.Attributes[s.Name] if !exists { // We don't need to check required and emit a diagnostic here, because // that would already have happened when building "content". return cty.NullVal(s.Type), nil } if decodeFn := customdecode.CustomExpressionDecoderForType(s.Type); decodeFn != nil { v, diags := decodeFn(attr.Expr, ctx) if v == cty.NilVal { v = cty.UnknownVal(s.Type) } return v, diags } val, diags := attr.Expr.Value(ctx) convVal, err := convert.Convert(val, s.Type) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect attribute value type", Detail: fmt.Sprintf( "Inappropriate value for attribute %q: %s.", s.Name, err.Error(), ), Subject: attr.Expr.Range().Ptr(), Context: hcl.RangeBetween(attr.NameRange, attr.Expr.Range()).Ptr(), Expression: attr.Expr, EvalContext: ctx, }) // We'll return an unknown value of the _correct_ type so that the // incomplete result can still be used for some analysis use-cases. 
val = cty.UnknownVal(s.Type) } else { val = convVal } return val, diags } func (s *AttrSpec) impliedType() cty.Type { return s.Type } // A LiteralSpec is a Spec that produces the given literal value, ignoring // the given body. type LiteralSpec struct { Value cty.Value } func (s *LiteralSpec) visitSameBodyChildren(cb visitFunc) { // leaf node } func (s *LiteralSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { return s.Value, nil } func (s *LiteralSpec) impliedType() cty.Type { return s.Value.Type() } func (s *LiteralSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { // No sensible range to return for a literal, so the caller had better // ensure it doesn't cause any diagnostics. return hcl.Range{ Filename: "", } } // An ExprSpec is a Spec that evaluates the given expression, ignoring the // given body. type ExprSpec struct { Expr hcl.Expression } func (s *ExprSpec) visitSameBodyChildren(cb visitFunc) { // leaf node } // specNeedingVariables implementation func (s *ExprSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal { return s.Expr.Variables() } func (s *ExprSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { return s.Expr.Value(ctx) } func (s *ExprSpec) impliedType() cty.Type { // We can't know the type of our expression until we evaluate it return cty.DynamicPseudoType } func (s *ExprSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { return s.Expr.Range() } // A BlockSpec is a Spec that produces a cty.Value by decoding the contents // of a single nested block of a given type, using a nested spec. // // If the Required flag is not set, the nested block may be omitted, in which // case a null value is produced. If it _is_ set, an error diagnostic is // produced if there are no nested blocks of the given type. type BlockSpec struct { TypeName string Nested Spec Required bool } func (s *BlockSpec) visitSameBodyChildren(cb visitFunc) { // leaf node ("Nested" does not use the same body) } // blockSpec implementation func (s *BlockSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema { return []hcl.BlockHeaderSchema{ { Type: s.TypeName, LabelNames: findLabelSpecs(s.Nested), }, } } // blockSpec implementation func (s *BlockSpec) nestedSpec() Spec { return s.Nested } // specNeedingVariables implementation func (s *BlockSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal { var childBlock *hcl.Block for _, candidate := range content.Blocks { if candidate.Type != s.TypeName { continue } childBlock = candidate break } if childBlock == nil { return nil } return Variables(childBlock.Body, s.Nested) } func (s *BlockSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var diags hcl.Diagnostics var childBlock *hcl.Block for _, candidate := range content.Blocks { if candidate.Type != s.TypeName { continue } if childBlock != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Duplicate %s block", s.TypeName), Detail: fmt.Sprintf( "Only one block of type %q is allowed. 
Previous definition was at %s.", s.TypeName, childBlock.DefRange.String(), ), Subject: &candidate.DefRange, }) break } childBlock = candidate } if childBlock == nil { if s.Required { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Missing %s block", s.TypeName), Detail: fmt.Sprintf( "A block of type %q is required here.", s.TypeName, ), Subject: &content.MissingItemRange, }) } return cty.NullVal(s.Nested.impliedType()), diags } if s.Nested == nil { panic("BlockSpec with no Nested Spec") } val, _, childDiags := decode(childBlock.Body, labelsForBlock(childBlock), ctx, s.Nested, false) diags = append(diags, childDiags...) return val, diags } func (s *BlockSpec) impliedType() cty.Type { return s.Nested.impliedType() } func (s *BlockSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { var childBlock *hcl.Block for _, candidate := range content.Blocks { if candidate.Type != s.TypeName { continue } childBlock = candidate break } if childBlock == nil { return content.MissingItemRange } return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested) } // A BlockListSpec is a Spec that produces a cty list of the results of // decoding all of the nested blocks of a given type, using a nested spec. type BlockListSpec struct { TypeName string Nested Spec MinItems int MaxItems int } func (s *BlockListSpec) visitSameBodyChildren(cb visitFunc) { // leaf node ("Nested" does not use the same body) } // blockSpec implementation func (s *BlockListSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema { return []hcl.BlockHeaderSchema{ { Type: s.TypeName, LabelNames: findLabelSpecs(s.Nested), }, } } // blockSpec implementation func (s *BlockListSpec) nestedSpec() Spec { return s.Nested } // specNeedingVariables implementation func (s *BlockListSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal { var ret []hcl.Traversal for _, childBlock := range content.Blocks { if childBlock.Type != s.TypeName { continue } ret = append(ret, Variables(childBlock.Body, s.Nested)...) } return ret } func (s *BlockListSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var diags hcl.Diagnostics if s.Nested == nil { panic("BlockListSpec with no Nested Spec") } var elems []cty.Value var sourceRanges []hcl.Range for _, childBlock := range content.Blocks { if childBlock.Type != s.TypeName { continue } val, _, childDiags := decode(childBlock.Body, labelsForBlock(childBlock), ctx, s.Nested, false) diags = append(diags, childDiags...) 
if u, ok := childBlock.Body.(UnknownBody); ok { if u.Unknown() { // If any block Body is unknown, then the entire block value // must be unknown return cty.UnknownVal(s.impliedType()), diags } } elems = append(elems, val) sourceRanges = append(sourceRanges, sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested)) } if len(elems) < s.MinItems { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Insufficient %s blocks", s.TypeName), Detail: fmt.Sprintf("At least %d %q blocks are required.", s.MinItems, s.TypeName), Subject: &content.MissingItemRange, }) } else if s.MaxItems > 0 && len(elems) > s.MaxItems { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Too many %s blocks", s.TypeName), Detail: fmt.Sprintf("No more than %d %q blocks are allowed", s.MaxItems, s.TypeName), Subject: &sourceRanges[s.MaxItems], }) } if len(elems) == 0 { return cty.ListValEmpty(s.Nested.impliedType()), diags } // Since our target is a list, all of the decoded elements must have the // same type or cty.ListVal will panic below. Different types can arise // if there is an attribute spec of type cty.DynamicPseudoType in the // nested spec; all given values must be convertable to a single type // in order for the result to be considered valid. etys := make([]cty.Type, len(elems)) for i, v := range elems { etys[i] = v.Type() } ety, convs := convert.UnifyUnsafe(etys) if ety == cty.NilType { // FIXME: This is a pretty terrible error message. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Unconsistent argument types in %s blocks", s.TypeName), Detail: "Corresponding attributes in all blocks of this type must be the same.", Subject: &sourceRanges[0], }) return cty.DynamicVal, diags } for i, v := range elems { if convs[i] != nil { newV, err := convs[i](v) if err != nil { // FIXME: This is a pretty terrible error message. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Unconsistent argument types in %s blocks", s.TypeName), Detail: fmt.Sprintf("Block with index %d has inconsistent argument types: %s.", i, err), Subject: &sourceRanges[i], }) // Bail early here so we won't panic below in cty.ListVal return cty.DynamicVal, diags } elems[i] = newV } } return cty.ListVal(elems), diags } func (s *BlockListSpec) impliedType() cty.Type { return cty.List(s.Nested.impliedType()) } func (s *BlockListSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { // We return the source range of the _first_ block of the given type, // since they are not guaranteed to form a contiguous range. var childBlock *hcl.Block for _, candidate := range content.Blocks { if candidate.Type != s.TypeName { continue } childBlock = candidate break } if childBlock == nil { return content.MissingItemRange } return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested) } // A BlockTupleSpec is a Spec that produces a cty tuple of the results of // decoding all of the nested blocks of a given type, using a nested spec. // // This is similar to BlockListSpec, but it permits the nested blocks to have // different result types in situations where cty.DynamicPseudoType attributes // are present. 
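//
// A minimal illustrative sketch of wiring up a BlockTupleSpec; the "rule"
// block type and "value" attribute name are hypothetical, not part of the
// API:
//
//	spec := &hcldec.BlockTupleSpec{
//		TypeName: "rule",
//		Nested: &hcldec.AttrSpec{
//			Name: "value",
//			Type: cty.DynamicPseudoType, // each block may produce a differently-typed element
//		},
//	}
//	val, diags := hcldec.Decode(body, spec, nil)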
type BlockTupleSpec struct { TypeName string Nested Spec MinItems int MaxItems int } func (s *BlockTupleSpec) visitSameBodyChildren(cb visitFunc) { // leaf node ("Nested" does not use the same body) } // blockSpec implementation func (s *BlockTupleSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema { return []hcl.BlockHeaderSchema{ { Type: s.TypeName, LabelNames: findLabelSpecs(s.Nested), }, } } // blockSpec implementation func (s *BlockTupleSpec) nestedSpec() Spec { return s.Nested } // specNeedingVariables implementation func (s *BlockTupleSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal { var ret []hcl.Traversal for _, childBlock := range content.Blocks { if childBlock.Type != s.TypeName { continue } ret = append(ret, Variables(childBlock.Body, s.Nested)...) } return ret } func (s *BlockTupleSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var diags hcl.Diagnostics if s.Nested == nil { panic("BlockListSpec with no Nested Spec") } var elems []cty.Value var sourceRanges []hcl.Range for _, childBlock := range content.Blocks { if childBlock.Type != s.TypeName { continue } val, _, childDiags := decode(childBlock.Body, labelsForBlock(childBlock), ctx, s.Nested, false) diags = append(diags, childDiags...) if u, ok := childBlock.Body.(UnknownBody); ok { if u.Unknown() { // If any block Body is unknown, then the entire block value // must be unknown return cty.UnknownVal(s.impliedType()), diags } } elems = append(elems, val) sourceRanges = append(sourceRanges, sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested)) } if len(elems) < s.MinItems { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Insufficient %s blocks", s.TypeName), Detail: fmt.Sprintf("At least %d %q blocks are required.", s.MinItems, s.TypeName), Subject: &content.MissingItemRange, }) } else if s.MaxItems > 0 && len(elems) > s.MaxItems { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Too many %s blocks", s.TypeName), Detail: fmt.Sprintf("No more than %d %q blocks are allowed", s.MaxItems, s.TypeName), Subject: &sourceRanges[s.MaxItems], }) } if len(elems) == 0 { return cty.EmptyTupleVal, diags } return cty.TupleVal(elems), diags } func (s *BlockTupleSpec) impliedType() cty.Type { // We can't predict our type, because we don't know how many blocks // there will be until we decode. return cty.DynamicPseudoType } func (s *BlockTupleSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { // We return the source range of the _first_ block of the given type, // since they are not guaranteed to form a contiguous range. var childBlock *hcl.Block for _, candidate := range content.Blocks { if candidate.Type != s.TypeName { continue } childBlock = candidate break } if childBlock == nil { return content.MissingItemRange } return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested) } // A BlockSetSpec is a Spec that produces a cty set of the results of // decoding all of the nested blocks of a given type, using a nested spec. 
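//
// A minimal illustrative sketch; the "tag" block type and "name" attribute
// are hypothetical. Because the result is a set, all decoded elements must
// unify to a single element type:
//
//	spec := &hcldec.BlockSetSpec{
//		TypeName: "tag",
//		MinItems: 1,
//		Nested: &hcldec.AttrSpec{
//			Name: "name",
//			Type: cty.String,
//		},
//	}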
type BlockSetSpec struct { TypeName string Nested Spec MinItems int MaxItems int } func (s *BlockSetSpec) visitSameBodyChildren(cb visitFunc) { // leaf node ("Nested" does not use the same body) } // blockSpec implementation func (s *BlockSetSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema { return []hcl.BlockHeaderSchema{ { Type: s.TypeName, LabelNames: findLabelSpecs(s.Nested), }, } } // blockSpec implementation func (s *BlockSetSpec) nestedSpec() Spec { return s.Nested } // specNeedingVariables implementation func (s *BlockSetSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal { var ret []hcl.Traversal for _, childBlock := range content.Blocks { if childBlock.Type != s.TypeName { continue } ret = append(ret, Variables(childBlock.Body, s.Nested)...) } return ret } func (s *BlockSetSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var diags hcl.Diagnostics if s.Nested == nil { panic("BlockSetSpec with no Nested Spec") } var elems []cty.Value var sourceRanges []hcl.Range for _, childBlock := range content.Blocks { if childBlock.Type != s.TypeName { continue } val, _, childDiags := decode(childBlock.Body, labelsForBlock(childBlock), ctx, s.Nested, false) diags = append(diags, childDiags...) if u, ok := childBlock.Body.(UnknownBody); ok { if u.Unknown() { // If any block Body is unknown, then the entire block value // must be unknown return cty.UnknownVal(s.impliedType()), diags } } elems = append(elems, val) sourceRanges = append(sourceRanges, sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested)) } if len(elems) < s.MinItems { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Insufficient %s blocks", s.TypeName), Detail: fmt.Sprintf("At least %d %q blocks are required.", s.MinItems, s.TypeName), Subject: &content.MissingItemRange, }) } else if s.MaxItems > 0 && len(elems) > s.MaxItems { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Too many %s blocks", s.TypeName), Detail: fmt.Sprintf("No more than %d %q blocks are allowed", s.MaxItems, s.TypeName), Subject: &sourceRanges[s.MaxItems], }) } if len(elems) == 0 { return cty.SetValEmpty(s.Nested.impliedType()), diags } // Since our target is a set, all of the decoded elements must have the // same type or cty.SetVal will panic below. Different types can arise // if there is an attribute spec of type cty.DynamicPseudoType in the // nested spec; all given values must be convertable to a single type // in order for the result to be considered valid. etys := make([]cty.Type, len(elems)) for i, v := range elems { etys[i] = v.Type() } ety, convs := convert.UnifyUnsafe(etys) if ety == cty.NilType { // FIXME: This is a pretty terrible error message. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Unconsistent argument types in %s blocks", s.TypeName), Detail: "Corresponding attributes in all blocks of this type must be the same.", Subject: &sourceRanges[0], }) return cty.DynamicVal, diags } for i, v := range elems { if convs[i] != nil { newV, err := convs[i](v) if err != nil { // FIXME: This is a pretty terrible error message. 
diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Unconsistent argument types in %s blocks", s.TypeName), Detail: fmt.Sprintf("Block with index %d has inconsistent argument types: %s.", i, err), Subject: &sourceRanges[i], }) // Bail early here so we won't panic below in cty.ListVal return cty.DynamicVal, diags } elems[i] = newV } } return cty.SetVal(elems), diags } func (s *BlockSetSpec) impliedType() cty.Type { return cty.Set(s.Nested.impliedType()) } func (s *BlockSetSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { // We return the source range of the _first_ block of the given type, // since they are not guaranteed to form a contiguous range. var childBlock *hcl.Block for _, candidate := range content.Blocks { if candidate.Type != s.TypeName { continue } childBlock = candidate break } if childBlock == nil { return content.MissingItemRange } return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested) } // A BlockMapSpec is a Spec that produces a cty map of the results of // decoding all of the nested blocks of a given type, using a nested spec. // // One level of map structure is created for each of the given label names. // There must be at least one given label name. type BlockMapSpec struct { TypeName string LabelNames []string Nested Spec } func (s *BlockMapSpec) visitSameBodyChildren(cb visitFunc) { // leaf node ("Nested" does not use the same body) } // blockSpec implementation func (s *BlockMapSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema { return []hcl.BlockHeaderSchema{ { Type: s.TypeName, LabelNames: append(s.LabelNames, findLabelSpecs(s.Nested)...), }, } } // blockSpec implementation func (s *BlockMapSpec) nestedSpec() Spec { return s.Nested } // specNeedingVariables implementation func (s *BlockMapSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal { var ret []hcl.Traversal for _, childBlock := range content.Blocks { if childBlock.Type != s.TypeName { continue } ret = append(ret, Variables(childBlock.Body, s.Nested)...) } return ret } func (s *BlockMapSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var diags hcl.Diagnostics if s.Nested == nil { panic("BlockMapSpec with no Nested Spec") } if ImpliedType(s).HasDynamicTypes() { panic("cty.DynamicPseudoType attributes may not be used inside a BlockMapSpec") } elems := map[string]interface{}{} for _, childBlock := range content.Blocks { if childBlock.Type != s.TypeName { continue } if u, ok := childBlock.Body.(UnknownBody); ok { if u.Unknown() { // If any block Body is unknown, then the entire block value // must be unknown return cty.UnknownVal(s.impliedType()), diags } } childLabels := labelsForBlock(childBlock) val, _, childDiags := decode(childBlock.Body, childLabels[len(s.LabelNames):], ctx, s.Nested, false) targetMap := elems for _, key := range childBlock.Labels[:len(s.LabelNames)-1] { if _, exists := targetMap[key]; !exists { targetMap[key] = make(map[string]interface{}) } targetMap = targetMap[key].(map[string]interface{}) } diags = append(diags, childDiags...) key := childBlock.Labels[len(s.LabelNames)-1] if _, exists := targetMap[key]; exists { labelsBuf := bytes.Buffer{} for _, label := range childBlock.Labels { fmt.Fprintf(&labelsBuf, " %q", label) } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Duplicate %s block", s.TypeName), Detail: fmt.Sprintf( "A block for %s%s was already defined. 
The %s labels must be unique.", s.TypeName, labelsBuf.String(), s.TypeName, ), Subject: &childBlock.DefRange, }) continue } targetMap[key] = val } if len(elems) == 0 { return cty.MapValEmpty(s.Nested.impliedType()), diags } var ctyMap func(map[string]interface{}, int) cty.Value ctyMap = func(raw map[string]interface{}, depth int) cty.Value { vals := make(map[string]cty.Value, len(raw)) if depth == 1 { for k, v := range raw { vals[k] = v.(cty.Value) } } else { for k, v := range raw { vals[k] = ctyMap(v.(map[string]interface{}), depth-1) } } return cty.MapVal(vals) } return ctyMap(elems, len(s.LabelNames)), diags } func (s *BlockMapSpec) impliedType() cty.Type { ret := s.Nested.impliedType() for _ = range s.LabelNames { ret = cty.Map(ret) } return ret } func (s *BlockMapSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { // We return the source range of the _first_ block of the given type, // since they are not guaranteed to form a contiguous range. var childBlock *hcl.Block for _, candidate := range content.Blocks { if candidate.Type != s.TypeName { continue } childBlock = candidate break } if childBlock == nil { return content.MissingItemRange } return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested) } // A BlockObjectSpec is a Spec that produces a cty object of the results of // decoding all of the nested blocks of a given type, using a nested spec. // // One level of object structure is created for each of the given label names. // There must be at least one given label name. // // This is similar to BlockMapSpec, but it permits the nested blocks to have // different result types in situations where cty.DynamicPseudoType attributes // are present. type BlockObjectSpec struct { TypeName string LabelNames []string Nested Spec } func (s *BlockObjectSpec) visitSameBodyChildren(cb visitFunc) { // leaf node ("Nested" does not use the same body) } // blockSpec implementation func (s *BlockObjectSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema { return []hcl.BlockHeaderSchema{ { Type: s.TypeName, LabelNames: append(s.LabelNames, findLabelSpecs(s.Nested)...), }, } } // blockSpec implementation func (s *BlockObjectSpec) nestedSpec() Spec { return s.Nested } // specNeedingVariables implementation func (s *BlockObjectSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal { var ret []hcl.Traversal for _, childBlock := range content.Blocks { if childBlock.Type != s.TypeName { continue } ret = append(ret, Variables(childBlock.Body, s.Nested)...) } return ret } func (s *BlockObjectSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var diags hcl.Diagnostics if s.Nested == nil { panic("BlockObjectSpec with no Nested Spec") } elems := map[string]interface{}{} for _, childBlock := range content.Blocks { if childBlock.Type != s.TypeName { continue } if u, ok := childBlock.Body.(UnknownBody); ok { if u.Unknown() { // If any block Body is unknown, then the entire block value // must be unknown return cty.UnknownVal(s.impliedType()), diags } } childLabels := labelsForBlock(childBlock) val, _, childDiags := decode(childBlock.Body, childLabels[len(s.LabelNames):], ctx, s.Nested, false) targetMap := elems for _, key := range childBlock.Labels[:len(s.LabelNames)-1] { if _, exists := targetMap[key]; !exists { targetMap[key] = make(map[string]interface{}) } targetMap = targetMap[key].(map[string]interface{}) } diags = append(diags, childDiags...) 
key := childBlock.Labels[len(s.LabelNames)-1] if _, exists := targetMap[key]; exists { labelsBuf := bytes.Buffer{} for _, label := range childBlock.Labels { fmt.Fprintf(&labelsBuf, " %q", label) } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Duplicate %s block", s.TypeName), Detail: fmt.Sprintf( "A block for %s%s was already defined. The %s labels must be unique.", s.TypeName, labelsBuf.String(), s.TypeName, ), Subject: &childBlock.DefRange, }) continue } targetMap[key] = val } if len(elems) == 0 { return cty.EmptyObjectVal, diags } var ctyObj func(map[string]interface{}, int) cty.Value ctyObj = func(raw map[string]interface{}, depth int) cty.Value { vals := make(map[string]cty.Value, len(raw)) if depth == 1 { for k, v := range raw { vals[k] = v.(cty.Value) } } else { for k, v := range raw { vals[k] = ctyObj(v.(map[string]interface{}), depth-1) } } return cty.ObjectVal(vals) } return ctyObj(elems, len(s.LabelNames)), diags } func (s *BlockObjectSpec) impliedType() cty.Type { // We can't predict our type, since we don't know how many blocks are // present and what labels they have until we decode. return cty.DynamicPseudoType } func (s *BlockObjectSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { // We return the source range of the _first_ block of the given type, // since they are not guaranteed to form a contiguous range. var childBlock *hcl.Block for _, candidate := range content.Blocks { if candidate.Type != s.TypeName { continue } childBlock = candidate break } if childBlock == nil { return content.MissingItemRange } return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested) } // A BlockAttrsSpec is a Spec that interprets a single block as if it were // a map of some element type. That is, each attribute within the block // becomes a key in the resulting map and the attribute's value becomes the // element value, after conversion to the given element type. The resulting // value is a cty.Map of the given element type. // // This spec imposes a validation constraint that there be exactly one block // of the given type name and that this block may contain only attributes. The // block does not accept any labels. // // This is an alternative to an AttrSpec of a map type for situations where // block syntax is desired. Note that block syntax does not permit dynamic // keys, construction of the result via a "for" expression, etc. In most cases // an AttrSpec is preferred if the desired result is a map whose keys are // chosen by the user rather than by schema. type BlockAttrsSpec struct { TypeName string ElementType cty.Type Required bool } func (s *BlockAttrsSpec) visitSameBodyChildren(cb visitFunc) { // leaf node } // blockSpec implementation func (s *BlockAttrsSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema { return []hcl.BlockHeaderSchema{ { Type: s.TypeName, LabelNames: nil, }, } } // blockSpec implementation func (s *BlockAttrsSpec) nestedSpec() Spec { // This is an odd case: we aren't actually going to apply a nested spec // in this case, since we're going to interpret the body directly as // attributes, but we need to return something non-nil so that the // decoder will recognize this as a block spec. We won't actually be // using this for anything at decode time. 
return noopSpec{} } // specNeedingVariables implementation func (s *BlockAttrsSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal { block, _ := s.findBlock(content) if block == nil { return nil } var vars []hcl.Traversal attrs, diags := block.Body.JustAttributes() if diags.HasErrors() { return nil } for _, attr := range attrs { vars = append(vars, attr.Expr.Variables()...) } // We'll return the variables references in source order so that any // error messages that result are also in source order. sort.Slice(vars, func(i, j int) bool { return vars[i].SourceRange().Start.Byte < vars[j].SourceRange().Start.Byte }) return vars } func (s *BlockAttrsSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var diags hcl.Diagnostics block, other := s.findBlock(content) if block == nil { if s.Required { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Missing %s block", s.TypeName), Detail: fmt.Sprintf( "A block of type %q is required here.", s.TypeName, ), Subject: &content.MissingItemRange, }) } return cty.NullVal(cty.Map(s.ElementType)), diags } if other != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Duplicate %s block", s.TypeName), Detail: fmt.Sprintf( "Only one block of type %q is allowed. Previous definition was at %s.", s.TypeName, block.DefRange.String(), ), Subject: &other.DefRange, }) } attrs, attrDiags := block.Body.JustAttributes() diags = append(diags, attrDiags...) if len(attrs) == 0 { return cty.MapValEmpty(s.ElementType), diags } vals := make(map[string]cty.Value, len(attrs)) for name, attr := range attrs { if decodeFn := customdecode.CustomExpressionDecoderForType(s.ElementType); decodeFn != nil { attrVal, attrDiags := decodeFn(attr.Expr, ctx) diags = append(diags, attrDiags...) if attrVal == cty.NilVal { attrVal = cty.UnknownVal(s.ElementType) } vals[name] = attrVal continue } attrVal, attrDiags := attr.Expr.Value(ctx) diags = append(diags, attrDiags...) attrVal, err := convert.Convert(attrVal, s.ElementType) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid attribute value", Detail: fmt.Sprintf("Invalid value for attribute of %q block: %s.", s.TypeName, err), Subject: attr.Expr.Range().Ptr(), Context: hcl.RangeBetween(attr.NameRange, attr.Expr.Range()).Ptr(), Expression: attr.Expr, EvalContext: ctx, }) attrVal = cty.UnknownVal(s.ElementType) } vals[name] = attrVal } return cty.MapVal(vals), diags } func (s *BlockAttrsSpec) impliedType() cty.Type { return cty.Map(s.ElementType) } func (s *BlockAttrsSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { block, _ := s.findBlock(content) if block == nil { return content.MissingItemRange } return block.DefRange } func (s *BlockAttrsSpec) findBlock(content *hcl.BodyContent) (block *hcl.Block, other *hcl.Block) { for _, candidate := range content.Blocks { if candidate.Type != s.TypeName { continue } if block != nil { return block, candidate } block = candidate } return block, nil } // A BlockLabelSpec is a Spec that returns a cty.String representing the // label of the block its given body belongs to, if indeed its given body // belongs to a block. It is a programming error to use this in a non-block // context, so this spec will panic in that case. // // This spec only works in the nested spec within a BlockSpec, BlockListSpec, // BlockSetSpec or BlockMapSpec. 
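//
// A minimal illustrative sketch, decoding hypothetical blocks of the form
// service "proxy" "public" { port = 8080 } (the block type, label names,
// and attribute shown here are examples only, not part of the API):
//
//	spec := &hcldec.BlockMapSpec{
//		TypeName:   "service",
//		LabelNames: []string{"type"},
//		Nested: hcldec.ObjectSpec{
//			"name": &hcldec.BlockLabelSpec{Index: 0, Name: "name"},
//			"port": &hcldec.AttrSpec{Name: "port", Type: cty.Number},
//		},
//	}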
// // The full set of label specs used against a particular block must have a // consecutive set of indices starting at zero. The maximum index found // defines how many labels the corresponding blocks must have in cty source. type BlockLabelSpec struct { Index int Name string } func (s *BlockLabelSpec) visitSameBodyChildren(cb visitFunc) { // leaf node } func (s *BlockLabelSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { if s.Index >= len(blockLabels) { panic("BlockListSpec used in non-block context") } return cty.StringVal(blockLabels[s.Index].Value), nil } func (s *BlockLabelSpec) impliedType() cty.Type { return cty.String // labels are always strings } func (s *BlockLabelSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { if s.Index >= len(blockLabels) { panic("BlockListSpec used in non-block context") } return blockLabels[s.Index].Range } func findLabelSpecs(spec Spec) []string { maxIdx := -1 var names map[int]string var visit visitFunc visit = func(s Spec) { if ls, ok := s.(*BlockLabelSpec); ok { if maxIdx < ls.Index { maxIdx = ls.Index } if names == nil { names = make(map[int]string) } names[ls.Index] = ls.Name } s.visitSameBodyChildren(visit) } visit(spec) if maxIdx < 0 { return nil // no labels at all } ret := make([]string, maxIdx+1) for i := range ret { name := names[i] if name == "" { // Should never happen if the spec is conformant, since we require // consecutive indices starting at zero. name = fmt.Sprintf("missing%02d", i) } ret[i] = name } return ret } // DefaultSpec is a spec that wraps two specs, evaluating the primary first // and then evaluating the default if the primary returns a null value. // // The two specifications must have the same implied result type for correct // operation. If not, the result is undefined. // // Any requirements imposed by the "Default" spec apply even if "Primary" does // not return null. For example, if the "Default" spec is for a required // attribute then that attribute is always required, regardless of the result // of the "Primary" spec. // // The "Default" spec must not describe a nested block, since otherwise the // result of ChildBlockTypes would not be decidable without evaluation. If // the default spec _does_ describe a nested block then the result is // undefined. type DefaultSpec struct { Primary Spec Default Spec } func (s *DefaultSpec) visitSameBodyChildren(cb visitFunc) { cb(s.Primary) cb(s.Default) } func (s *DefaultSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { val, diags := s.Primary.decode(content, blockLabels, ctx) if val.IsNull() { var moreDiags hcl.Diagnostics val, moreDiags = s.Default.decode(content, blockLabels, ctx) diags = append(diags, moreDiags...) } return val, diags } func (s *DefaultSpec) impliedType() cty.Type { return s.Primary.impliedType() } // attrSpec implementation func (s *DefaultSpec) attrSchemata() []hcl.AttributeSchema { // We must pass through the union of both of our nested specs so that // we'll have both values available in the result. var ret []hcl.AttributeSchema if as, ok := s.Primary.(attrSpec); ok { ret = append(ret, as.attrSchemata()...) } if as, ok := s.Default.(attrSpec); ok { ret = append(ret, as.attrSchemata()...) 
} return ret } // blockSpec implementation func (s *DefaultSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema { // Only the primary spec may describe a block, since otherwise // our nestedSpec method below can't know which to return. if bs, ok := s.Primary.(blockSpec); ok { return bs.blockHeaderSchemata() } return nil } // blockSpec implementation func (s *DefaultSpec) nestedSpec() Spec { if bs, ok := s.Primary.(blockSpec); ok { return bs.nestedSpec() } return nil } func (s *DefaultSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { // We can't tell from here which of the two specs will ultimately be used // in our result, so we'll just assume the first. This is usually the right // choice because the default is often a literal spec that doesn't have a // reasonable source range to return anyway. return s.Primary.sourceRange(content, blockLabels) } // TransformExprSpec is a spec that wraps another and then evaluates a given // hcl.Expression on the result. // // The implied type of this spec is determined by evaluating the expression // with an unknown value of the nested spec's implied type, which may cause // the result to be imprecise. This spec should not be used in situations where // precise result type information is needed. type TransformExprSpec struct { Wrapped Spec Expr hcl.Expression TransformCtx *hcl.EvalContext VarName string } func (s *TransformExprSpec) visitSameBodyChildren(cb visitFunc) { cb(s.Wrapped) } func (s *TransformExprSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { wrappedVal, diags := s.Wrapped.decode(content, blockLabels, ctx) if diags.HasErrors() { // We won't try to run our function in this case, because it'll probably // generate confusing additional errors that will distract from the // root cause. return cty.UnknownVal(s.impliedType()), diags } chiCtx := s.TransformCtx.NewChild() chiCtx.Variables = map[string]cty.Value{ s.VarName: wrappedVal, } resultVal, resultDiags := s.Expr.Value(chiCtx) diags = append(diags, resultDiags...) return resultVal, diags } func (s *TransformExprSpec) impliedType() cty.Type { wrappedTy := s.Wrapped.impliedType() chiCtx := s.TransformCtx.NewChild() chiCtx.Variables = map[string]cty.Value{ s.VarName: cty.UnknownVal(wrappedTy), } resultVal, _ := s.Expr.Value(chiCtx) return resultVal.Type() } func (s *TransformExprSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { // We'll just pass through our wrapped range here, even though that's // not super-accurate, because there's nothing better to return. return s.Wrapped.sourceRange(content, blockLabels) } // TransformFuncSpec is a spec that wraps another and then evaluates a given // cty function with the result. The given function must expect exactly one // argument, where the result of the wrapped spec will be passed. // // The implied type of this spec is determined by type-checking the function // with an unknown value of the nested spec's implied type, which may cause // the result to be imprecise. This spec should not be used in situations where // precise result type information is needed. // // If the given function produces an error when run, this spec will produce // a non-user-actionable diagnostic message. It's the caller's responsibility // to ensure that the given function cannot fail for any non-error result // of the wrapped spec. 
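//
// A minimal illustrative sketch; the "name" attribute and the use of
// stdlib.UpperFunc (from github.com/zclconf/go-cty/cty/function/stdlib)
// are hypothetical choices, not requirements:
//
//	spec := &hcldec.TransformFuncSpec{
//		Wrapped: &hcldec.AttrSpec{Name: "name", Type: cty.String},
//		Func:    stdlib.UpperFunc, // single-argument function applied to the wrapped result
//	}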
type TransformFuncSpec struct { Wrapped Spec Func function.Function } func (s *TransformFuncSpec) visitSameBodyChildren(cb visitFunc) { cb(s.Wrapped) } func (s *TransformFuncSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { wrappedVal, diags := s.Wrapped.decode(content, blockLabels, ctx) if diags.HasErrors() { // We won't try to run our function in this case, because it'll probably // generate confusing additional errors that will distract from the // root cause. return cty.UnknownVal(s.impliedType()), diags } resultVal, err := s.Func.Call([]cty.Value{wrappedVal}) if err != nil { // This is not a good example of a diagnostic because it is reporting // a programming error in the calling application, rather than something // an end-user could act on. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Transform function failed", Detail: fmt.Sprintf("Decoder transform returned an error: %s", err), Subject: s.sourceRange(content, blockLabels).Ptr(), }) return cty.UnknownVal(s.impliedType()), diags } return resultVal, diags } func (s *TransformFuncSpec) impliedType() cty.Type { wrappedTy := s.Wrapped.impliedType() resultTy, err := s.Func.ReturnType([]cty.Type{wrappedTy}) if err != nil { // Should never happen with a correctly-configured spec return cty.DynamicPseudoType } return resultTy } func (s *TransformFuncSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { // We'll just pass through our wrapped range here, even though that's // not super-accurate, because there's nothing better to return. return s.Wrapped.sourceRange(content, blockLabels) } // ValidateFuncSpec is a spec that allows for extended // developer-defined validation. The validation function receives the // result of the wrapped spec. // // The Subject field of the returned Diagnostic is optional. If not // specified, it is automatically populated with the range covered by // the wrapped spec. // type ValidateSpec struct { Wrapped Spec Func func(value cty.Value) hcl.Diagnostics } func (s *ValidateSpec) visitSameBodyChildren(cb visitFunc) { cb(s.Wrapped) } func (s *ValidateSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { wrappedVal, diags := s.Wrapped.decode(content, blockLabels, ctx) if diags.HasErrors() { // We won't try to run our function in this case, because it'll probably // generate confusing additional errors that will distract from the // root cause. return cty.UnknownVal(s.impliedType()), diags } validateDiags := s.Func(wrappedVal) // Auto-populate the Subject fields if they weren't set. for i := range validateDiags { if validateDiags[i].Subject == nil { validateDiags[i].Subject = s.sourceRange(content, blockLabels).Ptr() } } diags = append(diags, validateDiags...) return wrappedVal, diags } func (s *ValidateSpec) impliedType() cty.Type { return s.Wrapped.impliedType() } func (s *ValidateSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { return s.Wrapped.sourceRange(content, blockLabels) } // noopSpec is a placeholder spec that does nothing, used in situations where // a non-nil placeholder spec is required. It is not exported because there is // no reason to use it directly; it is always an implementation detail only. 
type noopSpec struct { } func (s noopSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { return cty.NullVal(cty.DynamicPseudoType), nil } func (s noopSpec) impliedType() cty.Type { return cty.DynamicPseudoType } func (s noopSpec) visitSameBodyChildren(cb visitFunc) { // nothing to do } func (s noopSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { // No useful range for a noopSpec, and nobody should be calling this anyway. return hcl.Range{ Filename: "noopSpec", } } hcl-2.14.1/hcldec/spec_test.go000066400000000000000000000120321431334125700161100ustar00rootroot00000000000000package hcldec import ( "fmt" "reflect" "testing" "github.com/apparentlymart/go-dump/dump" "github.com/zclconf/go-cty/cty" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" ) // Verify that all of our spec types implement the necessary interfaces var _ Spec = ObjectSpec(nil) var _ Spec = TupleSpec(nil) var _ Spec = (*AttrSpec)(nil) var _ Spec = (*LiteralSpec)(nil) var _ Spec = (*ExprSpec)(nil) var _ Spec = (*BlockSpec)(nil) var _ Spec = (*BlockListSpec)(nil) var _ Spec = (*BlockSetSpec)(nil) var _ Spec = (*BlockMapSpec)(nil) var _ Spec = (*BlockAttrsSpec)(nil) var _ Spec = (*BlockLabelSpec)(nil) var _ Spec = (*DefaultSpec)(nil) var _ Spec = (*TransformExprSpec)(nil) var _ Spec = (*TransformFuncSpec)(nil) var _ Spec = (*ValidateSpec)(nil) var _ attrSpec = (*AttrSpec)(nil) var _ attrSpec = (*DefaultSpec)(nil) var _ blockSpec = (*BlockSpec)(nil) var _ blockSpec = (*BlockListSpec)(nil) var _ blockSpec = (*BlockSetSpec)(nil) var _ blockSpec = (*BlockMapSpec)(nil) var _ blockSpec = (*BlockAttrsSpec)(nil) var _ blockSpec = (*DefaultSpec)(nil) var _ specNeedingVariables = (*AttrSpec)(nil) var _ specNeedingVariables = (*BlockSpec)(nil) var _ specNeedingVariables = (*BlockListSpec)(nil) var _ specNeedingVariables = (*BlockSetSpec)(nil) var _ specNeedingVariables = (*BlockMapSpec)(nil) var _ specNeedingVariables = (*BlockAttrsSpec)(nil) func TestDefaultSpec(t *testing.T) { config := ` foo = fooval bar = barval ` f, diags := hclsyntax.ParseConfig([]byte(config), "", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Fatal(diags.Error()) } t.Run("primary set", func(t *testing.T) { spec := &DefaultSpec{ Primary: &AttrSpec{ Name: "foo", Type: cty.String, }, Default: &AttrSpec{ Name: "bar", Type: cty.String, }, } gotVars := Variables(f.Body, spec) wantVars := []hcl.Traversal{ { hcl.TraverseRoot{ Name: "fooval", SrcRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 2, Column: 7, Byte: 7}, End: hcl.Pos{Line: 2, Column: 13, Byte: 13}, }, }, }, { hcl.TraverseRoot{ Name: "barval", SrcRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 3, Column: 7, Byte: 20}, End: hcl.Pos{Line: 3, Column: 13, Byte: 26}, }, }, }, } if !reflect.DeepEqual(gotVars, wantVars) { t.Errorf("wrong Variables result\ngot: %s\nwant: %s", dump.Value(gotVars), dump.Value(wantVars)) } ctx := &hcl.EvalContext{ Variables: map[string]cty.Value{ "fooval": cty.StringVal("foo value"), "barval": cty.StringVal("bar value"), }, } got, err := Decode(f.Body, spec, ctx) if err != nil { t.Fatal(err) } want := cty.StringVal("foo value") if !got.RawEquals(want) { t.Errorf("wrong Decode result\ngot: %#v\nwant: %#v", got, want) } }) t.Run("primary not set", func(t *testing.T) { spec := &DefaultSpec{ Primary: &AttrSpec{ Name: "foo", Type: cty.String, }, Default: &AttrSpec{ Name: "bar", Type: cty.String, }, } ctx := &hcl.EvalContext{ Variables: map[string]cty.Value{ 
"fooval": cty.NullVal(cty.String), "barval": cty.StringVal("bar value"), }, } got, err := Decode(f.Body, spec, ctx) if err != nil { t.Fatal(err) } want := cty.StringVal("bar value") if !got.RawEquals(want) { t.Errorf("wrong Decode result\ngot: %#v\nwant: %#v", got, want) } }) } func TestValidateFuncSpec(t *testing.T) { config := ` foo = "invalid" ` f, diags := hclsyntax.ParseConfig([]byte(config), "", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Fatal(diags.Error()) } expectRange := map[string]*hcl.Range{ "without_range": nil, "with_range": &hcl.Range{ Filename: "foobar", Start: hcl.Pos{Line: 99, Column: 99}, End: hcl.Pos{Line: 999, Column: 999}, }, } for name := range expectRange { t.Run(name, func(t *testing.T) { spec := &ValidateSpec{ Wrapped: &AttrSpec{ Name: "foo", Type: cty.String, }, Func: func(value cty.Value) hcl.Diagnostics { if value.AsString() != "invalid" { return hcl.Diagnostics{ &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "incorrect value", Detail: fmt.Sprintf("invalid value passed in: %s", value.GoString()), }, } } return hcl.Diagnostics{ &hcl.Diagnostic{ Severity: hcl.DiagWarning, Summary: "OK", Detail: "validation called correctly", Subject: expectRange[name], }, } }, } _, diags = Decode(f.Body, spec, nil) if len(diags) != 1 || diags[0].Severity != hcl.DiagWarning || diags[0].Summary != "OK" || diags[0].Detail != "validation called correctly" { t.Fatalf("unexpected diagnostics: %s", diags.Error()) } if expectRange[name] == nil && diags[0].Subject == nil { t.Fatal("returned diagnostic subject missing") } if expectRange[name] != nil && !reflect.DeepEqual(expectRange[name], diags[0].Subject) { t.Fatalf("expected range %s, got range %s", expectRange[name], diags[0].Subject) } }) } } hcl-2.14.1/hcldec/variables.go000066400000000000000000000021321431334125700160670ustar00rootroot00000000000000package hcldec import ( "github.com/hashicorp/hcl/v2" ) // Variables processes the given body with the given spec and returns a // list of the variable traversals that would be required to decode // the same pairing of body and spec. // // This can be used to conditionally populate the variables in the EvalContext // passed to Decode, for applications where a static scope is insufficient. // // If the given body is not compliant with the given schema, the result may // be incomplete, but that's assumed to be okay because the eventual call // to Decode will produce error diagnostics anyway. func Variables(body hcl.Body, spec Spec) []hcl.Traversal { var vars []hcl.Traversal schema := ImpliedSchema(spec) content, _, _ := body.PartialContent(schema) if vs, ok := spec.(specNeedingVariables); ok { vars = append(vars, vs.variablesNeeded(content)...) } var visitFn visitFunc visitFn = func(s Spec) { if vs, ok := s.(specNeedingVariables); ok { vars = append(vars, vs.variablesNeeded(content)...) 
} s.visitSameBodyChildren(visitFn) } spec.visitSameBodyChildren(visitFn) return vars } hcl-2.14.1/hcldec/variables_test.go000066400000000000000000000071671431334125700171430ustar00rootroot00000000000000package hcldec import ( "fmt" "reflect" "testing" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/zclconf/go-cty/cty" ) func TestVariables(t *testing.T) { tests := []struct { config string spec Spec want []hcl.Traversal }{ { ``, &ObjectSpec{}, nil, }, { "a = foo\n", &ObjectSpec{}, nil, // "a" is not actually used, so "foo" is not required }, { "a = foo\n", &AttrSpec{ Name: "a", }, []hcl.Traversal{ { hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, }, }, }, }, { "a = foo\nb = bar\n", &DefaultSpec{ Primary: &AttrSpec{ Name: "a", }, Default: &AttrSpec{ Name: "b", }, }, []hcl.Traversal{ { hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, }, }, { hcl.TraverseRoot{ Name: "bar", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 5, Byte: 12}, End: hcl.Pos{Line: 2, Column: 8, Byte: 15}, }, }, }, }, }, { "a = foo\n", &ObjectSpec{ "a": &AttrSpec{ Name: "a", }, }, []hcl.Traversal{ { hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, }, }, }, }, { ` b { a = foo } `, &BlockSpec{ TypeName: "b", Nested: &AttrSpec{ Name: "a", }, }, []hcl.Traversal{ { hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 3, Column: 7, Byte: 11}, End: hcl.Pos{Line: 3, Column: 10, Byte: 14}, }, }, }, }, }, { ` b { a = foo b = bar } `, &BlockAttrsSpec{ TypeName: "b", ElementType: cty.String, }, []hcl.Traversal{ { hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 3, Column: 7, Byte: 11}, End: hcl.Pos{Line: 3, Column: 10, Byte: 14}, }, }, }, { hcl.TraverseRoot{ Name: "bar", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 4, Column: 7, Byte: 21}, End: hcl.Pos{Line: 4, Column: 10, Byte: 24}, }, }, }, }, }, { ` b { a = foo } b { a = bar } c { a = baz } `, &BlockListSpec{ TypeName: "b", Nested: &AttrSpec{ Name: "a", }, }, []hcl.Traversal{ { hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 3, Column: 7, Byte: 11}, End: hcl.Pos{Line: 3, Column: 10, Byte: 14}, }, }, }, { hcl.TraverseRoot{ Name: "bar", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 6, Column: 7, Byte: 27}, End: hcl.Pos{Line: 6, Column: 10, Byte: 30}, }, }, }, }, }, } for i, test := range tests { t.Run(fmt.Sprintf("%02d-%s", i, test.config), func(t *testing.T) { file, diags := hclsyntax.ParseConfig([]byte(test.config), "", hcl.Pos{Line: 1, Column: 1, Byte: 0}) if len(diags) != 0 { t.Errorf("wrong number of diagnostics from ParseConfig %d; want %d", len(diags), 0) for _, diag := range diags { t.Logf(" - %s", diag.Error()) } } body := file.Body got := Variables(body, test.spec) if !reflect.DeepEqual(got, test.want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, test.want) } }) } } hcl-2.14.1/hcled/000077500000000000000000000000001431334125700134275ustar00rootroot00000000000000hcl-2.14.1/hcled/doc.go000066400000000000000000000003351431334125700145240ustar00rootroot00000000000000// Package hcled provides functionality intended to help an application // that embeds HCL to deliver relevant information to a text editor or IDE // for navigating around and analyzing configuration files. 
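//
// For example, an editor integration might look up a human-readable
// description of whatever construct encloses the cursor (the file and
// offset values here are hypothetical):
//
//	desc := hcled.ContextString(file, cursorOffset)
//	if desc != "" {
//		// Show desc in a status bar or hover tooltip.
//	}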
package hcled hcl-2.14.1/hcled/navigation.go000066400000000000000000000015671431334125700161260ustar00rootroot00000000000000package hcled import ( "github.com/hashicorp/hcl/v2" ) type contextStringer interface { ContextString(offset int) string } // ContextString returns a string describing the context of the given byte // offset, if available. An empty string is returned if no such information // is available, or otherwise the returned string is in a form that depends // on the language used to write the referenced file. func ContextString(file *hcl.File, offset int) string { if cser, ok := file.Nav.(contextStringer); ok { return cser.ContextString(offset) } return "" } type contextDefRanger interface { ContextDefRange(offset int) hcl.Range } func ContextDefRange(file *hcl.File, offset int) hcl.Range { if cser, ok := file.Nav.(contextDefRanger); ok { defRange := cser.ContextDefRange(offset) if !defRange.Empty() { return defRange } } return file.Body.MissingItemRange() } hcl-2.14.1/hclparse/000077500000000000000000000000001431334125700141515ustar00rootroot00000000000000hcl-2.14.1/hclparse/parser.go000066400000000000000000000112501431334125700157730ustar00rootroot00000000000000// Package hclparse has the main API entry point for parsing both HCL native // syntax and HCL JSON. // // The main HCL package also includes SimpleParse and SimpleParseFile which // can be a simpler interface for the common case where an application just // needs to parse a single file. The gohcl package simplifies that further // in its SimpleDecode function, which combines hcl.SimpleParse with decoding // into Go struct values // // Package hclparse, then, is useful for applications that require more fine // control over parsing or which need to load many separate files and keep // track of them for possible error reporting or other analysis. package hclparse import ( "fmt" "io/ioutil" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/hashicorp/hcl/v2/json" ) // NOTE: This is the public interface for parsing. The actual parsers are // in other packages alongside this one, with this package just wrapping them // to provide a unified interface for the caller across all supported formats. // Parser is the main interface for parsing configuration files. As well as // parsing files, a parser also retains a registry of all of the files it // has parsed so that multiple attempts to parse the same file will return // the same object and so the collected files can be used when printing // diagnostics. // // Any diagnostics for parsing a file are only returned once on the first // call to parse that file. Callers are expected to collect up diagnostics // and present them together, so returning diagnostics for the same file // multiple times would create a confusing result. type Parser struct { files map[string]*hcl.File } // NewParser creates a new parser, ready to parse configuration files. func NewParser() *Parser { return &Parser{ files: map[string]*hcl.File{}, } } // ParseHCL parses the given buffer (which is assumed to have been loaded from // the given filename) as a native-syntax configuration file and returns the // hcl.File object representing it. 
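//
// A minimal illustrative sketch of parsing a buffer (the source text and
// filename are hypothetical):
//
//	p := hclparse.NewParser()
//	f, diags := p.ParseHCL([]byte(`io_mode = "async"`), "config.hcl")
//	if diags.HasErrors() {
//		// Render diags, using p.Files() to add contextual source snippets.
//	}
//	_ = f // f.Body can then be decoded with hcldec, gohcl, etc.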
func (p *Parser) ParseHCL(src []byte, filename string) (*hcl.File, hcl.Diagnostics) { if existing := p.files[filename]; existing != nil { return existing, nil } file, diags := hclsyntax.ParseConfig(src, filename, hcl.Pos{Byte: 0, Line: 1, Column: 1}) p.files[filename] = file return file, diags } // ParseHCLFile reads the given filename and parses it as a native-syntax HCL // configuration file. An error diagnostic is returned if the given file // cannot be read. func (p *Parser) ParseHCLFile(filename string) (*hcl.File, hcl.Diagnostics) { if existing := p.files[filename]; existing != nil { return existing, nil } src, err := ioutil.ReadFile(filename) if err != nil { return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Failed to read file", Detail: fmt.Sprintf("The configuration file %q could not be read.", filename), }, } } return p.ParseHCL(src, filename) } // ParseJSON parses the given JSON buffer (which is assumed to have been loaded // from the given filename) and returns the hcl.File object representing it. func (p *Parser) ParseJSON(src []byte, filename string) (*hcl.File, hcl.Diagnostics) { if existing := p.files[filename]; existing != nil { return existing, nil } file, diags := json.Parse(src, filename) p.files[filename] = file return file, diags } // ParseJSONFile reads the given filename and parses it as JSON, similarly to // ParseJSON. An error diagnostic is returned if the given file cannot be read. func (p *Parser) ParseJSONFile(filename string) (*hcl.File, hcl.Diagnostics) { if existing := p.files[filename]; existing != nil { return existing, nil } file, diags := json.ParseFile(filename) p.files[filename] = file return file, diags } // AddFile allows a caller to record in a parser a file that was parsed some // other way, thus allowing it to be included in the registry of sources. func (p *Parser) AddFile(filename string, file *hcl.File) { p.files[filename] = file } // Sources returns a map from filenames to the raw source code that was // read from them. This is intended to be used, for example, to print // diagnostics with contextual information. // // The arrays underlying the returned slices should not be modified. func (p *Parser) Sources() map[string][]byte { ret := make(map[string][]byte) for fn, f := range p.files { ret[fn] = f.Bytes } return ret } // Files returns a map from filenames to the File objects produced from them. // This is intended to be used, for example, to print diagnostics with // contextual information. // // The returned map and all of the objects it refers to directly or indirectly // must not be modified. func (p *Parser) Files() map[string]*hcl.File { return p.files } hcl-2.14.1/hclsimple/000077500000000000000000000000001431334125700143305ustar00rootroot00000000000000hcl-2.14.1/hclsimple/hclsimple.go000066400000000000000000000074271431334125700166510ustar00rootroot00000000000000// Package hclsimple is a higher-level entry point for loading HCL // configuration files directly into Go struct values in a single step. // // This package is more opinionated than the rest of the HCL API. See the // documentation for function Decode for more information. package hclsimple import ( "fmt" "io/ioutil" "os" "path/filepath" "strings" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/gohcl" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/hashicorp/hcl/v2/json" ) // Decode parses, decodes, and evaluates expressions in the given HCL source // code, in a single step. 
// // The main HCL API is built to allow applications that need to decompose // the processing steps into a pipeline, with different tasks done by // different parts of the program: parsing the source code into an abstract // representation, analysing the block structure, evaluating expressions, // and then extracting the results into a form consumable by the rest of // the program. // // This function does all of those steps in one call, going directly from // source code to a populated Go struct value. // // The "filename" and "src" arguments describe the input configuration. The // filename is used to add source location context to any returned error // messages and its suffix will choose one of the two supported syntaxes: // ".hcl" for native syntax, and ".json" for HCL JSON. The src must therefore // contain a sequence of bytes that is valid for the selected syntax. // // The "ctx" argument provides variables and functions for use during // expression evaluation. Applications that need no variables nor functions // can just pass nil. // // The "target" argument must be a pointer to a value of a struct type, // with struct tags as defined by the sibling package "gohcl". // // The return type is error but any non-nil error is guaranteed to be // type-assertable to hcl.Diagnostics for applications that wish to access // the full error details. // // This is a very opinionated function that is intended to serve the needs of // applications that are just using HCL for simple configuration and don't // need detailed control over the decoding process. Because this function is // just wrapping functionality elsewhere, if it doesn't meet your needs then // please consider copying it into your program and adapting it as needed. func Decode(filename string, src []byte, ctx *hcl.EvalContext, target interface{}) error { var file *hcl.File var diags hcl.Diagnostics switch suffix := strings.ToLower(filepath.Ext(filename)); suffix { case ".hcl": file, diags = hclsyntax.ParseConfig(src, filename, hcl.Pos{Line: 1, Column: 1}) case ".json": file, diags = json.Parse(src, filename) default: diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unsupported file format", Detail: fmt.Sprintf("Cannot read from %s: unrecognized file format suffix %q.", filename, suffix), }) return diags } if diags.HasErrors() { return diags } diags = gohcl.DecodeBody(file.Body, ctx, target) if diags.HasErrors() { return diags } return nil } // DecodeFile is a wrapper around Decode that first reads the given filename // from disk. See the Decode documentation for more information. 
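//
// A minimal usage sketch (illustrative; the Config struct, its field tags,
// and the file path are assumptions, not part of this package):
//
//	type Config struct {
//		IOMode string `hcl:"io_mode"`
//	}
//
//	var config Config
//	if err := hclsimple.DecodeFile("app.hcl", nil, &config); err != nil {
//		log.Fatalf("failed to load configuration: %s", err)
//	}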
func DecodeFile(filename string, ctx *hcl.EvalContext, target interface{}) error { src, err := ioutil.ReadFile(filename) if err != nil { if os.IsNotExist(err) { return hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Configuration file not found", Detail: fmt.Sprintf("The configuration file %s does not exist.", filename), }, } } return hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Failed to read configuration", Detail: fmt.Sprintf("Can't read %s: %s.", filename, err), }, } } return Decode(filename, src, ctx, target) } hcl-2.14.1/hclsimple/hclsimple_test.go000066400000000000000000000025601431334125700177010ustar00rootroot00000000000000package hclsimple_test import ( "fmt" "log" "reflect" "testing" "github.com/hashicorp/hcl/v2/hclsimple" ) func Example_nativeSyntax() { type Config struct { Foo string `hcl:"foo"` Baz string `hcl:"baz"` } const exampleConfig = ` foo = "bar" baz = "boop" ` var config Config err := hclsimple.Decode( "example.hcl", []byte(exampleConfig), nil, &config, ) if err != nil { log.Fatalf("Failed to load configuration: %s", err) } fmt.Printf("Configuration is %v\n", config) // Output: // Configuration is {bar boop} } func Example_jsonSyntax() { type Config struct { Foo string `hcl:"foo"` Baz string `hcl:"baz"` } const exampleConfig = ` { "foo": "bar", "baz": "boop" } ` var config Config err := hclsimple.Decode( "example.json", []byte(exampleConfig), nil, &config, ) if err != nil { log.Fatalf("Failed to load configuration: %s", err) } fmt.Printf("Configuration is %v\n", config) // Output: // Configuration is {bar boop} } func TestDecodeFile(t *testing.T) { type Config struct { Foo string `hcl:"foo"` Baz string `hcl:"baz"` } var got Config err := hclsimple.DecodeFile("testdata/test.hcl", nil, &got) if err != nil { t.Fatalf("unexpected error(s): %s", err) } want := Config{ Foo: "bar", Baz: "boop", } if !reflect.DeepEqual(got, want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, want) } } hcl-2.14.1/hclsimple/testdata/000077500000000000000000000000001431334125700161415ustar00rootroot00000000000000hcl-2.14.1/hclsimple/testdata/test.hcl000066400000000000000000000000311431334125700176020ustar00rootroot00000000000000foo = "bar" baz = "boop" hcl-2.14.1/hclsyntax/000077500000000000000000000000001431334125700143655ustar00rootroot00000000000000hcl-2.14.1/hclsyntax/diagnostics.go000066400000000000000000000014471431334125700172310ustar00rootroot00000000000000package hclsyntax import ( "github.com/hashicorp/hcl/v2" ) // setDiagEvalContext is an internal helper that will impose a particular // EvalContext on a set of diagnostics in-place, for any diagnostic that // does not already have an EvalContext set. // // We generally expect diagnostics to be immutable, but this is safe to use // on any Diagnostics where none of the contained Diagnostic objects have yet // been seen by a caller. Its purpose is to apply additional context to a // set of diagnostics produced by a "deeper" component as the stack unwinds // during expression evaluation. func setDiagEvalContext(diags hcl.Diagnostics, expr hcl.Expression, ctx *hcl.EvalContext) { for _, diag := range diags { if diag.Expression == nil { diag.Expression = expr diag.EvalContext = ctx } } } hcl-2.14.1/hclsyntax/didyoumean.go000066400000000000000000000014551431334125700170570ustar00rootroot00000000000000package hclsyntax import ( "github.com/agext/levenshtein" ) // nameSuggestion tries to find a name from the given slice of suggested names // that is close to the given name and returns it if found. 
If no suggestion // is close enough, returns the empty string. // // The suggestions are tried in order, so earlier suggestions take precedence // if the given string is similar to two or more suggestions. // // This function is intended to be used with a relatively-small number of // suggestions. It's not optimized for hundreds or thousands of them. func nameSuggestion(given string, suggestions []string) string { for _, suggestion := range suggestions { dist := levenshtein.Distance(given, suggestion, nil) if dist < 3 { // threshold determined experimentally return suggestion } } return "" } hcl-2.14.1/hclsyntax/didyoumean_test.go000066400000000000000000000016271431334125700201170ustar00rootroot00000000000000package hclsyntax import "testing" func TestNameSuggestion(t *testing.T) { var keywords = []string{"false", "true", "null"} tests := []struct { Input, Want string }{ {"true", "true"}, {"false", "false"}, {"null", "null"}, {"bananas", ""}, {"NaN", ""}, {"Inf", ""}, {"Infinity", ""}, {"void", ""}, {"undefined", ""}, {"ture", "true"}, {"tru", "true"}, {"tre", "true"}, {"treu", "true"}, {"rtue", "true"}, {"flase", "false"}, {"fales", "false"}, {"flse", "false"}, {"fasle", "false"}, {"fasel", "false"}, {"flue", "false"}, {"nil", "null"}, {"nul", "null"}, {"unll", "null"}, {"nll", "null"}, } for _, test := range tests { t.Run(test.Input, func(t *testing.T) { got := nameSuggestion(test.Input, keywords) if got != test.Want { t.Errorf( "wrong result\ninput: %q\ngot: %q\nwant: %q", test.Input, got, test.Want, ) } }) } } hcl-2.14.1/hclsyntax/doc.go000066400000000000000000000005031431334125700154570ustar00rootroot00000000000000// Package hclsyntax contains the parser, AST, etc for HCL's native language, // as opposed to the JSON variant. // // In normal use applications should rarely depend on this package directly, // instead preferring the higher-level interface of the main hcl package and // its companion package hclparse. package hclsyntax hcl-2.14.1/hclsyntax/expression.go000066400000000000000000001503601431334125700171200ustar00rootroot00000000000000package hclsyntax import ( "fmt" "sort" "sync" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/customdecode" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" "github.com/zclconf/go-cty/cty/function" ) // Expression is the abstract type for nodes that behave as HCL expressions. type Expression interface { Node // The hcl.Expression methods are duplicated here, rather than simply // embedded, because both Node and hcl.Expression have a Range method // and so they conflict. Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) Variables() []hcl.Traversal StartRange() hcl.Range } // Assert that Expression implements hcl.Expression var _ hcl.Expression = Expression(nil) // ParenthesesExpr represents an expression written in grouping // parentheses. // // The parser takes care of the precedence effect of the parentheses, so the // only purpose of this separate expression node is to capture the source range // of the parentheses themselves, rather than the source range of the // expression within. All of the other expression operations just pass through // to the underlying expression. 
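// For example (illustrative): parsing the source "(a)" produces a
// ParenthesesExpr whose Range covers all three characters, wrapping a
// ScopeTraversalExpr for "a" whose own Range covers only the identifier
// between the parentheses.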
type ParenthesesExpr struct { Expression SrcRange hcl.Range } var _ hcl.Expression = (*ParenthesesExpr)(nil) func (e *ParenthesesExpr) Range() hcl.Range { return e.SrcRange } func (e *ParenthesesExpr) walkChildNodes(w internalWalkFunc) { // We override the walkChildNodes from the embedded Expression to // ensure that both the parentheses _and_ the content are visible // in a walk. w(e.Expression) } // LiteralValueExpr is an expression that just always returns a given value. type LiteralValueExpr struct { Val cty.Value SrcRange hcl.Range } func (e *LiteralValueExpr) walkChildNodes(w internalWalkFunc) { // Literal values have no child nodes } func (e *LiteralValueExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { return e.Val, nil } func (e *LiteralValueExpr) Range() hcl.Range { return e.SrcRange } func (e *LiteralValueExpr) StartRange() hcl.Range { return e.SrcRange } // Implementation for hcl.AbsTraversalForExpr. func (e *LiteralValueExpr) AsTraversal() hcl.Traversal { // This one's a little weird: the contract for AsTraversal is to interpret // an expression as if it were traversal syntax, and traversal syntax // doesn't have the special keywords "null", "true", and "false" so these // are expected to be treated like variables in that case. // Since our parser already turned them into LiteralValueExpr by the time // we get here, we need to undo this and infer the name that would've // originally led to our value. // We don't do anything for any other values, since they don't overlap // with traversal roots. if e.Val.IsNull() { // In practice the parser only generates null values of the dynamic // pseudo-type for literals, so we can safely assume that any null // was originally the keyword "null". return hcl.Traversal{ hcl.TraverseRoot{ Name: "null", SrcRange: e.SrcRange, }, } } switch e.Val { case cty.True: return hcl.Traversal{ hcl.TraverseRoot{ Name: "true", SrcRange: e.SrcRange, }, } case cty.False: return hcl.Traversal{ hcl.TraverseRoot{ Name: "false", SrcRange: e.SrcRange, }, } default: // No traversal is possible for any other value. return nil } } // ScopeTraversalExpr is an Expression that retrieves a value from the scope // using a traversal. type ScopeTraversalExpr struct { Traversal hcl.Traversal SrcRange hcl.Range } func (e *ScopeTraversalExpr) walkChildNodes(w internalWalkFunc) { // Scope traversals have no child nodes } func (e *ScopeTraversalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { val, diags := e.Traversal.TraverseAbs(ctx) setDiagEvalContext(diags, e, ctx) return val, diags } func (e *ScopeTraversalExpr) Range() hcl.Range { return e.SrcRange } func (e *ScopeTraversalExpr) StartRange() hcl.Range { return e.SrcRange } // Implementation for hcl.AbsTraversalForExpr. func (e *ScopeTraversalExpr) AsTraversal() hcl.Traversal { return e.Traversal } // RelativeTraversalExpr is an Expression that retrieves a value from another // value using a _relative_ traversal. type RelativeTraversalExpr struct { Source Expression Traversal hcl.Traversal SrcRange hcl.Range } func (e *RelativeTraversalExpr) walkChildNodes(w internalWalkFunc) { w(e.Source) } func (e *RelativeTraversalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { src, diags := e.Source.Value(ctx) ret, travDiags := e.Traversal.TraverseRel(src) setDiagEvalContext(travDiags, e, ctx) diags = append(diags, travDiags...)
return ret, diags } func (e *RelativeTraversalExpr) Range() hcl.Range { return e.SrcRange } func (e *RelativeTraversalExpr) StartRange() hcl.Range { return e.SrcRange } // Implementation for hcl.AbsTraversalForExpr. func (e *RelativeTraversalExpr) AsTraversal() hcl.Traversal { // We can produce a traversal only if our source can. st, diags := hcl.AbsTraversalForExpr(e.Source) if diags.HasErrors() { return nil } ret := make(hcl.Traversal, len(st)+len(e.Traversal)) copy(ret, st) copy(ret[len(st):], e.Traversal) return ret } // FunctionCallExpr is an Expression that calls a function from the EvalContext // and returns its result. type FunctionCallExpr struct { Name string Args []Expression // If true, the final argument should be a tuple, list or set which will // expand to be one argument per element. ExpandFinal bool NameRange hcl.Range OpenParenRange hcl.Range CloseParenRange hcl.Range } func (e *FunctionCallExpr) walkChildNodes(w internalWalkFunc) { for _, arg := range e.Args { w(arg) } } func (e *FunctionCallExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var diags hcl.Diagnostics var f function.Function exists := false hasNonNilMap := false thisCtx := ctx for thisCtx != nil { if thisCtx.Functions == nil { thisCtx = thisCtx.Parent() continue } hasNonNilMap = true f, exists = thisCtx.Functions[e.Name] if exists { break } thisCtx = thisCtx.Parent() } if !exists { if !hasNonNilMap { return cty.DynamicVal, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Function calls not allowed", Detail: "Functions may not be called here.", Subject: e.Range().Ptr(), Expression: e, EvalContext: ctx, }, } } avail := make([]string, 0, len(ctx.Functions)) for name := range ctx.Functions { avail = append(avail, name) } suggestion := nameSuggestion(e.Name, avail) if suggestion != "" { suggestion = fmt.Sprintf(" Did you mean %q?", suggestion) } return cty.DynamicVal, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Call to unknown function", Detail: fmt.Sprintf("There is no function named %q.%s", e.Name, suggestion), Subject: &e.NameRange, Context: e.Range().Ptr(), Expression: e, EvalContext: ctx, }, } } diagExtra := functionCallDiagExtra{ calledFunctionName: e.Name, } params := f.Params() varParam := f.VarParam() args := e.Args if e.ExpandFinal { if len(args) < 1 { // should never happen if the parser is behaving panic("ExpandFinal set on function call with no arguments") } expandExpr := args[len(args)-1] expandVal, expandDiags := expandExpr.Value(ctx) diags = append(diags, expandDiags...) if expandDiags.HasErrors() { return cty.DynamicVal, diags } switch { case expandVal.Type().Equals(cty.DynamicPseudoType): if expandVal.IsNull() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid expanding argument value", Detail: "The expanding argument (indicated by ...) must not be null.", Subject: expandExpr.Range().Ptr(), Context: e.Range().Ptr(), Expression: expandExpr, EvalContext: ctx, Extra: &diagExtra, }) return cty.DynamicVal, diags } return cty.DynamicVal, diags case expandVal.Type().IsTupleType() || expandVal.Type().IsListType() || expandVal.Type().IsSetType(): if expandVal.IsNull() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid expanding argument value", Detail: "The expanding argument (indicated by ...) 
must not be null.", Subject: expandExpr.Range().Ptr(), Context: e.Range().Ptr(), Expression: expandExpr, EvalContext: ctx, Extra: &diagExtra, }) return cty.DynamicVal, diags } if !expandVal.IsKnown() { return cty.DynamicVal, diags } // When expanding arguments from a collection, we must first unmark // the collection itself, and apply any marks directly to the // elements. This ensures that marks propagate correctly. expandVal, marks := expandVal.Unmark() newArgs := make([]Expression, 0, (len(args)-1)+expandVal.LengthInt()) newArgs = append(newArgs, args[:len(args)-1]...) it := expandVal.ElementIterator() for it.Next() { _, val := it.Element() newArgs = append(newArgs, &LiteralValueExpr{ Val: val.WithMarks(marks), SrcRange: expandExpr.Range(), }) } args = newArgs default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid expanding argument value", Detail: "The expanding argument (indicated by ...) must be of a tuple, list, or set type.", Subject: expandExpr.Range().Ptr(), Context: e.Range().Ptr(), Expression: expandExpr, EvalContext: ctx, Extra: &diagExtra, }) return cty.DynamicVal, diags } } if len(args) < len(params) { missing := params[len(args)] qual := "" if varParam != nil { qual = " at least" } return cty.DynamicVal, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Not enough function arguments", Detail: fmt.Sprintf( "Function %q expects%s %d argument(s). Missing value for %q.", e.Name, qual, len(params), missing.Name, ), Subject: &e.CloseParenRange, Context: e.Range().Ptr(), Expression: e, EvalContext: ctx, Extra: &diagExtra, }, } } if varParam == nil && len(args) > len(params) { return cty.DynamicVal, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Too many function arguments", Detail: fmt.Sprintf( "Function %q expects only %d argument(s).", e.Name, len(params), ), Subject: args[len(params)].StartRange().Ptr(), Context: e.Range().Ptr(), Expression: e, EvalContext: ctx, Extra: &diagExtra, }, } } argVals := make([]cty.Value, len(args)) for i, argExpr := range args { var param *function.Parameter if i < len(params) { param = ¶ms[i] } else { param = varParam } var val cty.Value if decodeFn := customdecode.CustomExpressionDecoderForType(param.Type); decodeFn != nil { var argDiags hcl.Diagnostics val, argDiags = decodeFn(argExpr, ctx) diags = append(diags, argDiags...) if val == cty.NilVal { val = cty.UnknownVal(param.Type) } } else { var argDiags hcl.Diagnostics val, argDiags = argExpr.Value(ctx) if len(argDiags) > 0 { diags = append(diags, argDiags...) } // Try to convert our value to the parameter type var err error val, err = convert.Convert(val, param.Type) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid function argument", Detail: fmt.Sprintf( "Invalid value for %q parameter: %s.", param.Name, err, ), Subject: argExpr.StartRange().Ptr(), Context: e.Range().Ptr(), Expression: argExpr, EvalContext: ctx, Extra: &diagExtra, }) } } argVals[i] = val } if diags.HasErrors() { // Don't try to execute the function if we already have errors with // the arguments, because the result will probably be a confusing // error message. return cty.DynamicVal, diags } resultVal, err := f.Call(argVals) if err != nil { // For errors in the underlying call itself we also return the raw // call error via an extra method on our "diagnostic extra" value. 
diagExtra.functionCallError = err switch terr := err.(type) { case function.ArgError: i := terr.Index var param *function.Parameter if i < len(params) { param = ¶ms[i] } else { param = varParam } if param == nil || i > len(args)-1 { // Getting here means that the function we called has a bug: // it returned an arg error that refers to an argument index // that wasn't present in the call. For that situation // we'll degrade to a less specific error just to give // some sort of answer, but best to still fix the buggy // function so that it only returns argument indices that // are in range. switch { case param != nil: // In this case we'll assume that the function was trying // to talk about a final variadic parameter but the caller // didn't actually provide any arguments for it. That means // we can at least still name the parameter in the // error message, but our source range will be the call // as a whole because we don't have an argument expression // to highlight specifically. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid function argument", Detail: fmt.Sprintf( "Invalid value for %q parameter: %s.", param.Name, err, ), Subject: e.Range().Ptr(), Expression: e, EvalContext: ctx, Extra: &diagExtra, }) default: // This is the most degenerate case of all, where the // index is out of range even for the declared parameters, // and so we can't tell which parameter the function is // trying to report an error for. Just a generic error // report in that case. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Error in function call", Detail: fmt.Sprintf( "Call to function %q failed: %s.", e.Name, err, ), Subject: e.StartRange().Ptr(), Context: e.Range().Ptr(), Expression: e, EvalContext: ctx, Extra: &diagExtra, }) } } else { argExpr := args[i] // TODO: we should also unpick a PathError here and show the // path to the deep value where the error was detected. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid function argument", Detail: fmt.Sprintf( "Invalid value for %q parameter: %s.", param.Name, err, ), Subject: argExpr.StartRange().Ptr(), Context: e.Range().Ptr(), Expression: argExpr, EvalContext: ctx, Extra: &diagExtra, }) } default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Error in function call", Detail: fmt.Sprintf( "Call to function %q failed: %s.", e.Name, err, ), Subject: e.StartRange().Ptr(), Context: e.Range().Ptr(), Expression: e, EvalContext: ctx, Extra: &diagExtra, }) } return cty.DynamicVal, diags } return resultVal, diags } func (e *FunctionCallExpr) Range() hcl.Range { return hcl.RangeBetween(e.NameRange, e.CloseParenRange) } func (e *FunctionCallExpr) StartRange() hcl.Range { return hcl.RangeBetween(e.NameRange, e.OpenParenRange) } // Implementation for hcl.ExprCall. func (e *FunctionCallExpr) ExprCall() *hcl.StaticCall { ret := &hcl.StaticCall{ Name: e.Name, NameRange: e.NameRange, Arguments: make([]hcl.Expression, len(e.Args)), ArgsRange: hcl.RangeBetween(e.OpenParenRange, e.CloseParenRange), } // Need to convert our own Expression objects into hcl.Expression. for i, arg := range e.Args { ret.Arguments[i] = arg } return ret } // FunctionCallDiagExtra is an interface implemented by the value in the "Extra" // field of some diagnostics returned by FunctionCallExpr.Value, giving // cooperating callers access to some machine-readable information about the // call that a diagnostic relates to. 
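//
// Callers can recover this information with a type assertion on a
// diagnostic's Extra field. A minimal sketch of that pattern (the
// surrounding loop and variable names are assumptions):
//
//	for _, diag := range diags {
//		if extra, ok := diag.Extra.(hclsyntax.FunctionCallDiagExtra); ok {
//			name := extra.CalledFunctionName()
//			if callErr := extra.FunctionCallError(); callErr != nil {
//				// callErr is the raw error returned by the function "name"
//				_ = name
//			}
//		}
//	}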
type FunctionCallDiagExtra interface { // CalledFunctionName returns the name of the function being called at // the time the diagnostic was generated, if any. Returns an empty string // if there is no known called function. CalledFunctionName() string // FunctionCallError returns the error value returned by the implementation // of the function being called, if any. Returns nil if the diagnostic was // not returned in response to a call error. // // Some errors related to calling functions are generated by HCL itself // rather than by the underlying function, in which case this method // will return nil. FunctionCallError() error } type functionCallDiagExtra struct { calledFunctionName string functionCallError error } func (e *functionCallDiagExtra) CalledFunctionName() string { return e.calledFunctionName } func (e *functionCallDiagExtra) FunctionCallError() error { return e.functionCallError } type ConditionalExpr struct { Condition Expression TrueResult Expression FalseResult Expression SrcRange hcl.Range } func (e *ConditionalExpr) walkChildNodes(w internalWalkFunc) { w(e.Condition) w(e.TrueResult) w(e.FalseResult) } func (e *ConditionalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { trueResult, trueDiags := e.TrueResult.Value(ctx) falseResult, falseDiags := e.FalseResult.Value(ctx) var diags hcl.Diagnostics resultType := cty.DynamicPseudoType convs := make([]convert.Conversion, 2) switch { // If either case is a dynamic null value (which would result from a // literal null in the config), we know that it can convert to the expected // type of the opposite case, and we don't need to speculatively reduce the // final result type to DynamicPseudoType. // If we know that either Type is a DynamicPseudoType, we can be certain // that the other value can convert since it's a pass-through, and we don't // need to unify the types. If the final evaluation results in the dynamic // value being returned, there's no conversion we can do, so we return the // value directly. case trueResult.RawEquals(cty.NullVal(cty.DynamicPseudoType)): resultType = falseResult.Type() convs[0] = convert.GetConversionUnsafe(cty.DynamicPseudoType, resultType) case falseResult.RawEquals(cty.NullVal(cty.DynamicPseudoType)): resultType = trueResult.Type() convs[1] = convert.GetConversionUnsafe(cty.DynamicPseudoType, resultType) case trueResult.Type() == cty.DynamicPseudoType, falseResult.Type() == cty.DynamicPseudoType: // the final resultType type is still unknown // we don't need to get the conversion, because both are a noop. default: // Try to find a type that both results can be converted to. resultType, convs = convert.UnifyUnsafe([]cty.Type{trueResult.Type(), falseResult.Type()}) } if resultType == cty.NilType { return cty.DynamicVal, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Inconsistent conditional result types", Detail: fmt.Sprintf( "The true and false result expressions must have consistent types. %s.", describeConditionalTypeMismatch(trueResult.Type(), falseResult.Type()), ), Subject: hcl.RangeBetween(e.TrueResult.Range(), e.FalseResult.Range()).Ptr(), Context: &e.SrcRange, Expression: e, EvalContext: ctx, }, } } condResult, condDiags := e.Condition.Value(ctx) diags = append(diags, condDiags...) if condResult.IsNull() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Null condition", Detail: "The condition value is null. 
Conditions must either be true or false.", Subject: e.Condition.Range().Ptr(), Context: &e.SrcRange, Expression: e.Condition, EvalContext: ctx, }) return cty.UnknownVal(resultType), diags } if !condResult.IsKnown() { return cty.UnknownVal(resultType), diags } condResult, err := convert.Convert(condResult, cty.Bool) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect condition type", Detail: "The condition expression must be of type bool.", Subject: e.Condition.Range().Ptr(), Context: &e.SrcRange, Expression: e.Condition, EvalContext: ctx, }) return cty.UnknownVal(resultType), diags } // Unmark result before testing for truthiness condResult, _ = condResult.UnmarkDeep() if condResult.True() { diags = append(diags, trueDiags...) if convs[0] != nil { var err error trueResult, err = convs[0](trueResult) if err != nil { // Unsafe conversion failed with the concrete result value diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Inconsistent conditional result types", Detail: fmt.Sprintf( "The true result value has the wrong type: %s.", err.Error(), ), Subject: e.TrueResult.Range().Ptr(), Context: &e.SrcRange, Expression: e.TrueResult, EvalContext: ctx, }) trueResult = cty.UnknownVal(resultType) } } return trueResult, diags } else { diags = append(diags, falseDiags...) if convs[1] != nil { var err error falseResult, err = convs[1](falseResult) if err != nil { // Unsafe conversion failed with the concrete result value diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Inconsistent conditional result types", Detail: fmt.Sprintf( "The false result value has the wrong type: %s.", err.Error(), ), Subject: e.FalseResult.Range().Ptr(), Context: &e.SrcRange, Expression: e.FalseResult, EvalContext: ctx, }) falseResult = cty.UnknownVal(resultType) } } return falseResult, diags } } // describeConditionalTypeMismatch makes a best effort to describe the // difference between types in the true and false arms of a conditional // expression in a way that would be useful to someone trying to understand // why their conditional expression isn't valid. // // NOTE: This function is only designed to deal with situations // where trueTy and falseTy are different. Calling it with two equal // types will produce a nonsense result. This function also only really // deals with situations that type unification can't resolve, so we should // call this function only after trying type unification first. func describeConditionalTypeMismatch(trueTy, falseTy cty.Type) string { // The main tricky cases here are when both trueTy and falseTy are // of the same structural type kind, such as both being object types // or both being tuple types. In that case the "FriendlyName" method // returns only "object" or "tuple" and so we need to do some more // work to describe what's different inside them. switch { case trueTy.IsObjectType() && falseTy.IsObjectType(): // We'll first gather up the attribute names and sort them. In the // event that there are multiple attributes that disagree across // the two types, we'll prefer to report the one that sorts lexically // least just so that our error message is consistent between // evaluations. 
var trueAttrs, falseAttrs []string for name := range trueTy.AttributeTypes() { trueAttrs = append(trueAttrs, name) } sort.Strings(trueAttrs) for name := range falseTy.AttributeTypes() { falseAttrs = append(falseAttrs, name) } sort.Strings(falseAttrs) for _, name := range trueAttrs { if !falseTy.HasAttribute(name) { return fmt.Sprintf("The 'true' value includes object attribute %q, which is absent in the 'false' value", name) } trueAty := trueTy.AttributeType(name) falseAty := falseTy.AttributeType(name) if !trueAty.Equals(falseAty) { // For deeply-nested differences this will likely get very // clunky quickly by nesting these messages inside one another, // but we'll accept that for now in the interests of producing // _some_ useful feedback, even if it isn't as concise as // we'd prefer it to be. Deeply-nested structures in // conditionals are thankfully not super common. return fmt.Sprintf( "Type mismatch for object attribute %q: %s", name, describeConditionalTypeMismatch(trueAty, falseAty), ) } } for _, name := range falseAttrs { if !trueTy.HasAttribute(name) { return fmt.Sprintf("The 'false' value includes object attribute %q, which is absent in the 'true' value", name) } // NOTE: We don't need to check the attribute types again, because // any attribute that both types have in common would already have // been checked in the previous loop. } case trueTy.IsTupleType() && falseTy.IsTupleType(): trueEtys := trueTy.TupleElementTypes() falseEtys := falseTy.TupleElementTypes() if trueCount, falseCount := len(trueEtys), len(falseEtys); trueCount != falseCount { return fmt.Sprintf("The 'true' tuple has length %d, but the 'false' tuple has length %d", trueCount, falseCount) } // NOTE: Thanks to the condition above, we know that both tuples are // of the same length and so they must have some differing types // instead. for i := range trueEtys { trueEty := trueEtys[i] falseEty := falseEtys[i] if !trueEty.Equals(falseEty) { // For deeply-nested differences this will likely get very // clunky quickly by nesting these messages inside one another, // but we'll accept that for now in the interests of producing // _some_ useful feedback, even if it isn't as concise as // we'd prefer it to be. Deeply-nested structures in // conditionals are thankfully not super common. return fmt.Sprintf( "Type mismatch for tuple element %d: %s", i, describeConditionalTypeMismatch(trueEty, falseEty), ) } } case trueTy.IsCollectionType() && falseTy.IsCollectionType(): // For this case we're specifically interested in the situation where: // - both collections are of the same kind, AND // - the element types of both are either object or tuple types. // This is just to avoid writing a useless statement like // "The 'true' value is list of object, but the 'false' value is list of object". // This still doesn't account for more awkward cases like collections // of collections of structural types, but we won't let perfect be // the enemy of the good. 
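// For example (illustrative): given a 'true' arm of type
// list(object({a = bool})) and a 'false' arm of type
// list(object({a = list(string)})), the message built below would report
// mismatched list element types and then name attribute "a" specifically.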
trueEty := trueTy.ElementType() falseEty := falseTy.ElementType() if (trueTy.IsListType() && falseTy.IsListType()) || (trueTy.IsMapType() && falseTy.IsMapType()) || (trueTy.IsSetType() && falseTy.IsSetType()) { if (trueEty.IsObjectType() && falseEty.IsObjectType()) || (trueEty.IsTupleType() && falseEty.IsTupleType()) { noun := "collection" switch { // NOTE: We now know that trueTy and falseTy have the same collection kind case trueTy.IsListType(): noun = "list" case trueTy.IsSetType(): noun = "set" case trueTy.IsMapType(): noun = "map" } return fmt.Sprintf( "Mismatched %s element types: %s", noun, describeConditionalTypeMismatch(trueEty, falseEty), ) } } } // If we don't manage any more specialized message, we'll just report // what the two types are. trueName := trueTy.FriendlyName() falseName := falseTy.FriendlyName() if trueName == falseName { // Absolute last resort for when we have no special rule above but // we have two types with the same friendly name anyway. This is // the most vague of all possible messages but is reserved for // particularly awkward cases, like lists of lists of differing tuple // types. return "At least one deeply-nested attribute or element is not compatible across both the 'true' and the 'false' value" } return fmt.Sprintf( "The 'true' value is %s, but the 'false' value is %s", trueTy.FriendlyName(), falseTy.FriendlyName(), ) } func (e *ConditionalExpr) Range() hcl.Range { return e.SrcRange } func (e *ConditionalExpr) StartRange() hcl.Range { return e.Condition.StartRange() } type IndexExpr struct { Collection Expression Key Expression SrcRange hcl.Range OpenRange hcl.Range BracketRange hcl.Range } func (e *IndexExpr) walkChildNodes(w internalWalkFunc) { w(e.Collection) w(e.Key) } func (e *IndexExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var diags hcl.Diagnostics coll, collDiags := e.Collection.Value(ctx) key, keyDiags := e.Key.Value(ctx) diags = append(diags, collDiags...) diags = append(diags, keyDiags...) val, indexDiags := hcl.Index(coll, key, &e.BracketRange) setDiagEvalContext(indexDiags, e, ctx) diags = append(diags, indexDiags...) return val, diags } func (e *IndexExpr) Range() hcl.Range { return e.SrcRange } func (e *IndexExpr) StartRange() hcl.Range { return e.OpenRange } type TupleConsExpr struct { Exprs []Expression SrcRange hcl.Range OpenRange hcl.Range } func (e *TupleConsExpr) walkChildNodes(w internalWalkFunc) { for _, expr := range e.Exprs { w(expr) } } func (e *TupleConsExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var vals []cty.Value var diags hcl.Diagnostics vals = make([]cty.Value, len(e.Exprs)) for i, expr := range e.Exprs { val, valDiags := expr.Value(ctx) vals[i] = val diags = append(diags, valDiags...) 
} return cty.TupleVal(vals), diags } func (e *TupleConsExpr) Range() hcl.Range { return e.SrcRange } func (e *TupleConsExpr) StartRange() hcl.Range { return e.OpenRange } // Implementation for hcl.ExprList func (e *TupleConsExpr) ExprList() []hcl.Expression { ret := make([]hcl.Expression, len(e.Exprs)) for i, expr := range e.Exprs { ret[i] = expr } return ret } type ObjectConsExpr struct { Items []ObjectConsItem SrcRange hcl.Range OpenRange hcl.Range } type ObjectConsItem struct { KeyExpr Expression ValueExpr Expression } func (e *ObjectConsExpr) walkChildNodes(w internalWalkFunc) { for _, item := range e.Items { w(item.KeyExpr) w(item.ValueExpr) } } func (e *ObjectConsExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var vals map[string]cty.Value var diags hcl.Diagnostics var marks []cty.ValueMarks // This will get set to true if we fail to produce any of our keys, // either because they are actually unknown or if the evaluation produces // errors. In all of these case we must return DynamicPseudoType because // we're unable to know the full set of keys our object has, and thus // we can't produce a complete value of the intended type. // // We still evaluate all of the item keys and values to make sure that we // get as complete as possible a set of diagnostics. known := true vals = make(map[string]cty.Value, len(e.Items)) for _, item := range e.Items { key, keyDiags := item.KeyExpr.Value(ctx) diags = append(diags, keyDiags...) val, valDiags := item.ValueExpr.Value(ctx) diags = append(diags, valDiags...) if keyDiags.HasErrors() { known = false continue } if key.IsNull() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Null value as key", Detail: "Can't use a null value as a key.", Subject: item.ValueExpr.Range().Ptr(), Expression: item.KeyExpr, EvalContext: ctx, }) known = false continue } key, keyMarks := key.Unmark() marks = append(marks, keyMarks) var err error key, err = convert.Convert(key, cty.String) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect key type", Detail: fmt.Sprintf("Can't use this value as a key: %s.", err.Error()), Subject: item.KeyExpr.Range().Ptr(), Expression: item.KeyExpr, EvalContext: ctx, }) known = false continue } if !key.IsKnown() { known = false continue } keyStr := key.AsString() vals[keyStr] = val } if !known { return cty.DynamicVal, diags } return cty.ObjectVal(vals).WithMarks(marks...), diags } func (e *ObjectConsExpr) Range() hcl.Range { return e.SrcRange } func (e *ObjectConsExpr) StartRange() hcl.Range { return e.OpenRange } // Implementation for hcl.ExprMap func (e *ObjectConsExpr) ExprMap() []hcl.KeyValuePair { ret := make([]hcl.KeyValuePair, len(e.Items)) for i, item := range e.Items { ret[i] = hcl.KeyValuePair{ Key: item.KeyExpr, Value: item.ValueExpr, } } return ret } // ObjectConsKeyExpr is a special wrapper used only for ObjectConsExpr keys, // which deals with the special case that a naked identifier in that position // must be interpreted as a literal string rather than evaluated directly. type ObjectConsKeyExpr struct { Wrapped Expression ForceNonLiteral bool } func (e *ObjectConsKeyExpr) literalName() string { // This is our logic for deciding whether to behave like a literal string. // We lean on our AbsTraversalForExpr implementation here, which already // deals with some awkward cases like the expression being the result // of the keywords "null", "true" and "false" which we'd want to interpret // as keys here too. 
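// For example (illustrative): a bare identifier key, as in {foo = 1}, is
// treated as the literal string "foo", whereas wrapping the key in
// parentheses, as in {(foo) = 1}, indicates that it should be evaluated
// as a reference rather than taken literally.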
return hcl.ExprAsKeyword(e.Wrapped) } func (e *ObjectConsKeyExpr) walkChildNodes(w internalWalkFunc) { // We only treat our wrapped expression as a real expression if we're // not going to interpret it as a literal. if e.literalName() == "" { w(e.Wrapped) } } func (e *ObjectConsKeyExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { // Because we accept a naked identifier as a literal key rather than a // reference, it's confusing to accept a traversal containing periods // here since we can't tell if the user intends to create a key with // periods or actually reference something. To avoid confusing downstream // errors we'll just prohibit a naked multi-step traversal here and // require the user to state their intent more clearly. // (This is handled at evaluation time rather than parse time because // an application using static analysis _can_ accept a naked multi-step // traversal here, if desired.) if !e.ForceNonLiteral { if travExpr, isTraversal := e.Wrapped.(*ScopeTraversalExpr); isTraversal && len(travExpr.Traversal) > 1 { var diags hcl.Diagnostics diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Ambiguous attribute key", Detail: "If this expression is intended to be a reference, wrap it in parentheses. If it's instead intended as a literal name containing periods, wrap it in quotes to create a string literal.", Subject: e.Range().Ptr(), }) return cty.DynamicVal, diags } if ln := e.literalName(); ln != "" { return cty.StringVal(ln), nil } } return e.Wrapped.Value(ctx) } func (e *ObjectConsKeyExpr) Range() hcl.Range { return e.Wrapped.Range() } func (e *ObjectConsKeyExpr) StartRange() hcl.Range { return e.Wrapped.StartRange() } // Implementation for hcl.AbsTraversalForExpr. func (e *ObjectConsKeyExpr) AsTraversal() hcl.Traversal { // If we're forcing a non-literal then we can never be interpreted // as a traversal. if e.ForceNonLiteral { return nil } // We can produce a traversal only if our wrappee can. st, diags := hcl.AbsTraversalForExpr(e.Wrapped) if diags.HasErrors() { return nil } return st } func (e *ObjectConsKeyExpr) UnwrapExpression() Expression { return e.Wrapped } // ForExpr represents iteration constructs: // // tuple = [for i, v in list: upper(v) if i > 2] // object = {for k, v in map: k => upper(v)} // object_of_tuples = {for v in list: v.key: v...} type ForExpr struct { KeyVar string // empty if ignoring the key ValVar string CollExpr Expression KeyExpr Expression // nil when producing a tuple ValExpr Expression CondExpr Expression // null if no "if" clause is present Group bool // set if the ellipsis is used on the value in an object for SrcRange hcl.Range OpenRange hcl.Range CloseRange hcl.Range } func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { var diags hcl.Diagnostics var marks []cty.ValueMarks collVal, collDiags := e.CollExpr.Value(ctx) diags = append(diags, collDiags...) 
if collVal.IsNull() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Iteration over null value", Detail: "A null value cannot be used as the collection in a 'for' expression.", Subject: e.CollExpr.Range().Ptr(), Context: &e.SrcRange, Expression: e.CollExpr, EvalContext: ctx, }) return cty.DynamicVal, diags } if collVal.Type() == cty.DynamicPseudoType { return cty.DynamicVal, diags } // Unmark collection before checking for iterability, because marked // values cannot be iterated collVal, collMarks := collVal.Unmark() marks = append(marks, collMarks) if !collVal.CanIterateElements() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Iteration over non-iterable value", Detail: fmt.Sprintf( "A value of type %s cannot be used as the collection in a 'for' expression.", collVal.Type().FriendlyName(), ), Subject: e.CollExpr.Range().Ptr(), Context: &e.SrcRange, Expression: e.CollExpr, EvalContext: ctx, }) return cty.DynamicVal, diags } if !collVal.IsKnown() { return cty.DynamicVal, diags } // Before we start we'll do an early check to see if any CondExpr we've // been given is of the wrong type. This isn't 100% reliable (it may // be DynamicVal until real values are given) but it should catch some // straightforward cases and prevent a barrage of repeated errors. if e.CondExpr != nil { childCtx := ctx.NewChild() childCtx.Variables = map[string]cty.Value{} if e.KeyVar != "" { childCtx.Variables[e.KeyVar] = cty.DynamicVal } childCtx.Variables[e.ValVar] = cty.DynamicVal result, condDiags := e.CondExpr.Value(childCtx) diags = append(diags, condDiags...) if result.IsNull() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Condition is null", Detail: "The value of the 'if' clause must not be null.", Subject: e.CondExpr.Range().Ptr(), Context: &e.SrcRange, Expression: e.CondExpr, EvalContext: ctx, }) return cty.DynamicVal, diags } _, err := convert.Convert(result, cty.Bool) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' condition", Detail: fmt.Sprintf("The 'if' clause value is invalid: %s.", err.Error()), Subject: e.CondExpr.Range().Ptr(), Context: &e.SrcRange, Expression: e.CondExpr, EvalContext: ctx, }) return cty.DynamicVal, diags } if condDiags.HasErrors() { return cty.DynamicVal, diags } } if e.KeyExpr != nil { // Producing an object var vals map[string]cty.Value var groupVals map[string][]cty.Value if e.Group { groupVals = map[string][]cty.Value{} } else { vals = map[string]cty.Value{} } it := collVal.ElementIterator() known := true for it.Next() { k, v := it.Element() childCtx := ctx.NewChild() childCtx.Variables = map[string]cty.Value{} if e.KeyVar != "" { childCtx.Variables[e.KeyVar] = k } childCtx.Variables[e.ValVar] = v if e.CondExpr != nil { includeRaw, condDiags := e.CondExpr.Value(childCtx) diags = append(diags, condDiags...) 
if includeRaw.IsNull() { if known { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' condition", Detail: "The value of the 'if' clause must not be null.", Subject: e.CondExpr.Range().Ptr(), Context: &e.SrcRange, Expression: e.CondExpr, EvalContext: childCtx, }) } known = false continue } include, err := convert.Convert(includeRaw, cty.Bool) if err != nil { if known { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' condition", Detail: fmt.Sprintf("The 'if' clause value is invalid: %s.", err.Error()), Subject: e.CondExpr.Range().Ptr(), Context: &e.SrcRange, Expression: e.CondExpr, EvalContext: childCtx, }) } known = false continue } if !include.IsKnown() { known = false continue } // Extract and merge marks from the include expression into the // main set of marks includeUnmarked, includeMarks := include.Unmark() marks = append(marks, includeMarks) if includeUnmarked.False() { // Skip this element continue } } keyRaw, keyDiags := e.KeyExpr.Value(childCtx) diags = append(diags, keyDiags...) if keyRaw.IsNull() { if known { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid object key", Detail: "Key expression in 'for' expression must not produce a null value.", Subject: e.KeyExpr.Range().Ptr(), Context: &e.SrcRange, Expression: e.KeyExpr, EvalContext: childCtx, }) } known = false continue } if !keyRaw.IsKnown() { known = false continue } key, err := convert.Convert(keyRaw, cty.String) if err != nil { if known { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid object key", Detail: fmt.Sprintf("The key expression produced an invalid result: %s.", err.Error()), Subject: e.KeyExpr.Range().Ptr(), Context: &e.SrcRange, Expression: e.KeyExpr, EvalContext: childCtx, }) } known = false continue } key, keyMarks := key.Unmark() marks = append(marks, keyMarks) val, valDiags := e.ValExpr.Value(childCtx) diags = append(diags, valDiags...) if e.Group { k := key.AsString() groupVals[k] = append(groupVals[k], val) } else { k := key.AsString() if _, exists := vals[k]; exists { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Duplicate object key", Detail: fmt.Sprintf( "Two different items produced the key %q in this 'for' expression. If duplicates are expected, use the ellipsis (...) after the value expression to enable grouping by key.", k, ), Subject: e.KeyExpr.Range().Ptr(), Context: &e.SrcRange, Expression: e.KeyExpr, EvalContext: childCtx, }) } else { vals[key.AsString()] = val } } } if !known { return cty.DynamicVal, diags } if e.Group { vals = map[string]cty.Value{} for k, gvs := range groupVals { vals[k] = cty.TupleVal(gvs) } } return cty.ObjectVal(vals).WithMarks(marks...), diags } else { // Producing a tuple vals := []cty.Value{} it := collVal.ElementIterator() known := true for it.Next() { k, v := it.Element() childCtx := ctx.NewChild() childCtx.Variables = map[string]cty.Value{} if e.KeyVar != "" { childCtx.Variables[e.KeyVar] = k } childCtx.Variables[e.ValVar] = v if e.CondExpr != nil { includeRaw, condDiags := e.CondExpr.Value(childCtx) diags = append(diags, condDiags...) 
if includeRaw.IsNull() { if known { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' condition", Detail: "The value of the 'if' clause must not be null.", Subject: e.CondExpr.Range().Ptr(), Context: &e.SrcRange, Expression: e.CondExpr, EvalContext: childCtx, }) } known = false continue } if !includeRaw.IsKnown() { // We will eventually return DynamicVal, but we'll continue // iterating in case there are other diagnostics to gather // for later elements. known = false continue } include, err := convert.Convert(includeRaw, cty.Bool) if err != nil { if known { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' condition", Detail: fmt.Sprintf("The 'if' clause value is invalid: %s.", err.Error()), Subject: e.CondExpr.Range().Ptr(), Context: &e.SrcRange, Expression: e.CondExpr, EvalContext: childCtx, }) } known = false continue } // Extract and merge marks from the include expression into the // main set of marks includeUnmarked, includeMarks := include.Unmark() marks = append(marks, includeMarks) if includeUnmarked.False() { // Skip this element continue } } val, valDiags := e.ValExpr.Value(childCtx) diags = append(diags, valDiags...) vals = append(vals, val) } if !known { return cty.DynamicVal, diags } return cty.TupleVal(vals).WithMarks(marks...), diags } } func (e *ForExpr) walkChildNodes(w internalWalkFunc) { w(e.CollExpr) scopeNames := map[string]struct{}{} if e.KeyVar != "" { scopeNames[e.KeyVar] = struct{}{} } if e.ValVar != "" { scopeNames[e.ValVar] = struct{}{} } if e.KeyExpr != nil { w(ChildScope{ LocalNames: scopeNames, Expr: e.KeyExpr, }) } w(ChildScope{ LocalNames: scopeNames, Expr: e.ValExpr, }) if e.CondExpr != nil { w(ChildScope{ LocalNames: scopeNames, Expr: e.CondExpr, }) } } func (e *ForExpr) Range() hcl.Range { return e.SrcRange } func (e *ForExpr) StartRange() hcl.Range { return e.OpenRange } type SplatExpr struct { Source Expression Each Expression Item *AnonSymbolExpr SrcRange hcl.Range MarkerRange hcl.Range } func (e *SplatExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { sourceVal, diags := e.Source.Value(ctx) if diags.HasErrors() { // We'll evaluate our "Each" expression here just to see if it // produces any more diagnostics we can report. Since we're not // assigning a value to our AnonSymbolExpr here it will return // DynamicVal, which should short-circuit any use of it. _, itemDiags := e.Item.Value(ctx) diags = append(diags, itemDiags...) return cty.DynamicVal, diags } sourceTy := sourceVal.Type() // A "special power" of splat expressions is that they can be applied // both to tuples/lists and to other values, and in the latter case // the value will be treated as an implicit single-item tuple, or as // an empty tuple if the value is null. autoUpgrade := !(sourceTy.IsTupleType() || sourceTy.IsListType() || sourceTy.IsSetType()) if sourceVal.IsNull() { if autoUpgrade { return cty.EmptyTupleVal, diags } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Splat of null value", Detail: "Splat expressions (with the * symbol) cannot be applied to null sequences.", Subject: e.Source.Range().Ptr(), Context: hcl.RangeBetween(e.Source.Range(), e.MarkerRange).Ptr(), Expression: e.Source, EvalContext: ctx, }) return cty.DynamicVal, diags } if sourceTy == cty.DynamicPseudoType { // If we don't even know the _type_ of our source value yet then // we'll need to defer all processing, since we can't decide our // result type either. 
return cty.DynamicVal, diags } upgradedUnknown := false if autoUpgrade { // If we're upgrading an unknown value to a tuple/list, the result // cannot be known. Otherwise a tuple containing an unknown value will // upgrade to a different number of elements depending on whether // sourceVal becomes null or not. // We record this condition here so we can process any remaining // expression after the * to verify the result of the traversal. For // example, it is valid to use a splat on a single object to retrieve a // list of a single attribute, but we still need to check if that // attribute actually exists. upgradedUnknown = !sourceVal.IsKnown() sourceVal = cty.TupleVal([]cty.Value{sourceVal}) sourceTy = sourceVal.Type() } // We'll compute our result type lazily if we need it. In the normal case // it's inferred automatically from the value we construct. resultTy := func() (cty.Type, hcl.Diagnostics) { chiCtx := ctx.NewChild() var diags hcl.Diagnostics switch { case sourceTy.IsListType() || sourceTy.IsSetType(): ety := sourceTy.ElementType() e.Item.setValue(chiCtx, cty.UnknownVal(ety)) val, itemDiags := e.Each.Value(chiCtx) diags = append(diags, itemDiags...) e.Item.clearValue(chiCtx) // clean up our temporary value return cty.List(val.Type()), diags case sourceTy.IsTupleType(): etys := sourceTy.TupleElementTypes() resultTys := make([]cty.Type, 0, len(etys)) for _, ety := range etys { e.Item.setValue(chiCtx, cty.UnknownVal(ety)) val, itemDiags := e.Each.Value(chiCtx) diags = append(diags, itemDiags...) e.Item.clearValue(chiCtx) // clean up our temporary value resultTys = append(resultTys, val.Type()) } return cty.Tuple(resultTys), diags default: // Should never happen because of our promotion to list above. return cty.DynamicPseudoType, diags } } if !sourceVal.IsKnown() { // We can't produce a known result in this case, but we'll still // indicate what the result type would be, allowing any downstream type // checking to proceed. ty, tyDiags := resultTy() diags = append(diags, tyDiags...) return cty.UnknownVal(ty), diags } // Unmark the collection, and save the marks to apply to the returned // collection result sourceVal, marks := sourceVal.Unmark() vals := make([]cty.Value, 0, sourceVal.LengthInt()) it := sourceVal.ElementIterator() if ctx == nil { // we need a context to use our AnonSymbolExpr, so we'll just // make an empty one here to use as a placeholder. ctx = ctx.NewChild() } isKnown := true for it.Next() { _, sourceItem := it.Element() e.Item.setValue(ctx, sourceItem) newItem, itemDiags := e.Each.Value(ctx) diags = append(diags, itemDiags...) if itemDiags.HasErrors() { isKnown = false } vals = append(vals, newItem) } e.Item.clearValue(ctx) // clean up our temporary value if upgradedUnknown { return cty.DynamicVal, diags } if !isKnown { // We'll ignore the resultTy diagnostics in this case since they // will just be the same errors we saw while iterating above. ty, _ := resultTy() return cty.UnknownVal(ty), diags } switch { case sourceTy.IsListType() || sourceTy.IsSetType(): if len(vals) == 0 { ty, tyDiags := resultTy() diags = append(diags, tyDiags...)
return cty.ListValEmpty(ty.ElementType()), diags } return cty.ListVal(vals).WithMarks(marks), diags default: return cty.TupleVal(vals).WithMarks(marks), diags } } func (e *SplatExpr) walkChildNodes(w internalWalkFunc) { w(e.Source) w(e.Each) } func (e *SplatExpr) Range() hcl.Range { return e.SrcRange } func (e *SplatExpr) StartRange() hcl.Range { return e.MarkerRange } // AnonSymbolExpr is used as a placeholder for a value in an expression that // can be applied dynamically to any value at runtime. // // This is a rather odd, synthetic expression. It is used as part of the // representation of splat expressions as a placeholder for the current item // being visited in the splat evaluation. // // AnonSymbolExpr cannot be evaluated in isolation. If its Value is called // directly then cty.DynamicVal will be returned. Instead, it is evaluated // in terms of another node (i.e. a splat expression) which temporarily // assigns it a value. type AnonSymbolExpr struct { SrcRange hcl.Range // values and its associated lock are used to isolate concurrent // evaluations of a symbol from one another. It is the calling application's // responsibility to ensure that the same splat expression is not evaluated // concurrently within the _same_ EvalContext, but it is fine and safe to // do concurrent evaluations with distinct EvalContexts. values map[*hcl.EvalContext]cty.Value valuesLock sync.RWMutex } func (e *AnonSymbolExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { if ctx == nil { return cty.DynamicVal, nil } e.valuesLock.RLock() defer e.valuesLock.RUnlock() val, exists := e.values[ctx] if !exists { return cty.DynamicVal, nil } return val, nil } // setValue sets a temporary local value for the expression when evaluated // in the given context, which must be non-nil.
func (e *AnonSymbolExpr) setValue(ctx *hcl.EvalContext, val cty.Value) { e.valuesLock.Lock() defer e.valuesLock.Unlock() if e.values == nil { e.values = make(map[*hcl.EvalContext]cty.Value) } if ctx == nil { panic("can't setValue for a nil EvalContext") } e.values[ctx] = val } func (e *AnonSymbolExpr) clearValue(ctx *hcl.EvalContext) { e.valuesLock.Lock() defer e.valuesLock.Unlock() if e.values == nil { return } if ctx == nil { panic("can't clearValue for a nil EvalContext") } delete(e.values, ctx) } func (e *AnonSymbolExpr) walkChildNodes(w internalWalkFunc) { // AnonSymbolExpr is a leaf node in the tree } func (e *AnonSymbolExpr) Range() hcl.Range { return e.SrcRange } func (e *AnonSymbolExpr) StartRange() hcl.Range { return e.SrcRange } hcl-2.14.1/hclsyntax/expression_ops.go000066400000000000000000000137221431334125700200010ustar00rootroot00000000000000package hclsyntax import ( "fmt" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" "github.com/zclconf/go-cty/cty/function" "github.com/zclconf/go-cty/cty/function/stdlib" ) type Operation struct { Impl function.Function Type cty.Type } var ( OpLogicalOr = &Operation{ Impl: stdlib.OrFunc, Type: cty.Bool, } OpLogicalAnd = &Operation{ Impl: stdlib.AndFunc, Type: cty.Bool, } OpLogicalNot = &Operation{ Impl: stdlib.NotFunc, Type: cty.Bool, } OpEqual = &Operation{ Impl: stdlib.EqualFunc, Type: cty.Bool, } OpNotEqual = &Operation{ Impl: stdlib.NotEqualFunc, Type: cty.Bool, } OpGreaterThan = &Operation{ Impl: stdlib.GreaterThanFunc, Type: cty.Bool, } OpGreaterThanOrEqual = &Operation{ Impl: stdlib.GreaterThanOrEqualToFunc, Type: cty.Bool, } OpLessThan = &Operation{ Impl: stdlib.LessThanFunc, Type: cty.Bool, } OpLessThanOrEqual = &Operation{ Impl: stdlib.LessThanOrEqualToFunc, Type: cty.Bool, } OpAdd = &Operation{ Impl: stdlib.AddFunc, Type: cty.Number, } OpSubtract = &Operation{ Impl: stdlib.SubtractFunc, Type: cty.Number, } OpMultiply = &Operation{ Impl: stdlib.MultiplyFunc, Type: cty.Number, } OpDivide = &Operation{ Impl: stdlib.DivideFunc, Type: cty.Number, } OpModulo = &Operation{ Impl: stdlib.ModuloFunc, Type: cty.Number, } OpNegate = &Operation{ Impl: stdlib.NegateFunc, Type: cty.Number, } ) var binaryOps []map[TokenType]*Operation func init() { // This operation table maps from the operator's token type // to the AST operation type. All expressions produced from // binary operators are BinaryOp nodes. // // Binary operator groups are listed in order of precedence, with // the *lowest* precedence first. Operators within the same group // have left-to-right associativity. 
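// For example (illustrative): because the additive group is listed before
// the multiplicative group, "a + b * c" groups as "a + (b * c)", and
// left-to-right associativity within a group means "a - b - c" groups as
// "(a - b) - c".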
binaryOps = []map[TokenType]*Operation{ { TokenOr: OpLogicalOr, }, { TokenAnd: OpLogicalAnd, }, { TokenEqualOp: OpEqual, TokenNotEqual: OpNotEqual, }, { TokenGreaterThan: OpGreaterThan, TokenGreaterThanEq: OpGreaterThanOrEqual, TokenLessThan: OpLessThan, TokenLessThanEq: OpLessThanOrEqual, }, { TokenPlus: OpAdd, TokenMinus: OpSubtract, }, { TokenStar: OpMultiply, TokenSlash: OpDivide, TokenPercent: OpModulo, }, } } type BinaryOpExpr struct { LHS Expression Op *Operation RHS Expression SrcRange hcl.Range } func (e *BinaryOpExpr) walkChildNodes(w internalWalkFunc) { w(e.LHS) w(e.RHS) } func (e *BinaryOpExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { impl := e.Op.Impl // assumed to be a function taking exactly two arguments params := impl.Params() lhsParam := params[0] rhsParam := params[1] var diags hcl.Diagnostics givenLHSVal, lhsDiags := e.LHS.Value(ctx) givenRHSVal, rhsDiags := e.RHS.Value(ctx) diags = append(diags, lhsDiags...) diags = append(diags, rhsDiags...) lhsVal, err := convert.Convert(givenLHSVal, lhsParam.Type) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid operand", Detail: fmt.Sprintf("Unsuitable value for left operand: %s.", err), Subject: e.LHS.Range().Ptr(), Context: &e.SrcRange, Expression: e.LHS, EvalContext: ctx, }) } rhsVal, err := convert.Convert(givenRHSVal, rhsParam.Type) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid operand", Detail: fmt.Sprintf("Unsuitable value for right operand: %s.", err), Subject: e.RHS.Range().Ptr(), Context: &e.SrcRange, Expression: e.RHS, EvalContext: ctx, }) } if diags.HasErrors() { // Don't actually try the call if we have errors already, since the // this will probably just produce a confusing duplicative diagnostic. return cty.UnknownVal(e.Op.Type), diags } args := []cty.Value{lhsVal, rhsVal} result, err := impl.Call(args) if err != nil { diags = append(diags, &hcl.Diagnostic{ // FIXME: This diagnostic is useless. Severity: hcl.DiagError, Summary: "Operation failed", Detail: fmt.Sprintf("Error during operation: %s.", err), Subject: &e.SrcRange, Expression: e, EvalContext: ctx, }) return cty.UnknownVal(e.Op.Type), diags } return result, diags } func (e *BinaryOpExpr) Range() hcl.Range { return e.SrcRange } func (e *BinaryOpExpr) StartRange() hcl.Range { return e.LHS.StartRange() } type UnaryOpExpr struct { Op *Operation Val Expression SrcRange hcl.Range SymbolRange hcl.Range } func (e *UnaryOpExpr) walkChildNodes(w internalWalkFunc) { w(e.Val) } func (e *UnaryOpExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { impl := e.Op.Impl // assumed to be a function taking exactly one argument params := impl.Params() param := params[0] givenVal, diags := e.Val.Value(ctx) val, err := convert.Convert(givenVal, param.Type) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid operand", Detail: fmt.Sprintf("Unsuitable value for unary operand: %s.", err), Subject: e.Val.Range().Ptr(), Context: &e.SrcRange, Expression: e.Val, EvalContext: ctx, }) } if diags.HasErrors() { // Don't actually try the call if we have errors already, since the // this will probably just produce a confusing duplicative diagnostic. return cty.UnknownVal(e.Op.Type), diags } args := []cty.Value{val} result, err := impl.Call(args) if err != nil { diags = append(diags, &hcl.Diagnostic{ // FIXME: This diagnostic is useless. 
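// The summary below is generic and the detail only restates the raw error
// from the operation implementation, without describing which operation
// was being attempted.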
Severity: hcl.DiagError, Summary: "Operation failed", Detail: fmt.Sprintf("Error during operation: %s.", err), Subject: &e.SrcRange, Expression: e, EvalContext: ctx, }) return cty.UnknownVal(e.Op.Type), diags } return result, diags } func (e *UnaryOpExpr) Range() hcl.Range { return e.SrcRange } func (e *UnaryOpExpr) StartRange() hcl.Range { return e.SymbolRange } hcl-2.14.1/hclsyntax/expression_static_test.go000066400000000000000000000060201431334125700215170ustar00rootroot00000000000000package hclsyntax import ( "testing" "github.com/go-test/deep" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) func TestTraversalStatic(t *testing.T) { expr, diags := ParseExpression([]byte(`a.b.c`), "", hcl.Pos{Line: 1, Column: 1}) got, moreDiags := hcl.AbsTraversalForExpr(expr) diags = append(diags, moreDiags...) if len(diags) != 0 { t.Errorf("wrong number of diags %d; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag) } return } want := hcl.Traversal{ hcl.TraverseRoot{ Name: "a", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, hcl.TraverseAttr{ Name: "b", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, hcl.TraverseAttr{ Name: "c", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 4, Byte: 3}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, } for _, problem := range deep.Equal(got, want) { t.Errorf(problem) } } func TestTupleStatic(t *testing.T) { expr, diags := ParseExpression([]byte(`[true, false]`), "", hcl.Pos{Line: 1, Column: 1}) exprs, moreDiags := hcl.ExprList(expr) diags = append(diags, moreDiags...) if len(diags) != 0 { t.Errorf("wrong number of diags %d; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag) } return } if got, want := len(exprs), 2; got != want { t.Fatalf("wrong length %d; want %d", got, want) } got := make([]cty.Value, len(exprs)) want := []cty.Value{ cty.True, cty.False, } for i, itemExpr := range exprs { val, valDiags := itemExpr.Value(nil) if len(valDiags) != 0 { t.Errorf("wrong number of diags %d; want 0", len(valDiags)) for _, diag := range valDiags { t.Logf("- %s", diag) } return } got[i] = val } for _, problem := range deep.Equal(got, want) { t.Errorf(problem) } } func TestMapStatic(t *testing.T) { expr, diags := ParseExpression([]byte(`{"foo":true,"bar":false}`), "", hcl.Pos{Line: 1, Column: 1}) items, moreDiags := hcl.ExprMap(expr) diags = append(diags, moreDiags...) if len(diags) != 0 { t.Errorf("wrong number of diags %d; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag) } return } if got, want := len(items), 2; got != want { t.Fatalf("wrong length %d; want %d", got, want) } got := make(map[cty.Value]cty.Value) want := map[cty.Value]cty.Value{ cty.StringVal("foo"): cty.True, cty.StringVal("bar"): cty.False, } for _, item := range items { var itemDiags hcl.Diagnostics key, keyDiags := item.Key.Value(nil) itemDiags = append(itemDiags, keyDiags...) val, valDiags := item.Value.Value(nil) itemDiags = append(itemDiags, valDiags...) 
if len(itemDiags) != 0 { t.Errorf("wrong number of diags %d; want 0", len(itemDiags)) for _, diag := range itemDiags { t.Logf("- %s", diag) } return } got[key] = val } for _, problem := range deep.Equal(got, want) { t.Errorf(problem) } } hcl-2.14.1/hclsyntax/expression_template.go000066400000000000000000000143421431334125700210120ustar00rootroot00000000000000package hclsyntax import ( "bytes" "fmt" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" ) type TemplateExpr struct { Parts []Expression SrcRange hcl.Range } func (e *TemplateExpr) walkChildNodes(w internalWalkFunc) { for _, part := range e.Parts { w(part) } } func (e *TemplateExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { buf := &bytes.Buffer{} var diags hcl.Diagnostics isKnown := true // Maintain a set of marks for values used in the template marks := make(cty.ValueMarks) for _, part := range e.Parts { partVal, partDiags := part.Value(ctx) diags = append(diags, partDiags...) if partVal.IsNull() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid template interpolation value", Detail: fmt.Sprintf( "The expression result is null. Cannot include a null value in a string template.", ), Subject: part.Range().Ptr(), Context: &e.SrcRange, Expression: part, EvalContext: ctx, }) continue } // Unmark the part and merge its marks into the set unmarkedVal, partMarks := partVal.Unmark() for k, v := range partMarks { marks[k] = v } if !partVal.IsKnown() { // If any part is unknown then the result as a whole must be // unknown too. We'll keep on processing the rest of the parts // anyway, because we want to still emit any diagnostics resulting // from evaluating those. isKnown = false continue } strVal, err := convert.Convert(unmarkedVal, cty.String) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid template interpolation value", Detail: fmt.Sprintf( "Cannot include the given value in a string template: %s.", err.Error(), ), Subject: part.Range().Ptr(), Context: &e.SrcRange, Expression: part, EvalContext: ctx, }) continue } buf.WriteString(strVal.AsString()) } var ret cty.Value if !isKnown { ret = cty.UnknownVal(cty.String) } else { ret = cty.StringVal(buf.String()) } // Apply the full set of marks to the returned value return ret.WithMarks(marks), diags } func (e *TemplateExpr) Range() hcl.Range { return e.SrcRange } func (e *TemplateExpr) StartRange() hcl.Range { return e.Parts[0].StartRange() } // IsStringLiteral returns true if and only if the template consists only of // single string literal, as would be created for a simple quoted string like // "foo". // // If this function returns true, then calling Value on the same expression // with a nil EvalContext will return the literal value. // // Note that "${"foo"}", "${1}", etc aren't considered literal values for the // purposes of this method, because the intent of this method is to identify // situations where the user seems to be explicitly intending literal string // interpretation, not situations that result in literals as a technicality // of the template expression unwrapping behavior. func (e *TemplateExpr) IsStringLiteral() bool { if len(e.Parts) != 1 { return false } _, ok := e.Parts[0].(*LiteralValueExpr) return ok } // TemplateJoinExpr is used to convert tuples of strings produced by template // constructs (i.e. for loops) into flat strings, by converting the values // tos strings and joining them. 
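// For example, a "for" directive whose iterations produce the strings "a",
// "b" and "c" joins them into the single string "abc".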
This AST node is not used directly; it's // produced as part of the AST of a "for" loop in a template. type TemplateJoinExpr struct { Tuple Expression } func (e *TemplateJoinExpr) walkChildNodes(w internalWalkFunc) { w(e.Tuple) } func (e *TemplateJoinExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { tuple, diags := e.Tuple.Value(ctx) if tuple.IsNull() { // This indicates a bug in the code that constructed the AST. panic("TemplateJoinExpr got null tuple") } if tuple.Type() == cty.DynamicPseudoType { return cty.UnknownVal(cty.String), diags } if !tuple.Type().IsTupleType() { // This indicates a bug in the code that constructed the AST. panic("TemplateJoinExpr got non-tuple tuple") } if !tuple.IsKnown() { return cty.UnknownVal(cty.String), diags } tuple, marks := tuple.Unmark() allMarks := []cty.ValueMarks{marks} buf := &bytes.Buffer{} it := tuple.ElementIterator() for it.Next() { _, val := it.Element() if val.IsNull() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid template interpolation value", Detail: fmt.Sprintf( "An iteration result is null. Cannot include a null value in a string template.", ), Subject: e.Range().Ptr(), Expression: e, EvalContext: ctx, }) continue } if val.Type() == cty.DynamicPseudoType { return cty.UnknownVal(cty.String).WithMarks(marks), diags } strVal, err := convert.Convert(val, cty.String) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid template interpolation value", Detail: fmt.Sprintf( "Cannot include one of the interpolation results into the string template: %s.", err.Error(), ), Subject: e.Range().Ptr(), Expression: e, EvalContext: ctx, }) continue } if !val.IsKnown() { return cty.UnknownVal(cty.String).WithMarks(marks), diags } strVal, strValMarks := strVal.Unmark() if len(strValMarks) > 0 { allMarks = append(allMarks, strValMarks) } buf.WriteString(strVal.AsString()) } return cty.StringVal(buf.String()).WithMarks(allMarks...), diags } func (e *TemplateJoinExpr) Range() hcl.Range { return e.Tuple.Range() } func (e *TemplateJoinExpr) StartRange() hcl.Range { return e.Tuple.StartRange() } // TemplateWrapExpr is used instead of a TemplateExpr when a template // consists _only_ of a single interpolation sequence. In that case, the // template's result is the single interpolation's result, verbatim with // no type conversions. type TemplateWrapExpr struct { Wrapped Expression SrcRange hcl.Range } func (e *TemplateWrapExpr) walkChildNodes(w internalWalkFunc) { w(e.Wrapped) } func (e *TemplateWrapExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { return e.Wrapped.Value(ctx) } func (e *TemplateWrapExpr) Range() hcl.Range { return e.SrcRange } func (e *TemplateWrapExpr) StartRange() hcl.Range { return e.SrcRange } hcl-2.14.1/hclsyntax/expression_template_test.go000066400000000000000000000174301431334125700220520ustar00rootroot00000000000000package hclsyntax import ( "testing" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) func TestTemplateExprParseAndValue(t *testing.T) { // This is a combo test that exercises both the parser and the Value // method, with the focus on the latter but indirectly testing the former. 
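//
// Each case records only how many diagnostics are expected rather than
// their exact content, and the resulting value is checked even for error
// cases, since evaluation still returns a (possibly unknown or partial)
// result alongside its diagnostics.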
tests := []struct { input string ctx *hcl.EvalContext want cty.Value diagCount int }{ { `1`, nil, cty.StringVal("1"), 0, }, { `(1)`, nil, cty.StringVal("(1)"), 0, }, { `true`, nil, cty.StringVal("true"), 0, }, { ` hello world `, nil, cty.StringVal("\nhello world\n"), 0, }, { `hello ${"world"}`, nil, cty.StringVal("hello world"), 0, }, { `hello\nworld`, // backslash escapes not supported in bare templates nil, cty.StringVal("hello\\nworld"), 0, }, { `hello ${12.5}`, nil, cty.StringVal("hello 12.5"), 0, }, { `silly ${"${"nesting"}"}`, nil, cty.StringVal("silly nesting"), 0, }, { `silly ${"${true}"}`, nil, cty.StringVal("silly true"), 0, }, { `hello $${escaped}`, nil, cty.StringVal("hello ${escaped}"), 0, }, { `hello $$nonescape`, nil, cty.StringVal("hello $$nonescape"), 0, }, { `hello %${"world"}`, nil, cty.StringVal("hello %world"), 0, }, { `${true}`, nil, cty.True, // any single expression is unwrapped without stringification 0, }, { `trim ${~ "trim"}`, nil, cty.StringVal("trimtrim"), 0, }, { `${"trim" ~} trim`, nil, cty.StringVal("trimtrim"), 0, }, { `trim ${~"trim"~} trim`, nil, cty.StringVal("trimtrimtrim"), 0, }, { ` ${~ true ~} `, nil, cty.StringVal("true"), // can't trim space to reduce to a single expression 0, }, { `${"hello "}${~"trim"~}${" hello"}`, nil, cty.StringVal("hello trim hello"), // trimming can't reach into a neighboring interpolation 0, }, { `${true}${~"trim"~}${true}`, nil, cty.StringVal("truetrimtrue"), // trimming is no-op of neighbors aren't literal strings 0, }, { `%{ if true ~} hello %{~ endif }`, nil, cty.StringVal("hello"), 0, }, { `%{ if false ~} hello %{~ endif}`, nil, cty.StringVal(""), 0, }, { `%{ if true ~} hello %{~ else ~} goodbye %{~ endif }`, nil, cty.StringVal("hello"), 0, }, { `%{ if false ~} hello %{~ else ~} goodbye %{~ endif }`, nil, cty.StringVal("goodbye"), 0, }, { `%{ if true ~} %{~ if false ~} hello %{~ else ~} goodbye %{~ endif ~} %{~ endif }`, nil, cty.StringVal("goodbye"), 0, }, { `%{ if false ~} %{~ if false ~} hello %{~ else ~} goodbye %{~ endif ~} %{~ endif }`, nil, cty.StringVal(""), 0, }, { `%{ of true ~} hello %{~ endif}`, nil, cty.UnknownVal(cty.String), 2, // "of" is not a valid control keyword, and "endif" is therefore also unexpected }, { `%{ for v in ["a", "b", "c"] }${v}%{ endfor }`, nil, cty.StringVal("abc"), 0, }, { `%{ for v in ["a", "b", "c"] } ${v} %{ endfor }`, nil, cty.StringVal(" a b c "), 0, }, { `%{ for v in ["a", "b", "c"] ~} ${v} %{~ endfor }`, nil, cty.StringVal("abc"), 0, }, { `%{ for v in [] }${v}%{ endfor }`, nil, cty.StringVal(""), 0, }, { `%{ for i, v in ["a", "b", "c"] }${i}${v}%{ endfor }`, nil, cty.StringVal("0a1b2c"), 0, }, { `%{ for k, v in {"A" = "a", "B" = "b", "C" = "c"} }${k}${v}%{ endfor }`, nil, cty.StringVal("AaBbCc"), 0, }, { `%{ for v in ["a", "b", "c"] }${v}${nl}%{ endfor }`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "nl": cty.StringVal("\n"), }, }, cty.StringVal("a\nb\nc\n"), 0, }, { `\n`, // backslash escapes are not interpreted in template literals nil, cty.StringVal("\\n"), 0, }, { `\uu1234`, // backslash escapes are not interpreted in template literals nil, // (this is intentionally an invalid one to ensure we don't produce an error) cty.StringVal("\\uu1234"), 0, }, { `$`, nil, cty.StringVal("$"), 0, }, { `$$`, nil, cty.StringVal("$$"), 0, }, { `%`, nil, cty.StringVal("%"), 0, }, { `%%`, nil, cty.StringVal("%%"), 0, }, { `hello %%{ if true }world%%{ endif }`, nil, cty.StringVal(`hello %{ if true }world%{ endif }`), 0, }, { `hello $%{ if true }world%{ endif }`, nil, 
cty.StringVal("hello $world"), 0, }, { `%{ endif }`, nil, cty.UnknownVal(cty.String), 1, // Unexpected endif directive }, { `%{ endfor }`, nil, cty.UnknownVal(cty.String), 1, // Unexpected endfor directive }, { // marks from uninterpolated values are ignored `hello%{ if false } ${target}%{ endif }`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "target": cty.StringVal("world").Mark("sensitive"), }, }, cty.StringVal("hello"), 0, }, { // marks from interpolated values are passed through `${greeting} ${target}`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "greeting": cty.StringVal("hello").Mark("english"), "target": cty.StringVal("world").Mark("sensitive"), }, }, cty.StringVal("hello world").WithMarks(cty.NewValueMarks("english", "sensitive")), 0, }, { // can use marks by traversing complex values `Authenticate with "${secrets.passphrase}"`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "secrets": cty.MapVal(map[string]cty.Value{ "passphrase": cty.StringVal("my voice is my passport").Mark("sensitive"), }).Mark("sensitive"), }, }, cty.StringVal(`Authenticate with "my voice is my passport"`).WithMarks(cty.NewValueMarks("sensitive")), 0, }, { // can loop over marked collections `%{ for s in secrets }${s}%{ endfor }`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "secrets": cty.ListVal([]cty.Value{ cty.StringVal("foo"), cty.StringVal("bar"), cty.StringVal("baz"), }).Mark("sensitive"), }, }, cty.StringVal("foobarbaz").Mark("sensitive"), 0, }, { // marks on individual elements propagate to the result `%{ for s in secrets }${s}%{ endfor }`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "secrets": cty.ListVal([]cty.Value{ cty.StringVal("foo"), cty.StringVal("bar").Mark("sensitive"), cty.StringVal("baz"), }), }, }, cty.StringVal("foobarbaz").Mark("sensitive"), 0, }, { // lots of marks! `%{ for s in secrets }${s}%{ endfor }`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "secrets": cty.ListVal([]cty.Value{ cty.StringVal("foo").Mark("x"), cty.StringVal("bar").Mark("y"), cty.StringVal("baz").Mark("z"), }).Mark("x"), // second instance of x }, }, cty.StringVal("foobarbaz").WithMarks(cty.NewValueMarks("x", "y", "z")), 0, }, { // marks from unknown values are maintained `test_${target}`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "target": cty.UnknownVal(cty.String).Mark("sensitive"), }, }, cty.UnknownVal(cty.String).Mark("sensitive"), 0, }, } for _, test := range tests { t.Run(test.input, func(t *testing.T) { expr, parseDiags := ParseTemplate([]byte(test.input), "", hcl.Pos{Line: 1, Column: 1, Byte: 0}) got, valDiags := expr.Value(test.ctx) diagCount := len(parseDiags) + len(valDiags) if diagCount != test.diagCount { t.Errorf("wrong number of diagnostics %d; want %d", diagCount, test.diagCount) for _, diag := range parseDiags { t.Logf(" - %s", diag.Error()) } for _, diag := range valDiags { t.Logf(" - %s", diag.Error()) } } if !got.RawEquals(test.want) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, test.want) } }) } } hcl-2.14.1/hclsyntax/expression_test.go000066400000000000000000001356371431334125700201710ustar00rootroot00000000000000package hclsyntax import ( "testing" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" "github.com/zclconf/go-cty/cty/function/stdlib" ) func TestExpressionParseAndValue(t *testing.T) { // This is a combo test that exercises both the parser and the Value // method, with the focus on the latter but indirectly testing the former. 
tests := []struct { input string ctx *hcl.EvalContext want cty.Value diagCount int }{ { `1`, nil, cty.NumberIntVal(1), 0, }, { `(1)`, nil, cty.NumberIntVal(1), 0, }, { `(2+3)`, nil, cty.NumberIntVal(5), 0, }, { `2*5+1`, nil, cty.NumberIntVal(11), 0, }, { `9%8`, nil, cty.NumberIntVal(1), 0, }, { `(2+unk)`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unk": cty.UnknownVal(cty.Number), }, }, cty.UnknownVal(cty.Number), 0, }, { `(2+unk)`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unk": cty.DynamicVal, }, }, cty.UnknownVal(cty.Number), 0, }, { `(unk+unk)`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unk": cty.DynamicVal, }, }, cty.UnknownVal(cty.Number), 0, }, { `(2+true)`, nil, cty.UnknownVal(cty.Number), 1, // unsuitable type for right operand }, { `(false+true)`, nil, cty.UnknownVal(cty.Number), 2, // unsuitable type for each operand }, { `(5 == 5)`, nil, cty.True, 0, }, { `(5 == 4)`, nil, cty.False, 0, }, { `(1 == true)`, nil, cty.False, 0, }, { `("true" == true)`, nil, cty.False, 0, }, { `(true == "true")`, nil, cty.False, 0, }, { `(true != "true")`, nil, cty.True, 0, }, { `(- 2)`, nil, cty.NumberIntVal(-2), 0, }, { `(! true)`, nil, cty.False, 0, }, { `( 1 )`, nil, cty.NumberIntVal(1), 0, }, { `(1`, nil, cty.NumberIntVal(1), 1, // Unbalanced parentheses }, { `true`, nil, cty.True, 0, }, { `false`, nil, cty.False, 0, }, { `null`, nil, cty.NullVal(cty.DynamicPseudoType), 0, }, { `true true`, nil, cty.True, 1, // extra characters after expression }, { `"hello"`, nil, cty.StringVal("hello"), 0, }, { "\"hello `backtick` world\"", nil, cty.StringVal("hello `backtick` world"), 0, }, { `"hello\nworld"`, nil, cty.StringVal("hello\nworld"), 0, }, { `"unclosed`, nil, cty.StringVal("unclosed"), 1, // Unterminated template string }, { `"hello ${"world"}"`, nil, cty.StringVal("hello world"), 0, }, { `"hello ${12.5}"`, nil, cty.StringVal("hello 12.5"), 0, }, { `"silly ${"${"nesting"}"}"`, nil, cty.StringVal("silly nesting"), 0, }, { `"silly ${"${true}"}"`, nil, cty.StringVal("silly true"), 0, }, { `"hello $${escaped}"`, nil, cty.StringVal("hello ${escaped}"), 0, }, { `"hello $$nonescape"`, nil, cty.StringVal("hello $$nonescape"), 0, }, { `"$"`, nil, cty.StringVal("$"), 0, }, { `"%"`, nil, cty.StringVal("%"), 0, }, { `upper("foo")`, &hcl.EvalContext{ Functions: map[string]function.Function{ "upper": stdlib.UpperFunc, }, }, cty.StringVal("FOO"), 0, }, { ` upper( "foo" ) `, &hcl.EvalContext{ Functions: map[string]function.Function{ "upper": stdlib.UpperFunc, }, }, cty.StringVal("FOO"), 0, }, { `upper(["foo"]...)`, &hcl.EvalContext{ Functions: map[string]function.Function{ "upper": stdlib.UpperFunc, }, }, cty.StringVal("FOO"), 0, }, { `upper("foo", []...)`, &hcl.EvalContext{ Functions: map[string]function.Function{ "upper": stdlib.UpperFunc, }, }, cty.StringVal("FOO"), 0, }, { `upper("foo", "bar")`, &hcl.EvalContext{ Functions: map[string]function.Function{ "upper": stdlib.UpperFunc, }, }, cty.DynamicVal, 1, // too many function arguments }, { `upper(["foo", "bar"]...)`, &hcl.EvalContext{ Functions: map[string]function.Function{ "upper": stdlib.UpperFunc, }, }, cty.DynamicVal, 1, // too many function arguments }, { `concat([1, null]...)`, &hcl.EvalContext{ Functions: map[string]function.Function{ "concat": stdlib.ConcatFunc, }, }, cty.DynamicVal, 1, // argument cannot be null }, { `concat(var.unknownlist...)`, &hcl.EvalContext{ Functions: map[string]function.Function{ "concat": stdlib.ConcatFunc, }, Variables: map[string]cty.Value{ "var": cty.ObjectVal(map[string]cty.Value{ 
"unknownlist": cty.UnknownVal(cty.DynamicPseudoType), }), }, }, cty.DynamicVal, 0, }, { `misbehave()`, &hcl.EvalContext{ Functions: map[string]function.Function{ "misbehave": function.New(&function.Spec{ Type: func(args []cty.Value) (cty.Type, error) { // This function misbehaves by indicating an error // on an argument index that is out of range for // its declared parameters. That would always be // a bug in the function, but we want to avoid // panicking in this case and just behave like it // was a normal (non-arg) error. return cty.NilType, function.NewArgErrorf(1, "out of range") }, }), }, }, cty.DynamicVal, 1, // Call to function "misbehave" failed: out of range }, { `misbehave() /* variadic */`, &hcl.EvalContext{ Functions: map[string]function.Function{ "misbehave": function.New(&function.Spec{ VarParam: &function.Parameter{ Name: "foo", Type: cty.String, }, Type: func(args []cty.Value) (cty.Type, error) { // This function misbehaves by indicating an error // on an argument index that is out of range for // the given arguments. That would always be a // bug in the function, but to avoid panicking we // just treat it like a problem related to the // declared variadic argument. return cty.NilType, function.NewArgErrorf(1, "out of range") }, }), }, }, cty.DynamicVal, 1, // Invalid value for "foo" parameter: out of range }, { `misbehave([]...)`, &hcl.EvalContext{ Functions: map[string]function.Function{ "misbehave": function.New(&function.Spec{ VarParam: &function.Parameter{ Name: "foo", Type: cty.String, }, Type: func(args []cty.Value) (cty.Type, error) { // This function misbehaves by indicating an error // on an argument index that is out of range for // the given arguments. That would always be a // bug in the function, but to avoid panicking we // just treat it like a problem related to the // declared variadic argument. return cty.NilType, function.NewArgErrorf(1, "out of range") }, }), }, }, cty.DynamicVal, 1, // Invalid value for "foo" parameter: out of range }, { `argerrorexpand(["a", "b"]...)`, &hcl.EvalContext{ Functions: map[string]function.Function{ "argerrorexpand": function.New(&function.Spec{ VarParam: &function.Parameter{ Name: "foo", Type: cty.String, }, Type: func(args []cty.Value) (cty.Type, error) { // We should be able to indicate an error in // argument 1 because the indices are into the // arguments _after_ "..." expansion. An earlier // HCL version had a bug where it used the // pre-expansion arguments and would thus panic // in this case. 
return cty.NilType, function.NewArgErrorf(1, "blah blah") }, }), }, }, cty.DynamicVal, 1, // Invalid value for "foo" parameter: blah blah }, { `[]`, nil, cty.EmptyTupleVal, 0, }, { `[1]`, nil, cty.TupleVal([]cty.Value{cty.NumberIntVal(1)}), 0, }, { `[1,]`, nil, cty.TupleVal([]cty.Value{cty.NumberIntVal(1)}), 0, }, { `[1,true]`, nil, cty.TupleVal([]cty.Value{cty.NumberIntVal(1), cty.True}), 0, }, { `[ 1, true ]`, nil, cty.TupleVal([]cty.Value{cty.NumberIntVal(1), cty.True}), 0, }, { `{}`, nil, cty.EmptyObjectVal, 0, }, { `{"hello": "world"}`, nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), }), 0, }, { `{"hello" = "world"}`, nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), }), 0, }, { `{hello = "world"}`, nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), }), 0, }, { `{hello: "world"}`, nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), }), 0, }, { `{true: "yes"}`, nil, cty.ObjectVal(map[string]cty.Value{ "true": cty.StringVal("yes"), }), 0, }, { `{false: "yes"}`, nil, cty.ObjectVal(map[string]cty.Value{ "false": cty.StringVal("yes"), }), 0, }, { `{null: "yes"}`, nil, cty.ObjectVal(map[string]cty.Value{ "null": cty.StringVal("yes"), }), 0, }, { `{15: "yes"}`, nil, cty.ObjectVal(map[string]cty.Value{ "15": cty.StringVal("yes"), }), 0, }, { `{[]: "yes"}`, nil, cty.DynamicVal, 1, // Incorrect key type; Can't use this value as a key: string required }, { `{"centos_7.2_ap-south-1" = "ami-abc123"}`, nil, cty.ObjectVal(map[string]cty.Value{ "centos_7.2_ap-south-1": cty.StringVal("ami-abc123"), }), 0, }, { // This is syntactically valid (it's similar to foo["bar"]) // but is rejected during evaluation to force the user to be explicit // about which of the following interpretations they mean: // -{(foo.bar) = "baz"} // -{"foo.bar" = "baz"} // naked traversals as keys are allowed when analyzing an expression // statically so an application can define object-syntax-based // language constructs with looser requirements, but we reject // this during normal expression evaluation. `{foo.bar = "ami-abc123"}`, nil, cty.DynamicVal, 1, // Ambiguous attribute key; If this expression is intended to be a reference, wrap it in parentheses. If it's instead intended as a literal name containing periods, wrap it in quotes to create a string literal. }, { // This is a weird variant of the above where a period is followed // by a digit, causing the parser to interpret it as an index // operator using the legacy HIL/Terraform index syntax. // This one _does_ fail parsing, causing it to be subject to // parser recovery behavior. `{centos_7.2_ap-south-1 = "ami-abc123"}`, nil, cty.EmptyObjectVal, // (due to parser recovery behavior) 1, // Missing key/value separator; Expected an equals sign ("=") to mark the beginning of the attribute value. If you intended to given an attribute name containing periods or spaces, write the name in quotes to create a string literal. 
}, { `{var.greeting = "world"}`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "var": cty.ObjectVal(map[string]cty.Value{ "greeting": cty.StringVal("hello"), }), }, }, cty.DynamicVal, 1, // Ambiguous attribute key }, { `{(var.greeting) = "world"}`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "var": cty.ObjectVal(map[string]cty.Value{ "greeting": cty.StringVal("hello"), }), }, }, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), }), 0, }, { // Marked values as object keys `{(var.greeting) = "world", "goodbye" = "earth"}`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "var": cty.ObjectVal(map[string]cty.Value{ "greeting": cty.StringVal("hello").Mark("marked"), }), }, }, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), "goodbye": cty.StringVal("earth"), }).Mark("marked"), 0, }, { `{"${var.greeting}" = "world"}`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "var": cty.ObjectVal(map[string]cty.Value{ "greeting": cty.StringVal("hello"), }), }, }, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), }), 0, }, { `{"hello" = "world", "goodbye" = "cruel world"}`, nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), "goodbye": cty.StringVal("cruel world"), }), 0, }, { `{ "hello" = "world" }`, nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), }), 0, }, { `{ "hello" = "world" "goodbye" = "cruel world" }`, nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), "goodbye": cty.StringVal("cruel world"), }), 0, }, { `{ "hello" = "world", "goodbye" = "cruel world" }`, nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), "goodbye": cty.StringVal("cruel world"), }), 0, }, { `{ "hello" = "world", "goodbye" = "cruel world", }`, nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), "goodbye": cty.StringVal("cruel world"), }), 0, }, { "{\n for k, v in {hello: \"world\"}:\nk => v\n}", nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), }), 0, }, { // This one is different than the previous because the extra level of // object constructor causes the inner for expression to begin parsing // in newline-sensitive mode, which it must then properly disable in // order to peek the "for" keyword. 
"{\n a = {\n for k, v in {hello: \"world\"}:\nk => v\n }\n}", nil, cty.ObjectVal(map[string]cty.Value{ "a": cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), }), }), 0, }, { `{for k, v in {hello: "world"}: k => v if k == "hello"}`, nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), }), 0, }, { `{for k, v in {hello: "world"}: upper(k) => upper(v) if k == "hello"}`, &hcl.EvalContext{ Functions: map[string]function.Function{ "upper": stdlib.UpperFunc, }, }, cty.ObjectVal(map[string]cty.Value{ "HELLO": cty.StringVal("WORLD"), }), 0, }, { `{for k, v in ["world"]: k => v if k == 0}`, nil, cty.ObjectVal(map[string]cty.Value{ "0": cty.StringVal("world"), }), 0, }, { `{for v in ["world"]: v => v}`, nil, cty.ObjectVal(map[string]cty.Value{ "world": cty.StringVal("world"), }), 0, }, { `{for k, v in {hello: "world"}: k => v if k == "foo"}`, nil, cty.EmptyObjectVal, 0, }, { `{for k, v in {hello: "world"}: 5 => v}`, nil, cty.ObjectVal(map[string]cty.Value{ "5": cty.StringVal("world"), }), 0, }, { `{for k, v in {hello: "world"}: [] => v}`, nil, cty.DynamicVal, 1, // key expression has the wrong type }, { `{for k, v in {hello: "world"}: k => k if k == "hello"}`, nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("hello"), }), 0, }, { `{for k, v in {hello: "world"}: k => foo}`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "foo": cty.StringVal("foo"), }, }, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("foo"), }), 0, }, { `[for k, v in {hello: "world"}: "${k}=${v}"]`, nil, cty.TupleVal([]cty.Value{ cty.StringVal("hello=world"), }), 0, }, { `[for k, v in {hello: "world"}: k => v]`, nil, cty.ObjectVal(map[string]cty.Value{ "hello": cty.StringVal("world"), }), 1, // can't have a key expr when producing a tuple }, { `{for v in {hello: "world"}: v}`, nil, cty.TupleVal([]cty.Value{ cty.StringVal("world"), }), 1, // must have a key expr when producing a map }, { `{for i, v in ["a", "b", "c", "b", "d"]: v => i...}`, nil, cty.ObjectVal(map[string]cty.Value{ "a": cty.TupleVal([]cty.Value{ cty.NumberIntVal(0), }), "b": cty.TupleVal([]cty.Value{ cty.NumberIntVal(1), cty.NumberIntVal(3), }), "c": cty.TupleVal([]cty.Value{ cty.NumberIntVal(2), }), "d": cty.TupleVal([]cty.Value{ cty.NumberIntVal(4), }), }), 0, }, { `{for i, v in ["a", "b", "c", "b", "d"]: v => i... 
if i <= 2}`, nil, cty.ObjectVal(map[string]cty.Value{ "a": cty.TupleVal([]cty.Value{ cty.NumberIntVal(0), }), "b": cty.TupleVal([]cty.Value{ cty.NumberIntVal(1), }), "c": cty.TupleVal([]cty.Value{ cty.NumberIntVal(2), }), }), 0, }, { `{for i, v in ["a", "b", "c", "b", "d"]: v => i}`, nil, cty.ObjectVal(map[string]cty.Value{ "a": cty.NumberIntVal(0), "b": cty.NumberIntVal(1), "c": cty.NumberIntVal(2), "d": cty.NumberIntVal(4), }), 1, // duplicate key "b" }, { `[for v in {hello: "world"}: v...]`, nil, cty.TupleVal([]cty.Value{ cty.StringVal("world"), }), 1, // can't use grouping when producing a tuple }, { `[for v in "hello": v]`, nil, cty.DynamicVal, 1, // can't iterate over a string }, { `[for v in null: v]`, nil, cty.DynamicVal, 1, // can't iterate over a null value }, { `[for v in unk: v]`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unk": cty.UnknownVal(cty.List(cty.String)), }, }, cty.DynamicVal, 0, }, { `[for v in unk: v]`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unk": cty.DynamicVal, }, }, cty.DynamicVal, 0, }, { `[for v in unk: v]`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unk": cty.UnknownVal(cty.String), }, }, cty.DynamicVal, 1, // can't iterate over a string (even if it's unknown) }, { `[for v in ["a", "b"]: v if unkbool]`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unkbool": cty.UnknownVal(cty.Bool), }, }, cty.DynamicVal, 0, }, { `[for v in ["a", "b"]: v if nullbool]`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "nullbool": cty.NullVal(cty.Bool), }, }, cty.DynamicVal, 1, // value of if clause must not be null }, { `[for v in ["a", "b"]: v if dyn]`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "dyn": cty.DynamicVal, }, }, cty.DynamicVal, 0, }, { `[for v in ["a", "b"]: v if unknum]`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unknum": cty.UnknownVal(cty.List(cty.Number)), }, }, cty.DynamicVal, 1, // if expression must be bool }, { `[for i, v in ["a", "b"]: v if i + i]`, nil, cty.DynamicVal, 1, // if expression must be bool }, { `[for v in ["a", "b"]: unkstr]`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unkstr": cty.UnknownVal(cty.String), }, }, cty.TupleVal([]cty.Value{ cty.UnknownVal(cty.String), cty.UnknownVal(cty.String), }), 0, }, { // Marked sequence results in a marked tuple `[for x in things: x if x != ""]`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "things": cty.ListVal([]cty.Value{ cty.StringVal("a"), cty.StringVal("b"), cty.StringVal(""), cty.StringVal("c"), }).Mark("sensitive"), }, }, cty.TupleVal([]cty.Value{ cty.StringVal("a"), cty.StringVal("b"), cty.StringVal("c"), }).Mark("sensitive"), 0, }, { // Marked map results in a marked object `{for k, v in things: k => !v}`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "things": cty.MapVal(map[string]cty.Value{ "a": cty.True, "b": cty.False, }).Mark("sensitive"), }, }, cty.ObjectVal(map[string]cty.Value{ "a": cty.False, "b": cty.True, }).Mark("sensitive"), 0, }, { // Marked map member carries marks through `{for k, v in things: k => !v}`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "things": cty.MapVal(map[string]cty.Value{ "a": cty.True.Mark("sensitive"), "b": cty.False, }), }, }, cty.ObjectVal(map[string]cty.Value{ "a": cty.False.Mark("sensitive"), "b": cty.True, }), 0, }, { // Mark object if keys include marked values, members retain // their original marks in their values `{for v in things: v => "${v}-friend"}`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "things": cty.MapVal(map[string]cty.Value{ "a": 
cty.StringVal("rosie").Mark("marked"), "b": cty.StringVal("robin"), // Check for double-marking when a key val has a duplicate mark "c": cty.StringVal("rowan").Mark("marked"), "d": cty.StringVal("ruben").Mark("also-marked"), }), }, }, cty.ObjectVal(map[string]cty.Value{ "rosie": cty.StringVal("rosie-friend").Mark("marked"), "robin": cty.StringVal("robin-friend"), "rowan": cty.StringVal("rowan-friend").Mark("marked"), "ruben": cty.StringVal("ruben-friend").Mark("also-marked"), }).WithMarks(cty.NewValueMarks("marked", "also-marked")), 0, }, { // object itself is marked, contains marked value `{for v in things: v => "${v}-friend"}`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "things": cty.MapVal(map[string]cty.Value{ "a": cty.StringVal("rosie").Mark("marked"), "b": cty.StringVal("robin"), }).Mark("marks"), }, }, cty.ObjectVal(map[string]cty.Value{ "rosie": cty.StringVal("rosie-friend").Mark("marked"), "robin": cty.StringVal("robin-friend"), }).WithMarks(cty.NewValueMarks("marked", "marks")), 0, }, { // Sequence for loop with marked conditional expression `[for x in things: x if x != secret]`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "things": cty.ListVal([]cty.Value{ cty.StringVal("a"), cty.StringVal("b"), cty.StringVal("c"), }), "secret": cty.StringVal("b").Mark("sensitive"), }, }, cty.TupleVal([]cty.Value{ cty.StringVal("a"), cty.StringVal("c"), }).Mark("sensitive"), 0, }, { // Map for loop with marked conditional expression `{ for k, v in things: k => v if k != secret }`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "things": cty.MapVal(map[string]cty.Value{ "a": cty.True, "b": cty.False, "c": cty.False, }), "secret": cty.StringVal("b").Mark("sensitive"), }, }, cty.ObjectVal(map[string]cty.Value{ "a": cty.True, "c": cty.False, }).Mark("sensitive"), 0, }, { `[{name: "Steve"}, {name: "Ermintrude"}].*.name`, nil, cty.TupleVal([]cty.Value{ cty.StringVal("Steve"), cty.StringVal("Ermintrude"), }), 0, }, { `{name: "Steve"}.*.name`, nil, cty.TupleVal([]cty.Value{ cty.StringVal("Steve"), }), 0, }, { `null[*]`, nil, cty.EmptyTupleVal, 0, }, { `{name: "Steve"}[*].name`, nil, cty.TupleVal([]cty.Value{ cty.StringVal("Steve"), }), 0, }, { `set.*.name`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "set": cty.SetVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "name": cty.StringVal("Steve"), }), }), }, }, cty.ListVal([]cty.Value{ cty.StringVal("Steve"), }), 0, }, { `unkstr[*]`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unkstr": cty.UnknownVal(cty.String), }, }, cty.DynamicVal, 0, }, { `unkstr.*.name`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unkstr": cty.UnknownVal(cty.String), }, }, cty.DynamicVal, 1, // a string has no attribute "name" }, { `dyn.*.name`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "dyn": cty.DynamicVal, }, }, cty.DynamicVal, 0, }, { `unkobj.*.name`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unkobj": cty.UnknownVal(cty.Object(map[string]cty.Type{ "name": cty.String, })), }, }, cty.DynamicVal, 0, }, { `unkobj.*.names`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unkobj": cty.UnknownVal(cty.Object(map[string]cty.Type{ "names": cty.List(cty.String), })), }, }, cty.DynamicVal, 0, }, { `unklistobj.*.name`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unklistobj": cty.UnknownVal(cty.List(cty.Object(map[string]cty.Type{ "name": cty.String, }))), }, }, cty.UnknownVal(cty.List(cty.String)), 0, }, { `unktupleobj.*.name`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "unktupleobj": cty.UnknownVal( 
cty.Tuple([]cty.Type{ cty.Object(map[string]cty.Type{ "name": cty.String, }), cty.Object(map[string]cty.Type{ "name": cty.Bool, }), }), ), }, }, cty.UnknownVal(cty.Tuple([]cty.Type{cty.String, cty.Bool})), 0, }, { `nullobj.*.name`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "nullobj": cty.NullVal(cty.Object(map[string]cty.Type{ "name": cty.String, })), }, }, cty.TupleVal([]cty.Value{}), 0, }, { `nulllist.*.name`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "nulllist": cty.NullVal(cty.List(cty.Object(map[string]cty.Type{ "name": cty.String, }))), }, }, cty.DynamicVal, 1, // splat cannot be applied to null sequence }, { `["hello", "goodbye"].*`, nil, cty.TupleVal([]cty.Value{ cty.StringVal("hello"), cty.StringVal("goodbye"), }), 0, }, { `"hello".*`, nil, cty.TupleVal([]cty.Value{ cty.StringVal("hello"), }), 0, }, { `[["hello"], ["world", "unused"]].*.0`, nil, cty.TupleVal([]cty.Value{ cty.StringVal("hello"), cty.StringVal("world"), }), 0, }, { `[[{name:"foo"}], [{name:"bar"}, {name:"baz"}]].*.0.name`, nil, cty.TupleVal([]cty.Value{ cty.StringVal("foo"), cty.StringVal("bar"), }), 0, }, { `[[[{name:"foo"}]], [[{name:"bar"}], [{name:"baz"}]]].*.0.0.name`, nil, cty.TupleVal([]cty.Value{ cty.DynamicVal, cty.DynamicVal, }), 1, // can't chain legacy index syntax together, like .0.0 (because 0.0 parses as a single number) }, { // For an "attribute-only" splat, an index operator applies to // the splat result as a whole, rather than being incorporated // into the splat traversal itself. `[{name: "Steve"}, {name: "Ermintrude"}].*.name[0]`, nil, cty.StringVal("Steve"), 0, }, { // For a "full" splat, an index operator is consumed as part // of the splat's traversal. `[{names: ["Steve"]}, {names: ["Ermintrude"]}][*].names[0]`, nil, cty.TupleVal([]cty.Value{cty.StringVal("Steve"), cty.StringVal("Ermintrude")}), 0, }, { // Another "full" splat, this time with the index first. `[[{name: "Steve"}], [{name: "Ermintrude"}]][*][0].name`, nil, cty.TupleVal([]cty.Value{cty.StringVal("Steve"), cty.StringVal("Ermintrude")}), 0, }, { // Full splats can nest, which produces nested tuples. 
`[[{name: "Steve"}], [{name: "Ermintrude"}]][*][*].name`, nil, cty.TupleVal([]cty.Value{ cty.TupleVal([]cty.Value{cty.StringVal("Steve")}), cty.TupleVal([]cty.Value{cty.StringVal("Ermintrude")}), }), 0, }, { `[["hello"], ["goodbye"]].*.*`, nil, cty.TupleVal([]cty.Value{ cty.TupleVal([]cty.Value{cty.StringVal("hello")}), cty.TupleVal([]cty.Value{cty.StringVal("goodbye")}), }), 1, }, { // splat with sensitive collection `maps.*.enabled`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "maps": cty.ListVal([]cty.Value{ cty.MapVal(map[string]cty.Value{"enabled": cty.True}), cty.MapVal(map[string]cty.Value{"enabled": cty.False}), }).Mark("sensitive"), }, }, cty.ListVal([]cty.Value{ cty.True, cty.False, }).Mark("sensitive"), 0, }, { // splat with collection with sensitive elements `maps.*.x`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "maps": cty.ListVal([]cty.Value{ cty.MapVal(map[string]cty.Value{ "x": cty.StringVal("foo").Mark("sensitive"), }), cty.MapVal(map[string]cty.Value{ "x": cty.StringVal("bar"), }), }), }, }, cty.ListVal([]cty.Value{ cty.StringVal("foo").Mark("sensitive"), cty.StringVal("bar"), }), 0, }, { `["hello"][0]`, nil, cty.StringVal("hello"), 0, }, { `["hello"].0`, nil, cty.StringVal("hello"), 0, }, { `[["hello"]].0.0`, nil, cty.DynamicVal, 1, // can't chain legacy index syntax together (because 0.0 parses as 0) }, { `[{greeting = "hello"}].0.greeting`, nil, cty.StringVal("hello"), 0, }, { `[][0]`, nil, cty.DynamicVal, 1, // invalid index }, { `["hello"][negate(0)]`, &hcl.EvalContext{ Functions: map[string]function.Function{ "negate": stdlib.NegateFunc, }, }, cty.StringVal("hello"), 0, }, { `[][negate(0)]`, &hcl.EvalContext{ Functions: map[string]function.Function{ "negate": stdlib.NegateFunc, }, }, cty.DynamicVal, 1, // invalid index }, { `["hello"]["0"]`, // key gets converted to number nil, cty.StringVal("hello"), 0, }, { `["boop"].foo[index]`, // index is a variable to force IndexExpr instead of traversal &hcl.EvalContext{ Variables: map[string]cty.Value{ "index": cty.NumberIntVal(0), }, }, cty.DynamicVal, 1, // expression ["boop"] does not have attributes }, { `foo`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "foo": cty.StringVal("hello"), }, }, cty.StringVal("hello"), 0, }, { `bar`, &hcl.EvalContext{}, cty.DynamicVal, 1, // variables not allowed here }, { `foo.bar`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "foo": cty.StringVal("hello"), }, }, cty.DynamicVal, 1, // foo does not have attributes }, { `foo.baz`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "foo": cty.ObjectVal(map[string]cty.Value{ "baz": cty.StringVal("hello"), }), }, }, cty.StringVal("hello"), 0, }, { `foo["baz"]`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "foo": cty.ObjectVal(map[string]cty.Value{ "baz": cty.StringVal("hello"), }), }, }, cty.StringVal("hello"), 0, }, { `foo[true]`, // key is converted to string &hcl.EvalContext{ Variables: map[string]cty.Value{ "foo": cty.ObjectVal(map[string]cty.Value{ "true": cty.StringVal("hello"), }), }, }, cty.StringVal("hello"), 0, }, { `foo[0].baz`, &hcl.EvalContext{ Variables: map[string]cty.Value{ "foo": cty.ListVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "baz": cty.StringVal("hello"), }), }), }, }, cty.StringVal("hello"), 0, }, { ` </`: ``` $ ls hclsyntax/fuzz/testdata/fuzz/FuzzParseTemplate 582528ddfad69eb57775199a43e0f9fd5c94bba343ce7bb6724d4ebafe311ed4 ``` A good first step to fixing a detected crasher is to copy the failing input into one of the unit tests in the `hclsyntax` package and see it crash there 
too. After that, it's easy to re-run the test as you try to fix it. hcl-2.14.1/hclsyntax/fuzz/fuzz_test.go000066400000000000000000000026271431334125700177560ustar00rootroot00000000000000package fuzzhclsyntax import ( "testing" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" ) func FuzzParseTemplate(f *testing.F) { f.Fuzz(func(t *testing.T, data []byte) { _, diags := hclsyntax.ParseTemplate(data, "", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Logf("Error when parsing template %v", data) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } }) } func FuzzParseTraversalAbs(f *testing.F) { f.Fuzz(func(t *testing.T, data []byte) { _, diags := hclsyntax.ParseTraversalAbs(data, "", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Logf("Error when parsing traversal %v", data) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } }) } func FuzzParseExpression(f *testing.F) { f.Fuzz(func(t *testing.T, data []byte) { _, diags := hclsyntax.ParseExpression(data, "", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Logf("Error when parsing expression %v", data) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } }) } func FuzzParseConfig(f *testing.F) { f.Fuzz(func(t *testing.T, data []byte) { _, diags := hclsyntax.ParseConfig(data, "", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Logf("Error when parsing config %v", data) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } }) } hcl-2.14.1/hclsyntax/fuzz/testdata/000077500000000000000000000000001431334125700171745ustar00rootroot00000000000000hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/000077500000000000000000000000001431334125700201725ustar00rootroot00000000000000hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseConfig/000077500000000000000000000000001431334125700232515ustar00rootroot00000000000000hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseConfig/attr-expr.hcl000066400000000000000000000000651431334125700256700ustar00rootroot00000000000000go test fuzz v1 []byte("foo = upper(bar + baz[1])\n")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseConfig/attr-literal.hcl000066400000000000000000000000511431334125700263410ustar00rootroot00000000000000go test fuzz v1 []byte("foo = \"bar\"\n")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseConfig/block-attrs.hcl000066400000000000000000000000641431334125700261660ustar00rootroot00000000000000go test fuzz v1 []byte("block {\n foo = true\n}\n")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseConfig/block-empty.hcl000066400000000000000000000000461431334125700261670ustar00rootroot00000000000000go test fuzz v1 []byte("block {\n}\n")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseConfig/block-nested.hcl000066400000000000000000000001151431334125700263100ustar00rootroot00000000000000go test fuzz v1 []byte("block {\n another_block {\n foo = bar\n }\n}\n")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseConfig/empty.hcl000066400000000000000000000000321431334125700250720ustar00rootroot00000000000000go test fuzz v1 []byte("")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseConfig/utf8.hcl000066400000000000000000000000741431334125700246300ustar00rootroot00000000000000go test fuzz v1 []byte("foo = \"föo ${föo(\"föo\")}\"\n")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseExpression/000077500000000000000000000000001431334125700242035ustar00rootroot00000000000000hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseExpression/empty.hcle000066400000000000000000000000361431334125700261750ustar00rootroot00000000000000go test fuzz v1 
[]byte("\"\"")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseExpression/escape-dollar.hcle000066400000000000000000000000541431334125700275520ustar00rootroot00000000000000go test fuzz v1 []byte("\"hi $${var.foo}\"")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseExpression/escape-newline.hcle000066400000000000000000000000511431334125700277330ustar00rootroot00000000000000go test fuzz v1 []byte("\"bar\\nbaz\"\n")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseExpression/function-call.hcle000066400000000000000000000000511431334125700275720ustar00rootroot00000000000000go test fuzz v1 []byte("title(var.name)")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseExpression/int.hcle000066400000000000000000000000341431334125700256270ustar00rootroot00000000000000go test fuzz v1 []byte("42")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseExpression/literal.hcle000066400000000000000000000000351431334125700264720ustar00rootroot00000000000000go test fuzz v1 []byte("foo")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseExpression/splat-attr.hcle000066400000000000000000000000511431334125700271270ustar00rootroot00000000000000go test fuzz v1 []byte("foo.bar.*.baz\n")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseExpression/splat-full.hcle000066400000000000000000000000521431334125700271200ustar00rootroot00000000000000go test fuzz v1 []byte("foo.bar[*].baz\n")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseExpression/utf8.hcle000066400000000000000000000000571431334125700257300ustar00rootroot00000000000000go test fuzz v1 []byte("föo(\"föo\") + föo")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseExpression/var.hcle000066400000000000000000000000411431334125700256230ustar00rootroot00000000000000go test fuzz v1 []byte("var.bar")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTemplate/000077500000000000000000000000001431334125700236175ustar00rootroot00000000000000hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTemplate/empty.tmpl000066400000000000000000000000321431334125700256460ustar00rootroot00000000000000go test fuzz v1 []byte("")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTemplate/escape-dollar.tmpl000066400000000000000000000000501431334125700272230ustar00rootroot00000000000000go test fuzz v1 []byte("hi $${var.foo}")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTemplate/escape-newline.tmpl000066400000000000000000000000561431334125700274150ustar00rootroot00000000000000go test fuzz v1 []byte("foo ${\"bar\\nbaz\"}")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTemplate/function-call.tmpl000066400000000000000000000000571431334125700272550ustar00rootroot00000000000000go test fuzz v1 []byte("hi ${title(var.name)}")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTemplate/int.tmpl000066400000000000000000000000431431334125700253040ustar00rootroot00000000000000go test fuzz v1 []byte("foo ${42}")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTemplate/just-interp.tmpl000066400000000000000000000000441431334125700267770ustar00rootroot00000000000000go test fuzz v1 []byte("${var.bar}")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTemplate/literal.tmpl000066400000000000000000000000351431334125700261470ustar00rootroot00000000000000go test fuzz v1 []byte("foo")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTemplate/utf8.tmpl000066400000000000000000000000601431334125700253770ustar00rootroot00000000000000go test fuzz v1 []byte("föo 
${föo(\"föo\")}")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTraversalAbs/000077500000000000000000000000001431334125700244355ustar00rootroot00000000000000hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTraversalAbs/attr.hclt000066400000000000000000000000411431334125700262560ustar00rootroot00000000000000go test fuzz v1 []byte("foo.bar")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTraversalAbs/complex.hclt000066400000000000000000000000671431334125700267630ustar00rootroot00000000000000go test fuzz v1 []byte("foo.bar[1].baz[\"foo\"].pizza")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTraversalAbs/index.hclt000066400000000000000000000000401431334125700264120ustar00rootroot00000000000000go test fuzz v1 []byte("foo[1]")hcl-2.14.1/hclsyntax/fuzz/testdata/fuzz/FuzzParseTraversalAbs/root.hclt000066400000000000000000000000351431334125700262720ustar00rootroot00000000000000go test fuzz v1 []byte("foo")hcl-2.14.1/hclsyntax/generate.go000066400000000000000000000007171431334125700165130ustar00rootroot00000000000000package hclsyntax //go:generate go run expression_vars_gen.go //go:generate ruby unicode2ragel.rb --url=http://www.unicode.org/Public/9.0.0/ucd/DerivedCoreProperties.txt -m UnicodeDerived -p ID_Start,ID_Continue -o unicode_derived.rl //go:generate ragel -Z scan_tokens.rl //go:generate gofmt -w scan_tokens.go //go:generate ragel -Z scan_string_lit.rl //go:generate gofmt -w scan_string_lit.go //go:generate stringer -type TokenType -output token_type_string.go hcl-2.14.1/hclsyntax/keywords.go000066400000000000000000000010111431334125700165540ustar00rootroot00000000000000package hclsyntax import ( "bytes" ) type Keyword []byte var forKeyword = Keyword([]byte{'f', 'o', 'r'}) var inKeyword = Keyword([]byte{'i', 'n'}) var ifKeyword = Keyword([]byte{'i', 'f'}) var elseKeyword = Keyword([]byte{'e', 'l', 's', 'e'}) var endifKeyword = Keyword([]byte{'e', 'n', 'd', 'i', 'f'}) var endforKeyword = Keyword([]byte{'e', 'n', 'd', 'f', 'o', 'r'}) func (kw Keyword) TokenMatches(token Token) bool { if token.Type != TokenIdent { return false } return bytes.Equal([]byte(kw), token.Bytes) } hcl-2.14.1/hclsyntax/navigation.go000066400000000000000000000020401431334125700170470ustar00rootroot00000000000000package hclsyntax import ( "bytes" "fmt" "github.com/hashicorp/hcl/v2" ) type navigation struct { root *Body } // Implementation of hcled.ContextString func (n navigation) ContextString(offset int) string { // We will walk our top-level blocks until we find one that contains // the given offset, and then construct a representation of the header // of the block. var block *Block for _, candidate := range n.root.Blocks { if candidate.Range().ContainsOffset(offset) { block = candidate break } } if block == nil { return "" } if len(block.Labels) == 0 { // Easy case! 
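// A block with no labels is identified by its type keyword alone.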
return block.Type } buf := &bytes.Buffer{} buf.WriteString(block.Type) for _, label := range block.Labels { fmt.Fprintf(buf, " %q", label) } return buf.String() } func (n navigation) ContextDefRange(offset int) hcl.Range { var block *Block for _, candidate := range n.root.Blocks { if candidate.Range().ContainsOffset(offset) { block = candidate break } } if block == nil { return hcl.Range{} } return block.DefRange() } hcl-2.14.1/hclsyntax/navigation_test.go000066400000000000000000000055301431334125700201150ustar00rootroot00000000000000package hclsyntax import ( "fmt" "strconv" "testing" "github.com/hashicorp/hcl/v2" ) func TestNavigationContextString(t *testing.T) { cfg := ` resource { } resource "random_type" { } resource "null_resource" "baz" { name = "foo" boz = { one = "111" two = "22222" } } data "another" "baz" { name = "foo" boz = { one = "111" two = "22222" } } ` file, diags := ParseConfig([]byte(cfg), "", hcl.Pos{Byte: 0, Line: 1, Column: 1}) if len(diags) != 0 { fmt.Printf("offset %d\n", diags[0].Subject.Start.Byte) t.Errorf("Unexpected diagnostics: %s", diags) } if file == nil { t.Fatalf("Got nil file") } nav := file.Nav.(navigation) testCases := []struct { Offset int Want string }{ {0, ``}, {2, ``}, {4, `resource`}, {17, `resource "random_type"`}, {25, `resource "random_type"`}, {45, `resource "null_resource" "baz"`}, {142, `data "another" "baz"`}, {180, `data "another" "baz"`}, {99999, ``}, } for _, tc := range testCases { t.Run(strconv.Itoa(tc.Offset), func(t *testing.T) { got := nav.ContextString(tc.Offset) if got != tc.Want { t.Errorf("wrong result\ngot: %s\nwant: %s", got, tc.Want) } }) } } func TestNavigationContextDefRange(t *testing.T) { cfg := ` resource { } resource "random_type" { } resource "null_resource" "baz" { name = "foo" boz = { one = "111" two = "22222" } } data "another" "baz" { name = "foo" boz = { one = "111" two = "22222" } } ` file, diags := ParseConfig([]byte(cfg), "", hcl.Pos{Byte: 0, Line: 1, Column: 1}) if len(diags) != 0 { fmt.Printf("offset %d\n", diags[0].Subject.Start.Byte) t.Errorf("Unexpected diagnostics: %s", diags) } if file == nil { t.Fatalf("Got nil file") } nav := file.Nav.(navigation) testCases := []struct { Offset int WantRange hcl.Range }{ {0, hcl.Range{}}, {2, hcl.Range{}}, {4, hcl.Range{Filename: "", Start: hcl.Pos{Line: 4, Column: 1, Byte: 3}, End: hcl.Pos{Line: 4, Column: 9, Byte: 11}}}, {17, hcl.Range{Filename: "", Start: hcl.Pos{Line: 7, Column: 1, Byte: 17}, End: hcl.Pos{Line: 7, Column: 23, Byte: 39}}}, {25, hcl.Range{Filename: "", Start: hcl.Pos{Line: 7, Column: 1, Byte: 17}, End: hcl.Pos{Line: 7, Column: 23, Byte: 39}}}, {45, hcl.Range{Filename: "", Start: hcl.Pos{Line: 10, Column: 1, Byte: 45}, End: hcl.Pos{Line: 10, Column: 31, Byte: 75}}}, {142, hcl.Range{Filename: "", Start: hcl.Pos{Line: 18, Column: 1, Byte: 142}, End: hcl.Pos{Line: 18, Column: 21, Byte: 162}}}, {180, hcl.Range{Filename: "", Start: hcl.Pos{Line: 18, Column: 1, Byte: 142}, End: hcl.Pos{Line: 18, Column: 21, Byte: 162}}}, {99999, hcl.Range{}}, } for _, tc := range testCases { t.Run(strconv.Itoa(tc.Offset), func(t *testing.T) { got := nav.ContextDefRange(tc.Offset) if got != tc.WantRange { t.Errorf("wrong range\ngot: %#v\nwant: %#v", got, tc.WantRange) } }) } } hcl-2.14.1/hclsyntax/node.go000066400000000000000000000011601431334125700156370ustar00rootroot00000000000000package hclsyntax import ( "github.com/hashicorp/hcl/v2" ) // Node is the abstract type that every AST node implements. 
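// (The expression types above, such as SplatExpr and TemplateExpr, are
// examples of such nodes.)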
// // This is a closed interface, so it cannot be implemented from outside of // this package. type Node interface { // This is the mechanism by which the public-facing walk functions // are implemented. Implementations should call the given function // for each child node and then replace that node with its return value. // The return value might just be the same node, for non-transforming // walks. walkChildNodes(w internalWalkFunc) Range() hcl.Range } type internalWalkFunc func(Node) hcl-2.14.1/hclsyntax/parse_traversal_test.go000066400000000000000000000113421431334125700211510ustar00rootroot00000000000000package hclsyntax import ( "testing" "github.com/go-test/deep" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) func TestParseTraversalAbs(t *testing.T) { tests := []struct { src string want hcl.Traversal diagCount int }{ { "", nil, 1, // variable name required }, { "foo", hcl.Traversal{ hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, 0, }, { "foo.bar.baz", hcl.Traversal{ hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, hcl.TraverseAttr{ Name: "bar", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 4, Byte: 3}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, }, hcl.TraverseAttr{ Name: "baz", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 8, Byte: 7}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, }, 0, }, { "foo[1]", hcl.Traversal{ hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, hcl.TraverseIndex{ Key: cty.NumberIntVal(1), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 4, Byte: 3}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, }, }, 0, }, { "foo[1][2]", hcl.Traversal{ hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, hcl.TraverseIndex{ Key: cty.NumberIntVal(1), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 4, Byte: 3}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, }, hcl.TraverseIndex{ Key: cty.NumberIntVal(2), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 10, Byte: 9}, }, }, }, 0, }, { "foo[1].bar", hcl.Traversal{ hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, hcl.TraverseIndex{ Key: cty.NumberIntVal(1), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 4, Byte: 3}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, }, hcl.TraverseAttr{ Name: "bar", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 11, Byte: 10}, }, }, }, 0, }, { "foo.", hcl.Traversal{ hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, 1, // attribute name required }, { "foo[", hcl.Traversal{ hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, 1, // index required }, { "foo[index]", hcl.Traversal{ hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, 1, // index must be literal }, { "foo[0", hcl.Traversal{ 
hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, hcl.TraverseIndex{ Key: cty.NumberIntVal(0), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 4, Byte: 3}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, }, 1, // missing close bracket }, { "foo 0", hcl.Traversal{ hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, 1, // extra junk after traversal }, } for _, test := range tests { t.Run(test.src, func(t *testing.T) { got, diags := ParseTraversalAbs([]byte(test.src), "", hcl.Pos{Line: 1, Column: 1}) if len(diags) != test.diagCount { for _, diag := range diags { t.Logf(" - %s", diag.Error()) } t.Errorf("wrong number of diagnostics %d; want %d", len(diags), test.diagCount) } if diff := deep.Equal(got, test.want); diff != nil { for _, problem := range diff { t.Error(problem) } } }) } } hcl-2.14.1/hclsyntax/parser.go000066400000000000000000001730451431334125700162220ustar00rootroot00000000000000package hclsyntax import ( "bytes" "fmt" "strconv" "unicode/utf8" "github.com/apparentlymart/go-textseg/v13/textseg" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) type parser struct { *peeker // set to true if any recovery is attempted. The parser can use this // to attempt to reduce error noise by suppressing "bad token" errors // in recovery mode, assuming that the recovery heuristics have failed // in this case and left the peeker in a wrong place. recovery bool } func (p *parser) ParseBody(end TokenType) (*Body, hcl.Diagnostics) { attrs := Attributes{} blocks := Blocks{} var diags hcl.Diagnostics startRange := p.PrevRange() var endRange hcl.Range Token: for { next := p.Peek() if next.Type == end { endRange = p.NextRange() p.Read() break Token } switch next.Type { case TokenNewline: p.Read() continue case TokenIdent: item, itemDiags := p.ParseBodyItem() diags = append(diags, itemDiags...) switch titem := item.(type) { case *Block: blocks = append(blocks, titem) case *Attribute: if existing, exists := attrs[titem.Name]; exists { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Attribute redefined", Detail: fmt.Sprintf( "The argument %q was already set at %s. Each argument may be set only once.", titem.Name, existing.NameRange.String(), ), Subject: &titem.NameRange, }) } else { attrs[titem.Name] = titem } default: // This should never happen for valid input, but may if a // syntax error was detected in ParseBodyItem that prevented // it from even producing a partially-broken item. In that // case, it would've left at least one error in the diagnostics // slice we already dealt with above. // // We'll assume ParseBodyItem attempted recovery to leave // us in a reasonable position to try parsing the next item. continue } default: bad := p.Read() if !p.recovery { switch bad.Type { case TokenOQuote: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid argument name", Detail: "Argument names must not be quoted.", Subject: &bad.Range, }) case TokenEOF: switch end { case TokenCBrace: // If we're looking for a closing brace then we're parsing a block diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unclosed configuration block", Detail: "There is no closing brace for this block before the end of the file. 
This may be caused by incorrect brace nesting elsewhere in this file.", Subject: &startRange, }) default: // The only other "end" should itself be TokenEOF (for // the top-level body) and so we shouldn't get here, // but we'll return a generic error message anyway to // be resilient. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unclosed configuration body", Detail: "Found end of file before the end of this configuration body.", Subject: &startRange, }) } default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Argument or block definition required", Detail: "An argument or block definition is required here.", Subject: &bad.Range, }) } } endRange = p.PrevRange() // arbitrary, but somewhere inside the body means better diagnostics p.recover(end) // attempt to recover to the token after the end of this body break Token } } return &Body{ Attributes: attrs, Blocks: blocks, SrcRange: hcl.RangeBetween(startRange, endRange), EndRange: hcl.Range{ Filename: endRange.Filename, Start: endRange.End, End: endRange.End, }, }, diags } func (p *parser) ParseBodyItem() (Node, hcl.Diagnostics) { ident := p.Read() if ident.Type != TokenIdent { p.recoverAfterBodyItem() return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Argument or block definition required", Detail: "An argument or block definition is required here.", Subject: &ident.Range, }, } } next := p.Peek() switch next.Type { case TokenEqual: return p.finishParsingBodyAttribute(ident, false) case TokenOQuote, TokenOBrace, TokenIdent: return p.finishParsingBodyBlock(ident) default: p.recoverAfterBodyItem() return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Argument or block definition required", Detail: "An argument or block definition is required here. To set an argument, use the equals sign \"=\" to introduce the argument value.", Subject: &ident.Range, }, } } } // parseSingleAttrBody is a weird variant of ParseBody that deals with the // body of a nested block containing only one attribute value all on a single // line, like foo { bar = baz } . It expects to find a single attribute item // immediately followed by the end token type with no intervening newlines. func (p *parser) parseSingleAttrBody(end TokenType) (*Body, hcl.Diagnostics) { ident := p.Read() if ident.Type != TokenIdent { p.recoverAfterBodyItem() return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Argument or block definition required", Detail: "An argument or block definition is required here.", Subject: &ident.Range, }, } } var attr *Attribute var diags hcl.Diagnostics next := p.Peek() switch next.Type { case TokenEqual: node, attrDiags := p.finishParsingBodyAttribute(ident, true) diags = append(diags, attrDiags...) attr = node.(*Attribute) case TokenOQuote, TokenOBrace, TokenIdent: p.recoverAfterBodyItem() return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Argument definition required", Detail: fmt.Sprintf("A single-line block definition can contain only a single argument. If you meant to define argument %q, use an equals sign to assign it a value. To define a nested block, place it on a line of its own within its parent block.", ident.Bytes), Subject: hcl.RangeBetween(ident.Range, next.Range).Ptr(), }, } default: p.recoverAfterBodyItem() return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Argument or block definition required", Detail: "An argument or block definition is required here. 
To set an argument, use the equals sign \"=\" to introduce the argument value.", Subject: &ident.Range, }, } } return &Body{ Attributes: Attributes{ string(ident.Bytes): attr, }, SrcRange: attr.SrcRange, EndRange: hcl.Range{ Filename: attr.SrcRange.Filename, Start: attr.SrcRange.End, End: attr.SrcRange.End, }, }, diags } func (p *parser) finishParsingBodyAttribute(ident Token, singleLine bool) (Node, hcl.Diagnostics) { eqTok := p.Read() // eat equals token if eqTok.Type != TokenEqual { // should never happen if caller behaves panic("finishParsingBodyAttribute called with next not equals") } var endRange hcl.Range expr, diags := p.ParseExpression() if p.recovery && diags.HasErrors() { // recovery within expressions tends to be tricky, so we've probably // landed somewhere weird. We'll try to reset to the start of a body // item so parsing can continue. endRange = p.PrevRange() p.recoverAfterBodyItem() } else { endRange = p.PrevRange() if !singleLine { end := p.Peek() if end.Type != TokenNewline && end.Type != TokenEOF { if !p.recovery { summary := "Missing newline after argument" detail := "An argument definition must end with a newline." if end.Type == TokenComma { summary = "Unexpected comma after argument" detail = "Argument definitions must be separated by newlines, not commas. " + detail } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: summary, Detail: detail, Subject: &end.Range, Context: hcl.RangeBetween(ident.Range, end.Range).Ptr(), }) } endRange = p.PrevRange() p.recoverAfterBodyItem() } else { endRange = p.PrevRange() p.Read() // eat newline } } } return &Attribute{ Name: string(ident.Bytes), Expr: expr, SrcRange: hcl.RangeBetween(ident.Range, endRange), NameRange: ident.Range, EqualsRange: eqTok.Range, }, diags } func (p *parser) finishParsingBodyBlock(ident Token) (Node, hcl.Diagnostics) { var blockType = string(ident.Bytes) var diags hcl.Diagnostics var labels []string var labelRanges []hcl.Range var oBrace Token Token: for { tok := p.Peek() switch tok.Type { case TokenOBrace: oBrace = p.Read() break Token case TokenOQuote: label, labelRange, labelDiags := p.parseQuotedStringLiteral() diags = append(diags, labelDiags...) labels = append(labels, label) labelRanges = append(labelRanges, labelRange) // parseQuoteStringLiteral recovers up to the closing quote // if it encounters problems, so we can continue looking for // more labels and eventually the block body even. 
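// A bare identifier is also accepted as a block label, so a label
// written as foo is recorded with the same string value as "foo".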
case TokenIdent: tok = p.Read() // eat token label, labelRange := string(tok.Bytes), tok.Range labels = append(labels, label) labelRanges = append(labelRanges, labelRange) default: switch tok.Type { case TokenEqual: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid block definition", Detail: "The equals sign \"=\" indicates an argument definition, and must not be used when defining a block.", Subject: &tok.Range, Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(), }) case TokenNewline: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid block definition", Detail: "A block definition must have block content delimited by \"{\" and \"}\", starting on the same line as the block header.", Subject: &tok.Range, Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(), }) default: if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid block definition", Detail: "Either a quoted string block label or an opening brace (\"{\") is expected here.", Subject: &tok.Range, Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(), }) } } p.recoverAfterBodyItem() return &Block{ Type: blockType, Labels: labels, Body: &Body{ SrcRange: ident.Range, EndRange: ident.Range, }, TypeRange: ident.Range, LabelRanges: labelRanges, OpenBraceRange: ident.Range, // placeholder CloseBraceRange: ident.Range, // placeholder }, diags } } // Once we fall out here, the peeker is pointed just after our opening // brace, so we can begin our nested body parsing. var body *Body var bodyDiags hcl.Diagnostics switch p.Peek().Type { case TokenNewline, TokenEOF, TokenCBrace: body, bodyDiags = p.ParseBody(TokenCBrace) default: // Special one-line, single-attribute block parsing mode. body, bodyDiags = p.parseSingleAttrBody(TokenCBrace) switch p.Peek().Type { case TokenCBrace: p.Read() // the happy path - just consume the closing brace case TokenComma: // User seems to be trying to use the object-constructor // comma-separated style, which isn't permitted for blocks. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid single-argument block definition", Detail: "Single-line block syntax can include only one argument definition. To define multiple arguments, use the multi-line block syntax with one argument definition per line.", Subject: p.Peek().Range.Ptr(), }) p.recover(TokenCBrace) case TokenNewline: // We don't allow weird mixtures of single and multi-line syntax. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid single-argument block definition", Detail: "An argument definition on the same line as its containing block creates a single-line block definition, which must also be closed on the same line. Place the block's closing brace immediately after the argument definition.", Subject: p.Peek().Range.Ptr(), }) p.recover(TokenCBrace) default: // Some other weird thing is going on. Since we can't guess a likely // user intent for this one, we'll skip it if we're already in // recovery mode. if !p.recovery { switch p.Peek().Type { case TokenEOF: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unclosed configuration block", Detail: "There is no closing brace for this block before the end of the file. 
This may be caused by incorrect brace nesting elsewhere in this file.", Subject: oBrace.Range.Ptr(), Context: hcl.RangeBetween(ident.Range, oBrace.Range).Ptr(), }) default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid single-argument block definition", Detail: "A single-line block definition must end with a closing brace immediately after its single argument definition.", Subject: p.Peek().Range.Ptr(), }) } } p.recover(TokenCBrace) } } diags = append(diags, bodyDiags...) cBraceRange := p.PrevRange() eol := p.Peek() if eol.Type == TokenNewline || eol.Type == TokenEOF { p.Read() // eat newline } else { if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing newline after block definition", Detail: "A block definition must end with a newline.", Subject: &eol.Range, Context: hcl.RangeBetween(ident.Range, eol.Range).Ptr(), }) } p.recoverAfterBodyItem() } // We must never produce a nil body, since the caller may attempt to // do analysis of a partial result when there's an error, so we'll // insert a placeholder if we otherwise failed to produce a valid // body due to one of the syntax error paths above. if body == nil && diags.HasErrors() { body = &Body{ SrcRange: hcl.RangeBetween(oBrace.Range, cBraceRange), EndRange: cBraceRange, } } return &Block{ Type: blockType, Labels: labels, Body: body, TypeRange: ident.Range, LabelRanges: labelRanges, OpenBraceRange: oBrace.Range, CloseBraceRange: cBraceRange, }, diags } func (p *parser) ParseExpression() (Expression, hcl.Diagnostics) { return p.parseTernaryConditional() } func (p *parser) parseTernaryConditional() (Expression, hcl.Diagnostics) { // The ternary conditional operator (.. ? .. : ..) behaves somewhat // like a binary operator except that the "symbol" is itself // an expression enclosed in two punctuation characters. // The middle expression is parsed as if the ? and : symbols // were parentheses. The "rhs" (the "false expression") is then // treated right-associatively so it behaves similarly to the // middle in terms of precedence. startRange := p.NextRange() var condExpr, trueExpr, falseExpr Expression var diags hcl.Diagnostics condExpr, condDiags := p.parseBinaryOps(binaryOps) diags = append(diags, condDiags...) if p.recovery && condDiags.HasErrors() { return condExpr, diags } questionMark := p.Peek() if questionMark.Type != TokenQuestion { return condExpr, diags } p.Read() // eat question mark trueExpr, trueDiags := p.ParseExpression() diags = append(diags, trueDiags...) if p.recovery && trueDiags.HasErrors() { return condExpr, diags } colon := p.Peek() if colon.Type != TokenColon { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing false expression in conditional", Detail: "The conditional operator (...?...:...) requires a false expression, delimited by a colon.", Subject: &colon.Range, Context: hcl.RangeBetween(startRange, colon.Range).Ptr(), }) return condExpr, diags } p.Read() // eat colon falseExpr, falseDiags := p.ParseExpression() diags = append(diags, falseDiags...) if p.recovery && falseDiags.HasErrors() { return condExpr, diags } return &ConditionalExpr{ Condition: condExpr, TrueResult: trueExpr, FalseResult: falseExpr, SrcRange: hcl.RangeBetween(startRange, falseExpr.Range()), }, diags } // parseBinaryOps calls itself recursively to work through all of the // operator precedence groups, and then eventually calls parseExpressionTerm // for each operand. 
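// For example, with the binaryOps table an input like a + b * c
// parses as a + (b * c): the looser "+" level parses each of its
// operands at the tighter "*" level first, while operators at the
// same level combine left-to-right, so a - b - c is (a - b) - c.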
func (p *parser) parseBinaryOps(ops []map[TokenType]*Operation) (Expression, hcl.Diagnostics) { if len(ops) == 0 { // We've run out of operators, so now we'll just try to parse a term. return p.parseExpressionWithTraversals() } thisLevel := ops[0] remaining := ops[1:] var lhs, rhs Expression var operation *Operation var diags hcl.Diagnostics // Parse a term that might be the first operand of a binary // operation or it might just be a standalone term. // We won't know until we've parsed it and can look ahead // to see if there's an operator token for this level. lhs, lhsDiags := p.parseBinaryOps(remaining) diags = append(diags, lhsDiags...) if p.recovery && lhsDiags.HasErrors() { return lhs, diags } // We'll keep eating up operators until we run out, so that operators // with the same precedence will combine in a left-associative manner: // a+b+c => (a+b)+c, not a+(b+c) // // Should we later want to have right-associative operators, a way // to achieve that would be to call back up to ParseExpression here // instead of iteratively parsing only the remaining operators. for { next := p.Peek() var newOp *Operation var ok bool if newOp, ok = thisLevel[next.Type]; !ok { break } // Are we extending an expression started on the previous iteration? if operation != nil { lhs = &BinaryOpExpr{ LHS: lhs, Op: operation, RHS: rhs, SrcRange: hcl.RangeBetween(lhs.Range(), rhs.Range()), } } operation = newOp p.Read() // eat operator token var rhsDiags hcl.Diagnostics rhs, rhsDiags = p.parseBinaryOps(remaining) diags = append(diags, rhsDiags...) if p.recovery && rhsDiags.HasErrors() { return lhs, diags } } if operation == nil { return lhs, diags } return &BinaryOpExpr{ LHS: lhs, Op: operation, RHS: rhs, SrcRange: hcl.RangeBetween(lhs.Range(), rhs.Range()), }, diags } func (p *parser) parseExpressionWithTraversals() (Expression, hcl.Diagnostics) { term, diags := p.parseExpressionTerm() ret, moreDiags := p.parseExpressionTraversals(term) diags = append(diags, moreDiags...) return ret, diags } func (p *parser) parseExpressionTraversals(from Expression) (Expression, hcl.Diagnostics) { var diags hcl.Diagnostics ret := from Traversal: for { next := p.Peek() switch next.Type { case TokenDot: // Attribute access or splat dot := p.Read() attrTok := p.Peek() switch attrTok.Type { case TokenIdent: attrTok = p.Read() // eat token name := string(attrTok.Bytes) rng := hcl.RangeBetween(dot.Range, attrTok.Range) step := hcl.TraverseAttr{ Name: name, SrcRange: rng, } ret = makeRelativeTraversal(ret, step, rng) case TokenNumberLit: // This is a weird form we inherited from HIL, allowing numbers // to be used as attributes as a weird way of writing [n]. // This was never actually a first-class thing in HIL, but // HIL tolerated sequences like .0. in its variable names and // calling applications like Terraform exploited that to // introduce indexing syntax where none existed. numTok := p.Read() // eat token attrTok = numTok // This syntax is ambiguous if multiple indices are used in // succession, like foo.0.1.baz: that actually parses as // a fractional number 0.1. Since we're only supporting this // syntax for compatibility with legacy Terraform // configurations, and Terraform does not tend to have lists // of lists, we'll choose to reject that here with a helpful // error message, rather than failing later because the index // isn't a whole number. 
if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 { first := numTok.Bytes[:dotIdx] second := numTok.Bytes[dotIdx+1:] diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid legacy index syntax", Detail: fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax instead, like [%s][%s].", first, second), Subject: &attrTok.Range, }) rng := hcl.RangeBetween(dot.Range, numTok.Range) step := hcl.TraverseIndex{ Key: cty.DynamicVal, SrcRange: rng, } ret = makeRelativeTraversal(ret, step, rng) break } numVal, numDiags := p.numberLitValue(numTok) diags = append(diags, numDiags...) rng := hcl.RangeBetween(dot.Range, numTok.Range) step := hcl.TraverseIndex{ Key: numVal, SrcRange: rng, } ret = makeRelativeTraversal(ret, step, rng) case TokenStar: // "Attribute-only" splat expression. // (This is a kinda weird construct inherited from HIL, which // behaves a bit like a [*] splat except that it is only able // to do attribute traversals into each of its elements, // whereas foo[*] can support _any_ traversal. marker := p.Read() // eat star trav := make(hcl.Traversal, 0, 1) var firstRange, lastRange hcl.Range firstRange = p.NextRange() lastRange = marker.Range for p.Peek().Type == TokenDot { dot := p.Read() if p.Peek().Type == TokenNumberLit { // Continuing the "weird stuff inherited from HIL" // theme, we also allow numbers as attribute names // inside splats and interpret them as indexing // into a list, for expressions like: // foo.bar.*.baz.0.foo numTok := p.Read() // Weird special case if the user writes something // like foo.bar.*.baz.0.0.foo, where 0.0 parses // as a number. if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 { first := numTok.Bytes[:dotIdx] second := numTok.Bytes[dotIdx+1:] diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid legacy index syntax", Detail: fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax with a full splat expression [*] instead, like [%s][%s].", first, second), Subject: &attrTok.Range, }) trav = append(trav, hcl.TraverseIndex{ Key: cty.DynamicVal, SrcRange: hcl.RangeBetween(dot.Range, numTok.Range), }) lastRange = numTok.Range continue } numVal, numDiags := p.numberLitValue(numTok) diags = append(diags, numDiags...) 
trav = append(trav, hcl.TraverseIndex{ Key: numVal, SrcRange: hcl.RangeBetween(dot.Range, numTok.Range), }) lastRange = numTok.Range continue } if p.Peek().Type != TokenIdent { if !p.recovery { if p.Peek().Type == TokenStar { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Nested splat expression not allowed", Detail: "A splat expression (*) cannot be used inside another attribute-only splat expression.", Subject: p.Peek().Range.Ptr(), }) } else { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid attribute name", Detail: "An attribute name is required after a dot.", Subject: &attrTok.Range, }) } } p.setRecovery() continue Traversal } attrTok := p.Read() trav = append(trav, hcl.TraverseAttr{ Name: string(attrTok.Bytes), SrcRange: hcl.RangeBetween(dot.Range, attrTok.Range), }) lastRange = attrTok.Range } itemExpr := &AnonSymbolExpr{ SrcRange: hcl.RangeBetween(dot.Range, marker.Range), } var travExpr Expression if len(trav) == 0 { travExpr = itemExpr } else { travExpr = &RelativeTraversalExpr{ Source: itemExpr, Traversal: trav, SrcRange: hcl.RangeBetween(firstRange, lastRange), } } ret = &SplatExpr{ Source: ret, Each: travExpr, Item: itemExpr, SrcRange: hcl.RangeBetween(from.Range(), lastRange), MarkerRange: hcl.RangeBetween(dot.Range, marker.Range), } default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid attribute name", Detail: "An attribute name is required after a dot.", Subject: &attrTok.Range, }) // This leaves the peeker in a bad place, so following items // will probably be misparsed until we hit something that // allows us to re-sync. // // We will probably need to do something better here eventually // in order to support autocomplete triggered by typing a // period. p.setRecovery() } case TokenOBrack: // Indexing of a collection. // This may or may not be a hcl.Traverser, depending on whether // the key value is something constant. open := p.Read() switch p.Peek().Type { case TokenStar: // This is a full splat expression, like foo[*], which consumes // the rest of the traversal steps after it using a recursive // call to this function. p.Read() // consume star close := p.Read() if close.Type != TokenCBrack && !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing close bracket on splat index", Detail: "The star for a full splat operator must be immediately followed by a closing bracket (\"]\").", Subject: &close.Range, }) close = p.recover(TokenCBrack) } // Splat expressions use a special "anonymous symbol" as a // placeholder in an expression to be evaluated once for each // item in the source expression. itemExpr := &AnonSymbolExpr{ SrcRange: hcl.RangeBetween(open.Range, close.Range), } // Now we'll recursively call this same function to eat any // remaining traversal steps against the anonymous symbol. travExpr, nestedDiags := p.parseExpressionTraversals(itemExpr) diags = append(diags, nestedDiags...) ret = &SplatExpr{ Source: ret, Each: travExpr, Item: itemExpr, SrcRange: hcl.RangeBetween(from.Range(), travExpr.Range()), MarkerRange: hcl.RangeBetween(open.Range, close.Range), } default: var close Token p.PushIncludeNewlines(false) // arbitrary newlines allowed in brackets keyExpr, keyDiags := p.ParseExpression() diags = append(diags, keyDiags...) 
if p.recovery && keyDiags.HasErrors() { close = p.recover(TokenCBrack) } else { close = p.Read() if close.Type != TokenCBrack && !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing close bracket on index", Detail: "The index operator must end with a closing bracket (\"]\").", Subject: &close.Range, }) close = p.recover(TokenCBrack) } } p.PopIncludeNewlines() if lit, isLit := keyExpr.(*LiteralValueExpr); isLit { litKey, _ := lit.Value(nil) rng := hcl.RangeBetween(open.Range, close.Range) step := hcl.TraverseIndex{ Key: litKey, SrcRange: rng, } ret = makeRelativeTraversal(ret, step, rng) } else if tmpl, isTmpl := keyExpr.(*TemplateExpr); isTmpl && tmpl.IsStringLiteral() { litKey, _ := tmpl.Value(nil) rng := hcl.RangeBetween(open.Range, close.Range) step := hcl.TraverseIndex{ Key: litKey, SrcRange: rng, } ret = makeRelativeTraversal(ret, step, rng) } else { rng := hcl.RangeBetween(open.Range, close.Range) ret = &IndexExpr{ Collection: ret, Key: keyExpr, SrcRange: hcl.RangeBetween(from.Range(), rng), OpenRange: open.Range, BracketRange: rng, } } } default: break Traversal } } return ret, diags } // makeRelativeTraversal takes an expression and a traverser and returns // a traversal expression that combines the two. If the given expression // is already a traversal, it is extended in place (mutating it) and // returned. If it isn't, a new RelativeTraversalExpr is created and returned. func makeRelativeTraversal(expr Expression, next hcl.Traverser, rng hcl.Range) Expression { switch texpr := expr.(type) { case *ScopeTraversalExpr: texpr.Traversal = append(texpr.Traversal, next) texpr.SrcRange = hcl.RangeBetween(texpr.SrcRange, rng) return texpr case *RelativeTraversalExpr: texpr.Traversal = append(texpr.Traversal, next) texpr.SrcRange = hcl.RangeBetween(texpr.SrcRange, rng) return texpr default: return &RelativeTraversalExpr{ Source: expr, Traversal: hcl.Traversal{next}, SrcRange: hcl.RangeBetween(expr.Range(), rng), } } } func (p *parser) parseExpressionTerm() (Expression, hcl.Diagnostics) { start := p.Peek() switch start.Type { case TokenOParen: oParen := p.Read() // eat open paren p.PushIncludeNewlines(false) expr, diags := p.ParseExpression() if diags.HasErrors() { // attempt to place the peeker after our closing paren // before we return, so that the next parser has some // chance of finding a valid expression. p.recover(TokenCParen) p.PopIncludeNewlines() return expr, diags } close := p.Peek() if close.Type != TokenCParen { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unbalanced parentheses", Detail: "Expected a closing parenthesis to terminate the expression.", Subject: &close.Range, Context: hcl.RangeBetween(start.Range, close.Range).Ptr(), }) p.setRecovery() } cParen := p.Read() // eat closing paren p.PopIncludeNewlines() // Our parser's already taken care of the precedence effect of the // parentheses by considering them to be a kind of "term", but we // still need to include the parentheses in our AST so we can give // an accurate representation of the source range that includes the // open and closing parentheses. 
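// For example, in (1 + 2) the resulting ParenthesesExpr range spans
// from the opening paren to the closing paren, not just the inner
// addition expression.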
expr = &ParenthesesExpr{ Expression: expr, SrcRange: hcl.RangeBetween(oParen.Range, cParen.Range), } return expr, diags case TokenNumberLit: tok := p.Read() // eat number token numVal, diags := p.numberLitValue(tok) return &LiteralValueExpr{ Val: numVal, SrcRange: tok.Range, }, diags case TokenIdent: tok := p.Read() // eat identifier token if p.Peek().Type == TokenOParen { return p.finishParsingFunctionCall(tok) } name := string(tok.Bytes) switch name { case "true": return &LiteralValueExpr{ Val: cty.True, SrcRange: tok.Range, }, nil case "false": return &LiteralValueExpr{ Val: cty.False, SrcRange: tok.Range, }, nil case "null": return &LiteralValueExpr{ Val: cty.NullVal(cty.DynamicPseudoType), SrcRange: tok.Range, }, nil default: return &ScopeTraversalExpr{ Traversal: hcl.Traversal{ hcl.TraverseRoot{ Name: name, SrcRange: tok.Range, }, }, SrcRange: tok.Range, }, nil } case TokenOQuote, TokenOHeredoc: open := p.Read() // eat opening marker closer := p.oppositeBracket(open.Type) exprs, passthru, _, diags := p.parseTemplateInner(closer, tokenOpensFlushHeredoc(open)) closeRange := p.PrevRange() if passthru { if len(exprs) != 1 { panic("passthru set with len(exprs) != 1") } return &TemplateWrapExpr{ Wrapped: exprs[0], SrcRange: hcl.RangeBetween(open.Range, closeRange), }, diags } return &TemplateExpr{ Parts: exprs, SrcRange: hcl.RangeBetween(open.Range, closeRange), }, diags case TokenMinus: tok := p.Read() // eat minus token // Important to use parseExpressionWithTraversals rather than parseExpression // here, otherwise we can capture a following binary expression into // our negation. // e.g. -46+5 should parse as (-46)+5, not -(46+5) operand, diags := p.parseExpressionWithTraversals() return &UnaryOpExpr{ Op: OpNegate, Val: operand, SrcRange: hcl.RangeBetween(tok.Range, operand.Range()), SymbolRange: tok.Range, }, diags case TokenBang: tok := p.Read() // eat bang token // Important to use parseExpressionWithTraversals rather than parseExpression // here, otherwise we can capture a following binary expression into // our negation. operand, diags := p.parseExpressionWithTraversals() return &UnaryOpExpr{ Op: OpLogicalNot, Val: operand, SrcRange: hcl.RangeBetween(tok.Range, operand.Range()), SymbolRange: tok.Range, }, diags case TokenOBrack: return p.parseTupleCons() case TokenOBrace: return p.parseObjectCons() default: var diags hcl.Diagnostics if !p.recovery { switch start.Type { case TokenEOF: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing expression", Detail: "Expected the start of an expression, but found the end of the file.", Subject: &start.Range, }) default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid expression", Detail: "Expected the start of an expression, but found an invalid expression token.", Subject: &start.Range, }) } } p.setRecovery() // Return a placeholder so that the AST is still structurally sound // even in the presence of parse errors. return &LiteralValueExpr{ Val: cty.DynamicVal, SrcRange: start.Range, }, diags } } func (p *parser) numberLitValue(tok Token) (cty.Value, hcl.Diagnostics) { // The cty.ParseNumberVal is always the same behavior as converting a // string to a number, ensuring we always interpret decimal numbers in // the same way. 
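// For example, literals like "1" and "1.5" both yield cty.Number
// values here; anything cty nonetheless fails to parse falls through
// to the diagnostic below.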
numVal, err := cty.ParseNumberVal(string(tok.Bytes)) if err != nil { ret := cty.UnknownVal(cty.Number) return ret, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Invalid number literal", // FIXME: not a very good error message, but convert only // gives us "a number is required", so not much help either. Detail: "Failed to recognize the value of this number literal.", Subject: &tok.Range, }, } } return numVal, nil } // finishParsingFunctionCall parses a function call assuming that the function // name was already read, and so the peeker should be pointing at the opening // parenthesis after the name. func (p *parser) finishParsingFunctionCall(name Token) (Expression, hcl.Diagnostics) { openTok := p.Read() if openTok.Type != TokenOParen { // should never happen if callers behave panic("finishParsingFunctionCall called with non-parenthesis as next token") } var args []Expression var diags hcl.Diagnostics var expandFinal bool var closeTok Token // Arbitrary newlines are allowed inside the function call parentheses. p.PushIncludeNewlines(false) Token: for { tok := p.Peek() if tok.Type == TokenCParen { closeTok = p.Read() // eat closing paren break Token } arg, argDiags := p.ParseExpression() args = append(args, arg) diags = append(diags, argDiags...) if p.recovery && argDiags.HasErrors() { // if there was a parse error in the argument then we've // probably been left in a weird place in the token stream, // so we'll bail out with a partial argument list. p.recover(TokenCParen) break Token } sep := p.Read() if sep.Type == TokenCParen { closeTok = sep break Token } if sep.Type == TokenEllipsis { expandFinal = true if p.Peek().Type != TokenCParen { if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing closing parenthesis", Detail: "An expanded function argument (with ...) must be immediately followed by closing parentheses.", Subject: &sep.Range, Context: hcl.RangeBetween(name.Range, sep.Range).Ptr(), }) } closeTok = p.recover(TokenCParen) } else { closeTok = p.Read() // eat closing paren } break Token } if sep.Type != TokenComma { switch sep.Type { case TokenEOF: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unterminated function call", Detail: "There is no closing parenthesis for this function call before the end of the file. This may be caused by incorrect parethesis nesting elsewhere in this file.", Subject: hcl.RangeBetween(name.Range, openTok.Range).Ptr(), }) default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing argument separator", Detail: "A comma is required to separate each function argument from the next.", Subject: &sep.Range, Context: hcl.RangeBetween(name.Range, sep.Range).Ptr(), }) } closeTok = p.recover(TokenCParen) break Token } if p.Peek().Type == TokenCParen { // A trailing comma after the last argument gets us in here. 
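// e.g. foo(1, 2,) is accepted, with the dangling comma ignored.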
closeTok = p.Read() // eat closing paren break Token } } p.PopIncludeNewlines() return &FunctionCallExpr{ Name: string(name.Bytes), Args: args, ExpandFinal: expandFinal, NameRange: name.Range, OpenParenRange: openTok.Range, CloseParenRange: closeTok.Range, }, diags } func (p *parser) parseTupleCons() (Expression, hcl.Diagnostics) { open := p.Read() if open.Type != TokenOBrack { // Should never happen if callers are behaving panic("parseTupleCons called without peeker pointing to open bracket") } p.PushIncludeNewlines(false) defer p.PopIncludeNewlines() if forKeyword.TokenMatches(p.Peek()) { return p.finishParsingForExpr(open) } var close Token var diags hcl.Diagnostics var exprs []Expression for { next := p.Peek() if next.Type == TokenCBrack { close = p.Read() // eat closer break } expr, exprDiags := p.ParseExpression() exprs = append(exprs, expr) diags = append(diags, exprDiags...) if p.recovery && exprDiags.HasErrors() { // If expression parsing failed then we are probably in a strange // place in the token stream, so we'll bail out and try to reset // to after our closing bracket to allow parsing to continue. close = p.recover(TokenCBrack) break } next = p.Peek() if next.Type == TokenCBrack { close = p.Read() // eat closer break } if next.Type != TokenComma { if !p.recovery { switch next.Type { case TokenEOF: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unterminated tuple constructor expression", Detail: "There is no corresponding closing bracket before the end of the file. This may be caused by incorrect bracket nesting elsewhere in this file.", Subject: open.Range.Ptr(), }) default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing item separator", Detail: "Expected a comma to mark the beginning of the next item.", Subject: &next.Range, Context: hcl.RangeBetween(open.Range, next.Range).Ptr(), }) } } close = p.recover(TokenCBrack) break } p.Read() // eat comma } return &TupleConsExpr{ Exprs: exprs, SrcRange: hcl.RangeBetween(open.Range, close.Range), OpenRange: open.Range, }, diags } func (p *parser) parseObjectCons() (Expression, hcl.Diagnostics) { open := p.Read() if open.Type != TokenOBrace { // Should never happen if callers are behaving panic("parseObjectCons called without peeker pointing to open brace") } // We must temporarily stop looking at newlines here while we check for // a "for" keyword, since for expressions are _not_ newline-sensitive, // even though object constructors are. p.PushIncludeNewlines(false) isFor := forKeyword.TokenMatches(p.Peek()) p.PopIncludeNewlines() if isFor { return p.finishParsingForExpr(open) } p.PushIncludeNewlines(true) defer p.PopIncludeNewlines() var close Token var diags hcl.Diagnostics var items []ObjectConsItem for { next := p.Peek() if next.Type == TokenNewline { p.Read() // eat newline continue } if next.Type == TokenCBrace { close = p.Read() // eat closer break } // Wrapping parens are not explicitly represented in the AST, but // we want to use them here to disambiguate intepreting a mapping // key as a full expression rather than just a name, and so // we'll remember this was present and use it to force the // behavior of our final ObjectConsKeyExpr. forceNonLiteral := (p.Peek().Type == TokenOParen) var key Expression var keyDiags hcl.Diagnostics key, keyDiags = p.ParseExpression() diags = append(diags, keyDiags...) 
if p.recovery && keyDiags.HasErrors() { // If expression parsing failed then we are probably in a strange // place in the token stream, so we'll bail out and try to reset // to after our closing brace to allow parsing to continue. close = p.recover(TokenCBrace) break } // We wrap up the key expression in a special wrapper that deals // with our special case that naked identifiers as object keys // are interpreted as literal strings. key = &ObjectConsKeyExpr{ Wrapped: key, ForceNonLiteral: forceNonLiteral, } next = p.Peek() if next.Type != TokenEqual && next.Type != TokenColon { if !p.recovery { switch next.Type { case TokenNewline, TokenComma: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing attribute value", Detail: "Expected an attribute value, introduced by an equals sign (\"=\").", Subject: &next.Range, Context: hcl.RangeBetween(open.Range, next.Range).Ptr(), }) case TokenIdent: // Although this might just be a plain old missing equals // sign before a reference, one way to get here is to try // to write an attribute name containing a period followed // by a digit, which was valid in HCL1, like this: // foo1.2_bar = "baz" // We can't know exactly what the user intended here, but // we'll augment our message with an extra hint in this case // in case it is helpful. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing key/value separator", Detail: "Expected an equals sign (\"=\") to mark the beginning of the attribute value. If you intended to given an attribute name containing periods or spaces, write the name in quotes to create a string literal.", Subject: &next.Range, Context: hcl.RangeBetween(open.Range, next.Range).Ptr(), }) case TokenEOF: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unterminated object constructor expression", Detail: "There is no corresponding closing brace before the end of the file. This may be caused by incorrect brace nesting elsewhere in this file.", Subject: open.Range.Ptr(), }) default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing key/value separator", Detail: "Expected an equals sign (\"=\") to mark the beginning of the attribute value.", Subject: &next.Range, Context: hcl.RangeBetween(open.Range, next.Range).Ptr(), }) } } close = p.recover(TokenCBrace) break } p.Read() // eat equals sign or colon value, valueDiags := p.ParseExpression() diags = append(diags, valueDiags...) if p.recovery && valueDiags.HasErrors() { // If expression parsing failed then we are probably in a strange // place in the token stream, so we'll bail out and try to reset // to after our closing brace to allow parsing to continue. close = p.recover(TokenCBrace) break } items = append(items, ObjectConsItem{ KeyExpr: key, ValueExpr: value, }) next = p.Peek() if next.Type == TokenCBrace { close = p.Read() // eat closer break } if next.Type != TokenComma && next.Type != TokenNewline { if !p.recovery { switch next.Type { case TokenEOF: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unterminated object constructor expression", Detail: "There is no corresponding closing brace before the end of the file. 
This may be caused by incorrect brace nesting elsewhere in this file.", Subject: open.Range.Ptr(), }) default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing attribute separator", Detail: "Expected a newline or comma to mark the beginning of the next attribute.", Subject: &next.Range, Context: hcl.RangeBetween(open.Range, next.Range).Ptr(), }) } } close = p.recover(TokenCBrace) break } p.Read() // eat comma or newline } return &ObjectConsExpr{ Items: items, SrcRange: hcl.RangeBetween(open.Range, close.Range), OpenRange: open.Range, }, diags } func (p *parser) finishParsingForExpr(open Token) (Expression, hcl.Diagnostics) { p.PushIncludeNewlines(false) defer p.PopIncludeNewlines() introducer := p.Read() if !forKeyword.TokenMatches(introducer) { // Should never happen if callers are behaving panic("finishParsingForExpr called without peeker pointing to 'for' identifier") } var makeObj bool var closeType TokenType switch open.Type { case TokenOBrace: makeObj = true closeType = TokenCBrace case TokenOBrack: makeObj = false // making a tuple closeType = TokenCBrack default: // Should never happen if callers are behaving panic("finishParsingForExpr called with invalid open token") } var diags hcl.Diagnostics var keyName, valName string if p.Peek().Type != TokenIdent { if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' expression", Detail: "For expression requires variable name after 'for'.", Subject: p.Peek().Range.Ptr(), Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(), }) } close := p.recover(closeType) return &LiteralValueExpr{ Val: cty.DynamicVal, SrcRange: hcl.RangeBetween(open.Range, close.Range), }, diags } valName = string(p.Read().Bytes) if p.Peek().Type == TokenComma { // What we just read was actually the key, then. keyName = valName p.Read() // eat comma if p.Peek().Type != TokenIdent { if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' expression", Detail: "For expression requires value variable name after comma.", Subject: p.Peek().Range.Ptr(), Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(), }) } close := p.recover(closeType) return &LiteralValueExpr{ Val: cty.DynamicVal, SrcRange: hcl.RangeBetween(open.Range, close.Range), }, diags } valName = string(p.Read().Bytes) } if !inKeyword.TokenMatches(p.Peek()) { if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' expression", Detail: "For expression requires the 'in' keyword after its name declarations.", Subject: p.Peek().Range.Ptr(), Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(), }) } close := p.recover(closeType) return &LiteralValueExpr{ Val: cty.DynamicVal, SrcRange: hcl.RangeBetween(open.Range, close.Range), }, diags } p.Read() // eat 'in' keyword collExpr, collDiags := p.ParseExpression() diags = append(diags, collDiags...) 
if p.recovery && collDiags.HasErrors() { close := p.recover(closeType) return &LiteralValueExpr{ Val: cty.DynamicVal, SrcRange: hcl.RangeBetween(open.Range, close.Range), }, diags } if p.Peek().Type != TokenColon { if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' expression", Detail: "For expression requires a colon after the collection expression.", Subject: p.Peek().Range.Ptr(), Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(), }) } close := p.recover(closeType) return &LiteralValueExpr{ Val: cty.DynamicVal, SrcRange: hcl.RangeBetween(open.Range, close.Range), }, diags } p.Read() // eat colon var keyExpr, valExpr Expression var keyDiags, valDiags hcl.Diagnostics valExpr, valDiags = p.ParseExpression() if p.Peek().Type == TokenFatArrow { // What we just parsed was actually keyExpr p.Read() // eat the fat arrow keyExpr, keyDiags = valExpr, valDiags valExpr, valDiags = p.ParseExpression() } diags = append(diags, keyDiags...) diags = append(diags, valDiags...) if p.recovery && (keyDiags.HasErrors() || valDiags.HasErrors()) { close := p.recover(closeType) return &LiteralValueExpr{ Val: cty.DynamicVal, SrcRange: hcl.RangeBetween(open.Range, close.Range), }, diags } group := false var ellipsis Token if p.Peek().Type == TokenEllipsis { ellipsis = p.Read() group = true } var condExpr Expression var condDiags hcl.Diagnostics if ifKeyword.TokenMatches(p.Peek()) { p.Read() // eat "if" condExpr, condDiags = p.ParseExpression() diags = append(diags, condDiags...) if p.recovery && condDiags.HasErrors() { close := p.recover(p.oppositeBracket(open.Type)) return &LiteralValueExpr{ Val: cty.DynamicVal, SrcRange: hcl.RangeBetween(open.Range, close.Range), }, diags } } var close Token if p.Peek().Type == closeType { close = p.Read() } else { if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' expression", Detail: "Extra characters after the end of the 'for' expression.", Subject: p.Peek().Range.Ptr(), Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(), }) } close = p.recover(closeType) } if !makeObj { if keyExpr != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' expression", Detail: "Key expression is not valid when building a tuple.", Subject: keyExpr.Range().Ptr(), Context: hcl.RangeBetween(open.Range, close.Range).Ptr(), }) } if group { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' expression", Detail: "Grouping ellipsis (...) cannot be used when building a tuple.", Subject: &ellipsis.Range, Context: hcl.RangeBetween(open.Range, close.Range).Ptr(), }) } } else { if keyExpr == nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' expression", Detail: "Key expression is required when building an object.", Subject: valExpr.Range().Ptr(), Context: hcl.RangeBetween(open.Range, close.Range).Ptr(), }) } } return &ForExpr{ KeyVar: keyName, ValVar: valName, CollExpr: collExpr, KeyExpr: keyExpr, ValExpr: valExpr, CondExpr: condExpr, Group: group, SrcRange: hcl.RangeBetween(open.Range, close.Range), OpenRange: open.Range, CloseRange: close.Range, }, diags } // parseQuotedStringLiteral is a helper for parsing quoted strings that // aren't allowed to contain any interpolations, such as block labels. 
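// For example, a label written as "web" yields the string web, while
// "web-${var.env}" is rejected because template sequences are not
// allowed in this context.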
func (p *parser) parseQuotedStringLiteral() (string, hcl.Range, hcl.Diagnostics) { oQuote := p.Read() if oQuote.Type != TokenOQuote { return "", oQuote.Range, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Invalid string literal", Detail: "A quoted string is required here.", Subject: &oQuote.Range, }, } } var diags hcl.Diagnostics ret := &bytes.Buffer{} var endRange hcl.Range Token: for { tok := p.Read() switch tok.Type { case TokenCQuote: endRange = tok.Range break Token case TokenQuotedLit: s, sDiags := ParseStringLiteralToken(tok) diags = append(diags, sDiags...) ret.WriteString(s) case TokenTemplateControl, TokenTemplateInterp: which := "$" if tok.Type == TokenTemplateControl { which = "%" } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid string literal", Detail: fmt.Sprintf( "Template sequences are not allowed in this string. To include a literal %q, double it (as \"%s%s\") to escape it.", which, which, which, ), Subject: &tok.Range, Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(), }) // Now that we're returning an error callers won't attempt to use // the result for any real operations, but they might try to use // the partial AST for other analyses, so we'll leave a marker // to indicate that there was something invalid in the string to // help avoid misinterpretation of the partial result ret.WriteString(which) ret.WriteString("{ ... }") p.recover(TokenTemplateSeqEnd) // we'll try to keep parsing after the sequence ends case TokenEOF: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unterminated string literal", Detail: "Unable to find the closing quote mark before the end of the file.", Subject: &tok.Range, Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(), }) endRange = tok.Range break Token default: // Should never happen, as long as the scanner is behaving itself diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid string literal", Detail: "This item is not valid in a string literal.", Subject: &tok.Range, Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(), }) p.recover(TokenCQuote) endRange = tok.Range break Token } } return ret.String(), hcl.RangeBetween(oQuote.Range, endRange), diags } // ParseStringLiteralToken processes the given token, which must be either a // TokenQuotedLit or a TokenStringLit, returning the string resulting from // resolving any escape sequences. // // If any error diagnostics are returned, the returned string may be incomplete // or otherwise invalid. func ParseStringLiteralToken(tok Token) (string, hcl.Diagnostics) { var quoted bool switch tok.Type { case TokenQuotedLit: quoted = true case TokenStringLit: quoted = false default: panic("ParseStringLiteralToken can only be used with TokenStringLit and TokenQuotedLit tokens") } var diags hcl.Diagnostics ret := make([]byte, 0, len(tok.Bytes)) slices := scanStringLit(tok.Bytes, quoted) // We will mutate rng constantly as we walk through our token slices below. // Any diagnostics must take a copy of this rng rather than simply pointing // to it, e.g. by using rng.Ptr() rather than &rng. rng := tok.Range rng.End = rng.Start Slices: for _, slice := range slices { if len(slice) == 0 { continue } // Advance the start of our range to where the previous token ended rng.Start = rng.End // Advance the end of our range to after our token. 
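// We walk grapheme clusters rather than raw bytes so that the column
// numbers recorded in rng match what a reader would count as
// characters in the source.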
b := slice for len(b) > 0 { adv, ch, _ := textseg.ScanGraphemeClusters(b, true) rng.End.Byte += adv switch ch[0] { case '\r', '\n': rng.End.Line++ rng.End.Column = 1 default: rng.End.Column++ } b = b[adv:] } TokenType: switch slice[0] { case '\\': if !quoted { // If we're not in quoted mode then just treat this token as // normal. (Slices can still start with backslash even if we're // not specifically looking for backslash sequences.) break TokenType } if len(slice) < 2 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid escape sequence", Detail: "Backslash must be followed by an escape sequence selector character.", Subject: rng.Ptr(), }) break TokenType } switch slice[1] { case 'n': ret = append(ret, '\n') continue Slices case 'r': ret = append(ret, '\r') continue Slices case 't': ret = append(ret, '\t') continue Slices case '"': ret = append(ret, '"') continue Slices case '\\': ret = append(ret, '\\') continue Slices case 'u', 'U': if slice[1] == 'u' && len(slice) != 6 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid escape sequence", Detail: "The \\u escape sequence must be followed by four hexadecimal digits.", Subject: rng.Ptr(), }) break TokenType } else if slice[1] == 'U' && len(slice) != 10 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid escape sequence", Detail: "The \\U escape sequence must be followed by eight hexadecimal digits.", Subject: rng.Ptr(), }) break TokenType } numHex := string(slice[2:]) num, err := strconv.ParseUint(numHex, 16, 32) if err != nil { // Should never happen because the scanner won't match // a sequence of digits that isn't valid. panic(err) } r := rune(num) l := utf8.RuneLen(r) if l == -1 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid escape sequence", Detail: fmt.Sprintf("Cannot encode character U+%04x in UTF-8.", num), Subject: rng.Ptr(), }) break TokenType } for i := 0; i < l; i++ { ret = append(ret, 0) } rb := ret[len(ret)-l:] utf8.EncodeRune(rb, r) continue Slices default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid escape sequence", Detail: fmt.Sprintf("The symbol %q is not a valid escape sequence selector.", slice[1:]), Subject: rng.Ptr(), }) ret = append(ret, slice[1:]...) continue Slices } case '$', '%': if len(slice) != 3 { // Not long enough to be our escape sequence, so it's literal. break TokenType } if slice[1] == slice[0] && slice[2] == '{' { ret = append(ret, slice[0]) ret = append(ret, '{') continue Slices } break TokenType } // If we fall out here or break out of here from the switch above // then this slice is just a literal. ret = append(ret, slice...) } return string(ret), diags } // setRecovery turns on recovery mode without actually doing any recovery. // This can be used when a parser knowingly leaves the peeker in a useless // place and wants to suppress errors that might result from that decision. func (p *parser) setRecovery() { p.recovery = true } // recover seeks forward in the token stream until it finds TokenType "end", // then returns with the peeker pointed at the following token. // // If the given token type is a bracketer, this function will additionally // count nested instances of the brackets to try to leave the peeker at // the end of the _current_ instance of that bracketer, skipping over any // nested instances. This is a best-effort operation and may have // unpredictable results on input with bad bracketer nesting. 
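// For example, recovering to TokenCBrace from somewhere inside
// { a = { b = 1 } } counts the nested braces, so recovery stops only
// after the final close brace rather than the inner one.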
func (p *parser) recover(end TokenType) Token { start := p.oppositeBracket(end) p.recovery = true nest := 0 for { tok := p.Read() ty := tok.Type if end == TokenTemplateSeqEnd && ty == TokenTemplateControl { // normalize so that our matching behavior can work, since // TokenTemplateControl/TokenTemplateInterp are asymmetrical // with TokenTemplateSeqEnd and thus we need to count both // openers if that's the closer we're looking for. ty = TokenTemplateInterp } switch ty { case start: nest++ case end: if nest < 1 { return tok } nest-- case TokenEOF: return tok } } } // recoverOver seeks forward in the token stream until it finds a block // starting with TokenType "start", then finds the corresponding end token, // leaving the peeker pointed at the token after that end token. // // The given token type _must_ be a bracketer. For example, if the given // start token is TokenOBrace then the parser will be left at the _end_ of // the next brace-delimited block encountered, or at EOF if no such block // is found or it is unclosed. func (p *parser) recoverOver(start TokenType) { end := p.oppositeBracket(start) // find the opening bracket first Token: for { tok := p.Read() switch tok.Type { case start, TokenEOF: break Token } } // Now use our existing recover function to locate the _end_ of the // container we've found. p.recover(end) } func (p *parser) recoverAfterBodyItem() { p.recovery = true var open []TokenType Token: for { tok := p.Read() switch tok.Type { case TokenNewline: if len(open) == 0 { break Token } case TokenEOF: break Token case TokenOBrace, TokenOBrack, TokenOParen, TokenOQuote, TokenOHeredoc, TokenTemplateInterp, TokenTemplateControl: open = append(open, tok.Type) case TokenCBrace, TokenCBrack, TokenCParen, TokenCQuote, TokenCHeredoc: opener := p.oppositeBracket(tok.Type) for len(open) > 0 && open[len(open)-1] != opener { open = open[:len(open)-1] } if len(open) > 0 { open = open[:len(open)-1] } case TokenTemplateSeqEnd: for len(open) > 0 && open[len(open)-1] != TokenTemplateInterp && open[len(open)-1] != TokenTemplateControl { open = open[:len(open)-1] } if len(open) > 0 { open = open[:len(open)-1] } } } } // oppositeBracket finds the bracket that opposes the given bracketer, or // NilToken if the given token isn't a bracketer. // // "Bracketer", for the sake of this function, is one end of a matching // open/close set of tokens that establish a bracketing context. func (p *parser) oppositeBracket(ty TokenType) TokenType { switch ty { case TokenOBrace: return TokenCBrace case TokenOBrack: return TokenCBrack case TokenOParen: return TokenCParen case TokenOQuote: return TokenCQuote case TokenOHeredoc: return TokenCHeredoc case TokenCBrace: return TokenOBrace case TokenCBrack: return TokenOBrack case TokenCParen: return TokenOParen case TokenCQuote: return TokenOQuote case TokenCHeredoc: return TokenOHeredoc case TokenTemplateControl: return TokenTemplateSeqEnd case TokenTemplateInterp: return TokenTemplateSeqEnd case TokenTemplateSeqEnd: // This is ambigous, but we return Interp here because that's // what's assumed by the "recover" method. 
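		// recoverAfterBodyItem tolerates the same ambiguity by accepting either
		// template opener when it pops a TokenTemplateSeqEnd.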
return TokenTemplateInterp default: return TokenNil } } func errPlaceholderExpr(rng hcl.Range) Expression { return &LiteralValueExpr{ Val: cty.DynamicVal, SrcRange: rng, } } hcl-2.14.1/hclsyntax/parser_template.go000066400000000000000000000552661431334125700201210ustar00rootroot00000000000000package hclsyntax import ( "fmt" "strings" "unicode" "github.com/apparentlymart/go-textseg/v13/textseg" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) func (p *parser) ParseTemplate() (Expression, hcl.Diagnostics) { return p.parseTemplate(TokenEOF, false) } func (p *parser) parseTemplate(end TokenType, flushHeredoc bool) (Expression, hcl.Diagnostics) { exprs, passthru, rng, diags := p.parseTemplateInner(end, flushHeredoc) if passthru { if len(exprs) != 1 { panic("passthru set with len(exprs) != 1") } return &TemplateWrapExpr{ Wrapped: exprs[0], SrcRange: rng, }, diags } return &TemplateExpr{ Parts: exprs, SrcRange: rng, }, diags } func (p *parser) parseTemplateInner(end TokenType, flushHeredoc bool) ([]Expression, bool, hcl.Range, hcl.Diagnostics) { parts, diags := p.parseTemplateParts(end) if flushHeredoc { flushHeredocTemplateParts(parts) // Trim off leading spaces on lines per the flush heredoc spec } tp := templateParser{ Tokens: parts.Tokens, SrcRange: parts.SrcRange, } exprs, exprsDiags := tp.parseRoot() diags = append(diags, exprsDiags...) passthru := false if len(parts.Tokens) == 2 { // one real token and one synthetic "end" token if _, isInterp := parts.Tokens[0].(*templateInterpToken); isInterp { passthru = true } } return exprs, passthru, parts.SrcRange, diags } type templateParser struct { Tokens []templateToken SrcRange hcl.Range pos int } func (p *templateParser) parseRoot() ([]Expression, hcl.Diagnostics) { var exprs []Expression var diags hcl.Diagnostics for { next := p.Peek() if _, isEnd := next.(*templateEndToken); isEnd { break } expr, exprDiags := p.parseExpr() diags = append(diags, exprDiags...) exprs = append(exprs, expr) } return exprs, diags } func (p *templateParser) parseExpr() (Expression, hcl.Diagnostics) { next := p.Peek() switch tok := next.(type) { case *templateLiteralToken: p.Read() // eat literal return &LiteralValueExpr{ Val: cty.StringVal(tok.Val), SrcRange: tok.SrcRange, }, nil case *templateInterpToken: p.Read() // eat interp return tok.Expr, nil case *templateIfToken: return p.parseIf() case *templateForToken: return p.parseFor() case *templateEndToken: p.Read() // eat erroneous token return errPlaceholderExpr(tok.SrcRange), hcl.Diagnostics{ { // This is a particularly unhelpful diagnostic, so callers // should attempt to pre-empt it and produce a more helpful // diagnostic that is context-aware. 
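				// (parseIf and parseFor below do that: they watch for the end
				// token themselves and report which directive is left unclosed.)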
Severity: hcl.DiagError, Summary: "Unexpected end of template", Detail: "The control directives within this template are unbalanced.", Subject: &tok.SrcRange, }, } case *templateEndCtrlToken: p.Read() // eat erroneous token return errPlaceholderExpr(tok.SrcRange), hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: fmt.Sprintf("Unexpected %s directive", tok.Name()), Detail: "The control directives within this template are unbalanced.", Subject: &tok.SrcRange, }, } default: // should never happen, because above should be exhaustive panic(fmt.Sprintf("unhandled template token type %T", next)) } } func (p *templateParser) parseIf() (Expression, hcl.Diagnostics) { open := p.Read() openIf, isIf := open.(*templateIfToken) if !isIf { // should never happen if caller is behaving panic("parseIf called with peeker not pointing at if token") } var ifExprs, elseExprs []Expression var diags hcl.Diagnostics var endifRange hcl.Range currentExprs := &ifExprs Token: for { next := p.Peek() if end, isEnd := next.(*templateEndToken); isEnd { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unexpected end of template", Detail: fmt.Sprintf( "The if directive at %s is missing its corresponding endif directive.", openIf.SrcRange, ), Subject: &end.SrcRange, }) return errPlaceholderExpr(end.SrcRange), diags } if end, isCtrlEnd := next.(*templateEndCtrlToken); isCtrlEnd { p.Read() // eat end directive switch end.Type { case templateElse: if currentExprs == &ifExprs { currentExprs = &elseExprs continue Token } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unexpected else directive", Detail: fmt.Sprintf( "Already in the else clause for the if started at %s.", openIf.SrcRange, ), Subject: &end.SrcRange, }) case templateEndIf: endifRange = end.SrcRange break Token default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Unexpected %s directive", end.Name()), Detail: fmt.Sprintf( "Expecting an endif directive for the if started at %s.", openIf.SrcRange, ), Subject: &end.SrcRange, }) } return errPlaceholderExpr(end.SrcRange), diags } expr, exprDiags := p.parseExpr() diags = append(diags, exprDiags...) 
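		// Accumulate into whichever branch is currently being collected: the if
		// clause at first, and the else clause once an else directive is seen.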
*currentExprs = append(*currentExprs, expr) } if len(ifExprs) == 0 { ifExprs = append(ifExprs, &LiteralValueExpr{ Val: cty.StringVal(""), SrcRange: hcl.Range{ Filename: openIf.SrcRange.Filename, Start: openIf.SrcRange.End, End: openIf.SrcRange.End, }, }) } if len(elseExprs) == 0 { elseExprs = append(elseExprs, &LiteralValueExpr{ Val: cty.StringVal(""), SrcRange: hcl.Range{ Filename: endifRange.Filename, Start: endifRange.Start, End: endifRange.Start, }, }) } trueExpr := &TemplateExpr{ Parts: ifExprs, SrcRange: hcl.RangeBetween(ifExprs[0].Range(), ifExprs[len(ifExprs)-1].Range()), } falseExpr := &TemplateExpr{ Parts: elseExprs, SrcRange: hcl.RangeBetween(elseExprs[0].Range(), elseExprs[len(elseExprs)-1].Range()), } return &ConditionalExpr{ Condition: openIf.CondExpr, TrueResult: trueExpr, FalseResult: falseExpr, SrcRange: hcl.RangeBetween(openIf.SrcRange, endifRange), }, diags } func (p *templateParser) parseFor() (Expression, hcl.Diagnostics) { open := p.Read() openFor, isFor := open.(*templateForToken) if !isFor { // should never happen if caller is behaving panic("parseFor called with peeker not pointing at for token") } var contentExprs []Expression var diags hcl.Diagnostics var endforRange hcl.Range Token: for { next := p.Peek() if end, isEnd := next.(*templateEndToken); isEnd { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unexpected end of template", Detail: fmt.Sprintf( "The for directive at %s is missing its corresponding endfor directive.", openFor.SrcRange, ), Subject: &end.SrcRange, }) return errPlaceholderExpr(end.SrcRange), diags } if end, isCtrlEnd := next.(*templateEndCtrlToken); isCtrlEnd { p.Read() // eat end directive switch end.Type { case templateElse: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unexpected else directive", Detail: "An else clause is not expected for a for directive.", Subject: &end.SrcRange, }) case templateEndFor: endforRange = end.SrcRange break Token default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Unexpected %s directive", end.Name()), Detail: fmt.Sprintf( "Expecting an endfor directive corresponding to the for directive at %s.", openFor.SrcRange, ), Subject: &end.SrcRange, }) } return errPlaceholderExpr(end.SrcRange), diags } expr, exprDiags := p.parseExpr() diags = append(diags, exprDiags...) contentExprs = append(contentExprs, expr) } if len(contentExprs) == 0 { contentExprs = append(contentExprs, &LiteralValueExpr{ Val: cty.StringVal(""), SrcRange: hcl.Range{ Filename: openFor.SrcRange.Filename, Start: openFor.SrcRange.End, End: openFor.SrcRange.End, }, }) } contentExpr := &TemplateExpr{ Parts: contentExprs, SrcRange: hcl.RangeBetween(contentExprs[0].Range(), contentExprs[len(contentExprs)-1].Range()), } forExpr := &ForExpr{ KeyVar: openFor.KeyVar, ValVar: openFor.ValVar, CollExpr: openFor.CollExpr, ValExpr: contentExpr, SrcRange: hcl.RangeBetween(openFor.SrcRange, endforRange), OpenRange: openFor.SrcRange, CloseRange: endforRange, } return &TemplateJoinExpr{ Tuple: forExpr, }, diags } func (p *templateParser) Peek() templateToken { return p.Tokens[p.pos] } func (p *templateParser) Read() templateToken { ret := p.Peek() if _, end := ret.(*templateEndToken); !end { p.pos++ } return ret } // parseTemplateParts produces a flat sequence of "template tokens", which are // either literal values (with any "trimming" already applied), interpolation // sequences, or control flow markers. 
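//
// For example, a quoted template like "a${x}%{ if c }b%{ endif }" is raised
// (roughly) to: a literal "a", an interpolation of x, an if marker for c, a
// literal "b", an endif marker, and a final synthetic end token.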
// // A further pass is required on the result to turn it into an AST. func (p *parser) parseTemplateParts(end TokenType) (*templateParts, hcl.Diagnostics) { var parts []templateToken var diags hcl.Diagnostics startRange := p.NextRange() ltrimNext := false nextCanTrimPrev := false var endRange hcl.Range Token: for { next := p.Read() if next.Type == end { // all done! endRange = next.Range break } ltrim := ltrimNext ltrimNext = false canTrimPrev := nextCanTrimPrev nextCanTrimPrev = false switch next.Type { case TokenStringLit, TokenQuotedLit: str, strDiags := ParseStringLiteralToken(next) diags = append(diags, strDiags...) if ltrim { str = strings.TrimLeftFunc(str, unicode.IsSpace) } parts = append(parts, &templateLiteralToken{ Val: str, SrcRange: next.Range, }) nextCanTrimPrev = true case TokenTemplateInterp: // if the opener is ${~ then we want to eat any trailing whitespace // in the preceding literal token, assuming it is indeed a literal // token. if canTrimPrev && len(next.Bytes) == 3 && next.Bytes[2] == '~' && len(parts) > 0 { prevExpr := parts[len(parts)-1] if lexpr, ok := prevExpr.(*templateLiteralToken); ok { lexpr.Val = strings.TrimRightFunc(lexpr.Val, unicode.IsSpace) } } p.PushIncludeNewlines(false) expr, exprDiags := p.ParseExpression() diags = append(diags, exprDiags...) close := p.Peek() if close.Type != TokenTemplateSeqEnd { if !p.recovery { switch close.Type { case TokenEOF: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unclosed template interpolation sequence", Detail: "There is no closing brace for this interpolation sequence before the end of the file. This might be caused by incorrect nesting inside the given expression.", Subject: &startRange, }) case TokenColon: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Extra characters after interpolation expression", Detail: "Template interpolation doesn't expect a colon at this location. Did you intend this to be a literal sequence to be processed as part of another language? If so, you can escape it by starting with \"$${\" instead of just \"${\".", Subject: &close.Range, Context: hcl.RangeBetween(startRange, close.Range).Ptr(), }) default: if (close.Type == TokenCQuote || close.Type == TokenOQuote) && end == TokenCQuote { // We'll get here if we're processing a _quoted_ // template and we find an errant quote inside an // interpolation sequence, which suggests that // the interpolation sequence is missing its terminator. diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unclosed template interpolation sequence", Detail: "There is no closing brace for this interpolation sequence before the end of the quoted template. This might be caused by incorrect nesting inside the given expression.", Subject: &startRange, }) } else { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Extra characters after interpolation expression", Detail: "Expected a closing brace to end the interpolation expression, but found extra characters.\n\nThis can happen when you include interpolation syntax for another language, such as shell scripting, but forget to escape the interpolation start token. 
If this is an embedded sequence for another language, escape it by starting with \"$${\" instead of just \"${\".", Subject: &close.Range, Context: hcl.RangeBetween(startRange, close.Range).Ptr(), }) } } } p.recover(TokenTemplateSeqEnd) } else { p.Read() // eat closing brace // If the closer is ~} then we want to eat any leading // whitespace on the next token, if it turns out to be a // literal token. if len(close.Bytes) == 2 && close.Bytes[0] == '~' { ltrimNext = true } } p.PopIncludeNewlines() parts = append(parts, &templateInterpToken{ Expr: expr, SrcRange: hcl.RangeBetween(next.Range, close.Range), }) case TokenTemplateControl: // if the opener is %{~ then we want to eat any trailing whitespace // in the preceding literal token, assuming it is indeed a literal // token. if canTrimPrev && len(next.Bytes) == 3 && next.Bytes[2] == '~' && len(parts) > 0 { prevExpr := parts[len(parts)-1] if lexpr, ok := prevExpr.(*templateLiteralToken); ok { lexpr.Val = strings.TrimRightFunc(lexpr.Val, unicode.IsSpace) } } p.PushIncludeNewlines(false) kw := p.Peek() if kw.Type != TokenIdent { if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid template directive", Detail: "A template directive keyword (\"if\", \"for\", etc) is expected at the beginning of a %{ sequence.", Subject: &kw.Range, Context: hcl.RangeBetween(next.Range, kw.Range).Ptr(), }) } p.recover(TokenTemplateSeqEnd) p.PopIncludeNewlines() continue Token } p.Read() // eat keyword token switch { case ifKeyword.TokenMatches(kw): condExpr, exprDiags := p.ParseExpression() diags = append(diags, exprDiags...) parts = append(parts, &templateIfToken{ CondExpr: condExpr, SrcRange: hcl.RangeBetween(next.Range, p.NextRange()), }) case elseKeyword.TokenMatches(kw): parts = append(parts, &templateEndCtrlToken{ Type: templateElse, SrcRange: hcl.RangeBetween(next.Range, p.NextRange()), }) case endifKeyword.TokenMatches(kw): parts = append(parts, &templateEndCtrlToken{ Type: templateEndIf, SrcRange: hcl.RangeBetween(next.Range, p.NextRange()), }) case forKeyword.TokenMatches(kw): var keyName, valName string if p.Peek().Type != TokenIdent { if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' directive", Detail: "For directive requires variable name after 'for'.", Subject: p.Peek().Range.Ptr(), }) } p.recover(TokenTemplateSeqEnd) p.PopIncludeNewlines() continue Token } valName = string(p.Read().Bytes) if p.Peek().Type == TokenComma { // What we just read was actually the key, then. keyName = valName p.Read() // eat comma if p.Peek().Type != TokenIdent { if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' directive", Detail: "For directive requires value variable name after comma.", Subject: p.Peek().Range.Ptr(), }) } p.recover(TokenTemplateSeqEnd) p.PopIncludeNewlines() continue Token } valName = string(p.Read().Bytes) } if !inKeyword.TokenMatches(p.Peek()) { if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid 'for' directive", Detail: "For directive requires 'in' keyword after names.", Subject: p.Peek().Range.Ptr(), }) } p.recover(TokenTemplateSeqEnd) p.PopIncludeNewlines() continue Token } p.Read() // eat 'in' keyword collExpr, collDiags := p.ParseExpression() diags = append(diags, collDiags...) 
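				// The directive header is now fully read (optional key name, value
				// name, the 'in' keyword, and the collection expression), so record
				// it as a single control token; the loop body follows as ordinary
				// parts until the matching endfor directive.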
parts = append(parts, &templateForToken{ KeyVar: keyName, ValVar: valName, CollExpr: collExpr, SrcRange: hcl.RangeBetween(next.Range, p.NextRange()), }) case endforKeyword.TokenMatches(kw): parts = append(parts, &templateEndCtrlToken{ Type: templateEndFor, SrcRange: hcl.RangeBetween(next.Range, p.NextRange()), }) default: if !p.recovery { suggestions := []string{"if", "for", "else", "endif", "endfor"} given := string(kw.Bytes) suggestion := nameSuggestion(given, suggestions) if suggestion != "" { suggestion = fmt.Sprintf(" Did you mean %q?", suggestion) } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid template control keyword", Detail: fmt.Sprintf("%q is not a valid template control keyword.%s", given, suggestion), Subject: &kw.Range, Context: hcl.RangeBetween(next.Range, kw.Range).Ptr(), }) } p.recover(TokenTemplateSeqEnd) p.PopIncludeNewlines() continue Token } close := p.Peek() if close.Type != TokenTemplateSeqEnd { if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Extra characters in %s marker", kw.Bytes), Detail: "Expected a closing brace to end the sequence, but found extra characters.", Subject: &close.Range, Context: hcl.RangeBetween(startRange, close.Range).Ptr(), }) } p.recover(TokenTemplateSeqEnd) } else { p.Read() // eat closing brace // If the closer is ~} then we want to eat any leading // whitespace on the next token, if it turns out to be a // literal token. if len(close.Bytes) == 2 && close.Bytes[0] == '~' { ltrimNext = true } } p.PopIncludeNewlines() default: if !p.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unterminated template string", Detail: "No closing marker was found for the string.", Subject: &next.Range, Context: hcl.RangeBetween(startRange, next.Range).Ptr(), }) } final := p.recover(end) endRange = final.Range break Token } } if len(parts) == 0 { // If a sequence has no content, we'll treat it as if it had an // empty string in it because that's what the user probably means // if they write "" in configuration. parts = append(parts, &templateLiteralToken{ Val: "", SrcRange: hcl.Range{ // Range is the zero-character span immediately after the // opening quote. Filename: startRange.Filename, Start: startRange.End, End: startRange.End, }, }) } // Always end with an end token, so the parser can produce diagnostics // about unclosed items with proper position information. parts = append(parts, &templateEndToken{ SrcRange: endRange, }) ret := &templateParts{ Tokens: parts, SrcRange: hcl.RangeBetween(startRange, endRange), } return ret, diags } // flushHeredocTemplateParts modifies in-place the line-leading literal strings // to apply the flush heredoc processing rule: find the line with the smallest // number of whitespace characters as prefix and then trim that number of // characters from all of the lines. // // This rule is applied to static tokens rather than to the rendered result, // so interpolating a string with leading whitespace cannot affect the chosen // prefix length. 
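//
// For example, if one line of a flush heredoc starts with four spaces and
// another with six, the smaller count (four) wins and four space characters
// are trimmed from the start of every line.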
func flushHeredocTemplateParts(parts *templateParts) { if len(parts.Tokens) == 0 { // Nothing to do return } const maxInt = int((^uint(0)) >> 1) minSpaces := maxInt newline := true var adjust []*templateLiteralToken for _, ttok := range parts.Tokens { if newline { newline = false var spaces int if lit, ok := ttok.(*templateLiteralToken); ok { orig := lit.Val trimmed := strings.TrimLeftFunc(orig, unicode.IsSpace) // If a token is entirely spaces and ends with a newline // then it's a "blank line" and thus not considered for // space-prefix-counting purposes. if len(trimmed) == 0 && strings.HasSuffix(orig, "\n") { spaces = maxInt } else { spaceBytes := len(lit.Val) - len(trimmed) spaces, _ = textseg.TokenCount([]byte(orig[:spaceBytes]), textseg.ScanGraphemeClusters) adjust = append(adjust, lit) } } else if _, ok := ttok.(*templateEndToken); ok { break // don't process the end token since it never has spaces before it } if spaces < minSpaces { minSpaces = spaces } } if lit, ok := ttok.(*templateLiteralToken); ok { if strings.HasSuffix(lit.Val, "\n") { newline = true // The following token, if any, begins a new line } } } for _, lit := range adjust { // Since we want to count space _characters_ rather than space _bytes_, // we can't just do a straightforward slice operation here and instead // need to hunt for the split point with a scanner. valBytes := []byte(lit.Val) spaceByteCount := 0 for i := 0; i < minSpaces; i++ { adv, _, _ := textseg.ScanGraphemeClusters(valBytes, true) spaceByteCount += adv valBytes = valBytes[adv:] } lit.Val = lit.Val[spaceByteCount:] lit.SrcRange.Start.Column += minSpaces lit.SrcRange.Start.Byte += spaceByteCount } } type templateParts struct { Tokens []templateToken SrcRange hcl.Range } // templateToken is a higher-level token that represents a single atom within // the template language. Our template parsing first raises the raw token // stream to a sequence of templateToken, and then transforms the result into // an expression tree. 
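// The concrete implementations are templateLiteralToken, templateInterpToken,
// templateIfToken, templateForToken, templateEndCtrlToken, and the synthetic
// templateEndToken that terminates every raised sequence.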
type templateToken interface { templateToken() templateToken } type templateLiteralToken struct { Val string SrcRange hcl.Range isTemplateToken } type templateInterpToken struct { Expr Expression SrcRange hcl.Range isTemplateToken } type templateIfToken struct { CondExpr Expression SrcRange hcl.Range isTemplateToken } type templateForToken struct { KeyVar string // empty if ignoring key ValVar string CollExpr Expression SrcRange hcl.Range isTemplateToken } type templateEndCtrlType int const ( templateEndIf templateEndCtrlType = iota templateElse templateEndFor ) type templateEndCtrlToken struct { Type templateEndCtrlType SrcRange hcl.Range isTemplateToken } func (t *templateEndCtrlToken) Name() string { switch t.Type { case templateEndIf: return "endif" case templateElse: return "else" case templateEndFor: return "endfor" default: // should never happen panic("invalid templateEndCtrlType") } } type templateEndToken struct { SrcRange hcl.Range isTemplateToken } type isTemplateToken [0]int func (t isTemplateToken) templateToken() templateToken { return t } hcl-2.14.1/hclsyntax/parser_test.go000066400000000000000000002334261431334125700172610ustar00rootroot00000000000000package hclsyntax import ( "testing" "github.com/go-test/deep" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) func init() { deep.MaxDepth = 999 } func TestParseConfig(t *testing.T) { tests := []struct { input string diagCount int want *Body }{ { ``, 0, &Body{ Attributes: Attributes{}, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 1, Byte: 0}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 1, Byte: 0}, }, }, }, { "block {}\n", 0, &Body{ Attributes: Attributes{}, Blocks: Blocks{ &Block{ Type: "block", Labels: nil, Body: &Body{ Attributes: Attributes{}, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 9, Byte: 8}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, }, TypeRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, LabelRanges: nil, OpenBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, CloseBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 8, Byte: 7}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 9}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 9}, End: hcl.Pos{Line: 2, Column: 1, Byte: 9}, }, }, }, { "block {}", 0, &Body{ Attributes: Attributes{}, Blocks: Blocks{ &Block{ Type: "block", Labels: nil, Body: &Body{ Attributes: Attributes{}, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 9, Byte: 8}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, }, TypeRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, LabelRanges: nil, OpenBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, CloseBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 8, Byte: 7}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, 
End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 9, Byte: 8}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, }, }, { "block {}block {}\n", 1, // missing newline after block definition &Body{ Attributes: Attributes{}, Blocks: Blocks{ &Block{ Type: "block", Labels: nil, Body: &Body{ Attributes: Attributes{}, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 9, Byte: 8}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, }, TypeRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, LabelRanges: nil, OpenBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, CloseBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 8, Byte: 7}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 17}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 17}, End: hcl.Pos{Line: 2, Column: 1, Byte: 17}, }, }, }, { "block { block {} }\n", 1, // can't nest another block in the single-line block syntax &Body{ Attributes: Attributes{}, Blocks: Blocks{ &Block{ Type: "block", Labels: nil, Body: &Body{ SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 2, Column: 1, Byte: 19}, }, EndRange: hcl.Range{ // Parser recovery behavior leaves us after this whole construct, on the next line Start: hcl.Pos{Line: 2, Column: 1, Byte: 19}, End: hcl.Pos{Line: 2, Column: 1, Byte: 19}, }, }, TypeRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, LabelRanges: nil, OpenBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, CloseBraceRange: hcl.Range{ // Parser recovery behavior leaves us after this whole construct, on the next line Start: hcl.Pos{Line: 2, Column: 1, Byte: 19}, End: hcl.Pos{Line: 2, Column: 1, Byte: 19}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 19}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 19}, End: hcl.Pos{Line: 2, Column: 1, Byte: 19}, }, }, }, { "block \"foo\" {}\n", 0, &Body{ Attributes: Attributes{}, Blocks: Blocks{ &Block{ Type: "block", Labels: []string{"foo"}, Body: &Body{ Attributes: Attributes{}, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 13, Byte: 12}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 15, Byte: 14}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, TypeRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, LabelRanges: []hcl.Range{ { Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, OpenBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 13, Byte: 12}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, CloseBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 14, Byte: 13}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 15}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 15}, End: hcl.Pos{Line: 2, Column: 1, Byte: 15}, }, }, }, { "block foo 
{}\n", 0, &Body{ Attributes: Attributes{}, Blocks: Blocks{ &Block{ Type: "block", Labels: []string{"foo"}, Body: &Body{ Attributes: Attributes{}, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 11, Byte: 10}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 13, Byte: 12}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, TypeRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, LabelRanges: []hcl.Range{ { Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 10, Byte: 9}, }, }, OpenBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 11, Byte: 10}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, CloseBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 12, Byte: 11}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 13}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 13}, End: hcl.Pos{Line: 2, Column: 1, Byte: 13}, }, }, }, { "block \"invalid ${not_allowed_here} foo\" {}\n", 1, // Invalid string literal; Template sequences are not allowed in this string. &Body{ Attributes: Attributes{}, Blocks: Blocks{ &Block{ Type: "block", Labels: []string{"invalid ${ ... } foo"}, // invalid interpolation gets replaced with a placeholder here Body: &Body{ Attributes: Attributes{}, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 41, Byte: 40}, End: hcl.Pos{Line: 1, Column: 43, Byte: 42}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 43, Byte: 42}, End: hcl.Pos{Line: 1, Column: 43, Byte: 42}, }, }, TypeRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, LabelRanges: []hcl.Range{ { Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 40, Byte: 39}, }, }, OpenBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 41, Byte: 40}, End: hcl.Pos{Line: 1, Column: 42, Byte: 41}, }, CloseBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 42, Byte: 41}, End: hcl.Pos{Line: 1, Column: 43, Byte: 42}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 43}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 43}, End: hcl.Pos{Line: 2, Column: 1, Byte: 43}, }, }, }, { ` block "invalid" 1.2 {} block "valid" {} `, 1, &Body{ Attributes: Attributes{}, Blocks: Blocks{ &Block{ Type: "block", Labels: []string{"invalid"}, Body: &Body{ SrcRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 1}, End: hcl.Pos{Line: 2, Column: 6, Byte: 6}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 1}, End: hcl.Pos{Line: 2, Column: 6, Byte: 6}, }, }, TypeRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 1}, End: hcl.Pos{Line: 2, Column: 6, Byte: 6}, }, LabelRanges: []hcl.Range{ { Start: hcl.Pos{Line: 2, Column: 7, Byte: 7}, End: hcl.Pos{Line: 2, Column: 16, Byte: 16}, }, }, // Since we failed parsing before we got to the // braces, the type range is used as a placeholder // for these. OpenBraceRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 1}, End: hcl.Pos{Line: 2, Column: 6, Byte: 6}, }, CloseBraceRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 1}, End: hcl.Pos{Line: 2, Column: 6, Byte: 6}, }, }, // Recovery behavior should allow us to still see this // second block, even though the first was invalid. 
&Block{ Type: "block", Labels: []string{"valid"}, Body: &Body{ Attributes: Attributes{}, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 3, Column: 15, Byte: 38}, End: hcl.Pos{Line: 3, Column: 17, Byte: 40}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 3, Column: 17, Byte: 40}, End: hcl.Pos{Line: 3, Column: 17, Byte: 40}, }, }, TypeRange: hcl.Range{ Start: hcl.Pos{Line: 3, Column: 1, Byte: 24}, End: hcl.Pos{Line: 3, Column: 6, Byte: 29}, }, LabelRanges: []hcl.Range{ { Start: hcl.Pos{Line: 3, Column: 7, Byte: 30}, End: hcl.Pos{Line: 3, Column: 14, Byte: 37}, }, }, OpenBraceRange: hcl.Range{ Start: hcl.Pos{Line: 3, Column: 15, Byte: 38}, End: hcl.Pos{Line: 3, Column: 16, Byte: 39}, }, CloseBraceRange: hcl.Range{ Start: hcl.Pos{Line: 3, Column: 16, Byte: 39}, End: hcl.Pos{Line: 3, Column: 17, Byte: 40}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 4, Column: 1, Byte: 41}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 4, Column: 1, Byte: 41}, End: hcl.Pos{Line: 4, Column: 1, Byte: 41}, }, }, }, { `block "f\o" {} `, 1, // "\o" is not a valid escape sequence &Body{ Attributes: Attributes{}, Blocks: Blocks{ &Block{ Type: "block", Labels: []string{"fo"}, Body: &Body{ Attributes: map[string]*Attribute{}, Blocks: []*Block{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 13, Byte: 12}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 15, Byte: 14}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, TypeRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, LabelRanges: []hcl.Range{ { Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, OpenBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 13, Byte: 12}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, CloseBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 14, Byte: 13}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 15}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 15}, End: hcl.Pos{Line: 2, Column: 1, Byte: 15}, }, }, }, { `block "f\n" {} `, 0, &Body{ Attributes: Attributes{}, Blocks: Blocks{ &Block{ Type: "block", Labels: []string{"f\n"}, Body: &Body{ Attributes: Attributes{}, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 13, Byte: 12}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 15, Byte: 14}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, TypeRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, LabelRanges: []hcl.Range{ { Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, OpenBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 13, Byte: 12}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, CloseBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 14, Byte: 13}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 15}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 15}, End: hcl.Pos{Line: 2, Column: 1, Byte: 15}, }, }, }, { "a = 1\n", 0, &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &LiteralValueExpr{ Val: cty.NumberIntVal(1), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 
5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 6}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 6}, End: hcl.Pos{Line: 2, Column: 1, Byte: 6}, }, }, }, { "a = 1", 0, &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &LiteralValueExpr{ Val: cty.NumberIntVal(1), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, }, { "a = \"hello ${true}\"\n", 0, &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("hello "), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, &LiteralValueExpr{ Val: cty.True, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 14, Byte: 13}, End: hcl.Pos{Line: 1, Column: 18, Byte: 17}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 20, Byte: 19}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 20, Byte: 19}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 20}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 20}, End: hcl.Pos{Line: 2, Column: 1, Byte: 20}, }, }, }, { "a = \"hello $${true}\"\n", 0, &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("hello "), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, &LiteralValueExpr{ Val: cty.StringVal("${"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 12, Byte: 11}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, &LiteralValueExpr{ Val: cty.StringVal("true}"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 15, Byte: 14}, End: hcl.Pos{Line: 1, Column: 20, Byte: 19}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 21, Byte: 20}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 21, Byte: 20}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, 
Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 21}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 21}, End: hcl.Pos{Line: 2, Column: 1, Byte: 21}, }, }, }, { "a = \"hello %%{true}\"\n", 0, &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("hello "), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, &LiteralValueExpr{ Val: cty.StringVal("%{"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 12, Byte: 11}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, &LiteralValueExpr{ Val: cty.StringVal("true}"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 15, Byte: 14}, End: hcl.Pos{Line: 1, Column: 20, Byte: 19}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 21, Byte: 20}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 21, Byte: 20}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 21}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 21}, End: hcl.Pos{Line: 2, Column: 1, Byte: 21}, }, }, }, { "a = \"hello $$\"\n", 0, &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("hello "), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, // This parses oddly due to how the scanner // handles escaping of the $ sequence, but it's // functionally equivalent to a single literal. 
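							// The two single-character "$" literals below together reproduce
							// the "$$" written in the source, so the rendered result is unaffected.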
&LiteralValueExpr{ Val: cty.StringVal("$"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 12, Byte: 11}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, &LiteralValueExpr{ Val: cty.StringVal("$"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 13, Byte: 12}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 15}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 15}, End: hcl.Pos{Line: 2, Column: 1, Byte: 15}, }, }, }, { "a = \"hello $\"\n", 0, // unterminated template interpolation sequence &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("hello "), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, &LiteralValueExpr{ Val: cty.StringVal("$"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 12, Byte: 11}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 14}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 14}, End: hcl.Pos{Line: 2, Column: 1, Byte: 14}, }, }, }, { "a = \"hello %%\"\n", 0, &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("hello "), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, // This parses oddly due to how the scanner // handles escaping of the % sequence, but it's // functionally equivalent to a single literal. 
&LiteralValueExpr{ Val: cty.StringVal("%"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 12, Byte: 11}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, &LiteralValueExpr{ Val: cty.StringVal("%"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 13, Byte: 12}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 15}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 15}, End: hcl.Pos{Line: 2, Column: 1, Byte: 15}, }, }, }, { "a = \"hello %\"\n", 0, // unterminated template control sequence &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("hello "), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, &LiteralValueExpr{ Val: cty.StringVal("%"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 12, Byte: 11}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 14}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 14}, End: hcl.Pos{Line: 2, Column: 1, Byte: 14}, }, }, }, { "a = \"hello!\"\n", 0, &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("hello!"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 13}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 13}, End: hcl.Pos{Line: 2, Column: 1, Byte: 13}, }, }, }, { "a = \"\\u2022\"\n", 0, &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("\u2022"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, }, SrcRange: 
hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 13}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 13}, End: hcl.Pos{Line: 2, Column: 1, Byte: 13}, }, }, }, { "a = \"\\uu2022\"\n", 1, // \u must be followed by four hex digits &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("\\uu2022"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 14}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 14}, End: hcl.Pos{Line: 2, Column: 1, Byte: 14}, }, }, }, { "a = \"\\U0001d11e\"\n", 0, &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("\U0001d11e"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 16, Byte: 15}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 17, Byte: 16}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 17, Byte: 16}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 17}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 17}, End: hcl.Pos{Line: 2, Column: 1, Byte: 17}, }, }, }, { "a = \"\\u0001d11e\"\n", 0, // This is valid, but probably not what the user intended :( &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ // Only the first four digits were used for the // escape sequence, so the remaining four just // get echoed out literally. 
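								// That is, the escape yields U+0001 and the trailing "d11e"
								// remains as literal text.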
Val: cty.StringVal("\u0001d11e"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 16, Byte: 15}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 17, Byte: 16}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 17, Byte: 16}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 17}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 17}, End: hcl.Pos{Line: 2, Column: 1, Byte: 17}, }, }, }, { "a = \"\\U2022\"\n", 1, // Invalid escape sequence, since we need eight hex digits for \U &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("\\U2022"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 13}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 13}, End: hcl.Pos{Line: 2, Column: 1, Byte: 13}, }, }, }, { "a = \"\\u20m2\"\n", 1, // Invalid escape sequence &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("\\u20m2"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 13}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 13}, End: hcl.Pos{Line: 2, Column: 1, Byte: 13}, }, }, }, { "a = \"\\U00300000\"\n", 1, // Invalid unicode character (can't encode in UTF-8) &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("\\U00300000"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 16, Byte: 15}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 17, Byte: 16}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 17, Byte: 16}, }, 
NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 17}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 17}, End: hcl.Pos{Line: 2, Column: 1, Byte: 17}, }, }, }, { "a = \"\\Ub2705550\"\n", 1, // Invalid unicode character (can't encode in UTF-8) &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &TemplateExpr{ Parts: []Expression{ &LiteralValueExpr{ Val: cty.StringVal("\\Ub2705550"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 16, Byte: 15}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 17, Byte: 16}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 17, Byte: 16}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 17}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 17}, End: hcl.Pos{Line: 2, Column: 1, Byte: 17}, }, }, }, { "a = < v... if true]\n", 2, // can't use => or ... in a tuple for &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &ForExpr{ KeyVar: "k", ValVar: "v", CollExpr: &ScopeTraversalExpr{ Traversal: hcl.Traversal{ hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 18, Byte: 17}, End: hcl.Pos{Line: 1, Column: 21, Byte: 20}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 18, Byte: 17}, End: hcl.Pos{Line: 1, Column: 21, Byte: 20}, }, }, KeyExpr: &ScopeTraversalExpr{ Traversal: hcl.Traversal{ hcl.TraverseRoot{ Name: "k", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 23, Byte: 22}, End: hcl.Pos{Line: 1, Column: 24, Byte: 23}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 23, Byte: 22}, End: hcl.Pos{Line: 1, Column: 24, Byte: 23}, }, }, ValExpr: &ScopeTraversalExpr{ Traversal: hcl.Traversal{ hcl.TraverseRoot{ Name: "v", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 28, Byte: 27}, End: hcl.Pos{Line: 1, Column: 29, Byte: 28}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 28, Byte: 27}, End: hcl.Pos{Line: 1, Column: 29, Byte: 28}, }, }, CondExpr: &LiteralValueExpr{ Val: cty.True, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 36, Byte: 35}, End: hcl.Pos{Line: 1, Column: 40, Byte: 39}, }, }, Group: true, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 41, Byte: 40}, }, OpenRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, CloseRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 40, Byte: 39}, End: hcl.Pos{Line: 1, Column: 41, Byte: 40}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 41, Byte: 40}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, 
}, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 41}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 41}, End: hcl.Pos{Line: 2, Column: 1, Byte: 41}, }, }, }, { ` `, 0, // the tab character is treated as a single whitespace character &Body{ Attributes: Attributes{}, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, }, { `\x81`, 2, // invalid UTF-8, and body item is required here &Body{ Attributes: Attributes{}, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, }, { "a = 1,", 1, &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &LiteralValueExpr{ Val: cty.NumberIntVal(1), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, }, }, { "a = `str`", 2, // Invalid character and expression &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &LiteralValueExpr{ SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 10, Byte: 9}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 10, Byte: 9}, End: hcl.Pos{Line: 1, Column: 10, Byte: 9}, }, }, }, { `a = 'str'`, 2, // Invalid character and expression &Body{ Attributes: Attributes{ "a": { Name: "a", Expr: &LiteralValueExpr{ SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 10, Byte: 9}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 10, Byte: 9}, End: hcl.Pos{Line: 1, Column: 10, Byte: 9}, }, }, }, { "a = sort(data.first.ref.attr)[count.index]\n", 0, &Body{ Attributes: Attributes{ "a": { 
Name: "a", Expr: &IndexExpr{ Collection: &FunctionCallExpr{ Name: "sort", Args: []Expression{ &ScopeTraversalExpr{ Traversal: hcl.Traversal{ hcl.TraverseRoot{ Name: "data", SrcRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 10, Byte: 9}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, }, hcl.TraverseAttr{ Name: "first", SrcRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 14, Byte: 13}, End: hcl.Pos{Line: 1, Column: 20, Byte: 19}, }, }, hcl.TraverseAttr{ Name: "ref", SrcRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 20, Byte: 19}, End: hcl.Pos{Line: 1, Column: 24, Byte: 23}, }, }, hcl.TraverseAttr{ Name: "attr", SrcRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 24, Byte: 23}, End: hcl.Pos{Line: 1, Column: 29, Byte: 28}, }, }, }, SrcRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 10, Byte: 9}, End: hcl.Pos{Line: 1, Column: 29, Byte: 28}, }, }, }, ExpandFinal: false, NameRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, OpenParenRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 9, Byte: 8}, End: hcl.Pos{Line: 1, Column: 10, Byte: 9}, }, CloseParenRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 29, Byte: 28}, End: hcl.Pos{Line: 1, Column: 30, Byte: 29}, }, }, Key: &ScopeTraversalExpr{ Traversal: hcl.Traversal{ hcl.TraverseRoot{ Name: "count", SrcRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 31, Byte: 30}, End: hcl.Pos{Line: 1, Column: 36, Byte: 35}, }, }, hcl.TraverseAttr{ Name: "index", SrcRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 36, Byte: 35}, End: hcl.Pos{Line: 1, Column: 42, Byte: 41}, }, }, }, SrcRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 31, Byte: 30}, End: hcl.Pos{Line: 1, Column: 42, Byte: 41}, }, }, SrcRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 5, Byte: 4}, End: hcl.Pos{Line: 1, Column: 43, Byte: 42}, }, OpenRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 30, Byte: 29}, End: hcl.Pos{Line: 1, Column: 31, Byte: 30}, }, BracketRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 30, Byte: 29}, End: hcl.Pos{Line: 1, Column: 43, Byte: 42}, }, }, SrcRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 43, Byte: 42}, }, NameRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, EqualsRange: hcl.Range{ Filename: "", Start: hcl.Pos{Line: 1, Column: 3, Byte: 2}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, }, Blocks: Blocks{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 2, Column: 1, Byte: 43}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 1, Byte: 43}, End: hcl.Pos{Line: 2, Column: 1, Byte: 43}, }, }, }, { `block "unterminated_string "name" {}`, 2, // "Invalid string literal" and "Invalid block definition" &Body{ Attributes: Attributes{}, Blocks: Blocks{ &Block{ Type: "block", Labels: []string{"unterminated_string ", "name", " {}"}, Body: &Body{ SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, TypeRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, LabelRanges: []hcl.Range{ { Start: hcl.Pos{Line: 1, 
Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 29, Byte: 28}, }, { Start: hcl.Pos{Line: 1, Column: 29, Byte: 28}, End: hcl.Pos{Line: 1, Column: 33, Byte: 32}, }, { Start: hcl.Pos{Line: 1, Column: 33, Byte: 32}, End: hcl.Pos{Line: 1, Column: 37, Byte: 36}, }, }, OpenBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, CloseBraceRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 37, Byte: 36}, }, EndRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 37, Byte: 36}, End: hcl.Pos{Line: 1, Column: 37, Byte: 36}, }, }, }, } for _, test := range tests { t.Run(test.input, func(t *testing.T) { t.Logf("\n%s", test.input) file, diags := ParseConfig([]byte(test.input), "", hcl.Pos{Byte: 0, Line: 1, Column: 1}) if len(diags) != test.diagCount { t.Errorf("wrong number of diagnostics %d; want %d", len(diags), test.diagCount) for _, diag := range diags { t.Logf(" - %s", diag.Error()) } } got := file.Body if diff := deep.Equal(got, test.want); diff != nil { for _, problem := range diff { t.Errorf(problem) } } }) } } func TestParseConfigDiagnostics(t *testing.T) { // This test function is a variant of TestParseConfig which tests for // specific error messages for certain kinds of invalid input where we // intend to produce a particular helpful error message. tests := map[string]struct { input string want hcl.Diagnostics }{ "unclosed multi-line block (no contents)": { "blah {\n", hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Unclosed configuration block", Detail: "There is no closing brace for this block before the end of the file. This may be caused by incorrect brace nesting elsewhere in this file.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, }, }, }, "unclosed multi-line block (after one argument)": { "blah {\n a = 1\n", hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Unclosed configuration block", Detail: "There is no closing brace for this block before the end of the file. This may be caused by incorrect brace nesting elsewhere in this file.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, }, }, }, "unclosed single-line block (no contents)": { "blah {", hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Unclosed configuration block", Detail: "There is no closing brace for this block before the end of the file. This may be caused by incorrect brace nesting elsewhere in this file.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, }, }, }, "unclosed single-line block (after its argument)": { "blah { a = 1", hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Unclosed configuration block", Detail: "There is no closing brace for this block before the end of the file. 
This may be caused by incorrect brace nesting elsewhere in this file.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 6, Byte: 5}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, Context: &hcl.Range{ // In this case we can also report a context because we detect this error in a more convenient place in the parser Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, }, }, }, "unclosed object constructor (before element separator)": { `foo = { a = 1`, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Unterminated object constructor expression", Detail: "There is no corresponding closing brace before the end of the file. This may be caused by incorrect brace nesting elsewhere in this file.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, }, }, }, "unclosed object constructor (before equals)": { `foo = { a `, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Unterminated object constructor expression", Detail: "There is no corresponding closing brace before the end of the file. This may be caused by incorrect brace nesting elsewhere in this file.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, }, }, }, "unclosed tuple constructor (before element separator)": { `foo = [ a`, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Unterminated tuple constructor expression", Detail: "There is no corresponding closing bracket before the end of the file. This may be caused by incorrect bracket nesting elsewhere in this file.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, }, }, }, "unclosed function call": { `foo = boop("a"`, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Unterminated function call", Detail: "There is no closing parenthesis for this function call before the end of the file. This may be caused by incorrect parethesis nesting elsewhere in this file.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, }, }, "unclosed grouping parentheses": { `foo = (1`, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Unbalanced parentheses", Detail: "Expected a closing parenthesis to terminate the expression.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 9, Byte: 8}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, Context: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 7, Byte: 6}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, }, }, }, "unclosed template interpolation at EOF": { `foo = "${a`, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Unclosed template interpolation sequence", Detail: "There is no closing brace for this interpolation sequence before the end of the file. This might be caused by incorrect nesting inside the given expression.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 8, Byte: 7}, End: hcl.Pos{Line: 1, Column: 10, Byte: 9}, }, }, }, }, "unclosed quoted template interpolation at closing quote": { `foo = "${a"`, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Unclosed template interpolation sequence", Detail: "There is no closing brace for this interpolation sequence before the end of the quoted template. 
This might be caused by incorrect nesting inside the given expression.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 8, Byte: 7}, End: hcl.Pos{Line: 1, Column: 10, Byte: 9}, }, }, }, }, "unclosed quoted template at literal part": { `foo = "${a}`, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Unterminated template string", Detail: "No closing marker was found for the string.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 12, Byte: 11}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, Context: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 8, Byte: 7}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, }, }, // Some of our "unclosed" situations happen at a less convenient time // when we only know we're waiting for an expression, so those get // an error message with much less context. "unclosed object constructor (before any expression)": { `foo = {`, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Missing expression", Detail: "Expected the start of an expression, but found the end of the file.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 8, Byte: 7}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, }, }, }, "unclosed tuple constructor (before any expression)": { `foo = [`, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Missing expression", Detail: "Expected the start of an expression, but found the end of the file.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 8, Byte: 7}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, }, }, }, "unclosed function call (before any argument)": { `foo = foo(`, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Missing expression", Detail: "Expected the start of an expression, but found the end of the file.", Subject: &hcl.Range{ Filename: "test.hcl", Start: hcl.Pos{Line: 1, Column: 11, Byte: 10}, End: hcl.Pos{Line: 1, Column: 11, Byte: 10}, }, }, }, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { t.Logf("\n%s", test.input) _, diags := ParseConfig([]byte(test.input), "test.hcl", hcl.InitialPos) if diff := deep.Equal(diags, test.want); diff != nil { for _, problem := range diff { t.Errorf(problem) } } }) } } hcl-2.14.1/hclsyntax/parser_traversal.go000066400000000000000000000105421431334125700202750ustar00rootroot00000000000000package hclsyntax import ( "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) // ParseTraversalAbs parses an absolute traversal that is assumed to consume // all of the remaining tokens in the peeker. The usual parser recovery // behavior is not supported here because traversals are not expected to // be parsed as part of a larger program. 
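//
// Illustrative sketch (an added note, not from the original source): an
// input such as
//
//	foo.bar["baz"][0]
//
// is parsed as a TraverseRoot step for "foo", followed by a TraverseAttr
// step for each ".name" access and a TraverseIndex step for each bracketed
// key, where a bracketed key may be either a literal number or a literal
// quoted string.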
func (p *parser) ParseTraversalAbs() (hcl.Traversal, hcl.Diagnostics) { var ret hcl.Traversal var diags hcl.Diagnostics // Absolute traversal must always begin with a variable name varTok := p.Read() if varTok.Type != TokenIdent { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Variable name required", Detail: "Must begin with a variable name.", Subject: &varTok.Range, }) return ret, diags } varName := string(varTok.Bytes) ret = append(ret, hcl.TraverseRoot{ Name: varName, SrcRange: varTok.Range, }) for { next := p.Peek() if next.Type == TokenEOF { return ret, diags } switch next.Type { case TokenDot: // Attribute access dot := p.Read() // eat dot nameTok := p.Read() if nameTok.Type != TokenIdent { if nameTok.Type == TokenStar { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Attribute name required", Detail: "Splat expressions (.*) may not be used here.", Subject: &nameTok.Range, Context: hcl.RangeBetween(varTok.Range, nameTok.Range).Ptr(), }) } else { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Attribute name required", Detail: "Dot must be followed by attribute name.", Subject: &nameTok.Range, Context: hcl.RangeBetween(varTok.Range, nameTok.Range).Ptr(), }) } return ret, diags } attrName := string(nameTok.Bytes) ret = append(ret, hcl.TraverseAttr{ Name: attrName, SrcRange: hcl.RangeBetween(dot.Range, nameTok.Range), }) case TokenOBrack: // Index open := p.Read() // eat open bracket next := p.Peek() switch next.Type { case TokenNumberLit: tok := p.Read() // eat number numVal, numDiags := p.numberLitValue(tok) diags = append(diags, numDiags...) close := p.Read() if close.Type != TokenCBrack { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unclosed index brackets", Detail: "Index key must be followed by a closing bracket.", Subject: &close.Range, Context: hcl.RangeBetween(open.Range, close.Range).Ptr(), }) } ret = append(ret, hcl.TraverseIndex{ Key: numVal, SrcRange: hcl.RangeBetween(open.Range, close.Range), }) if diags.HasErrors() { return ret, diags } case TokenOQuote: str, _, strDiags := p.parseQuotedStringLiteral() diags = append(diags, strDiags...) 
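// Added note for clarity: as with the numeric case above, the quoted key
// is recorded below as a TraverseIndex step, with the parsed string
// wrapped in cty.StringVal.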
close := p.Read() if close.Type != TokenCBrack { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unclosed index brackets", Detail: "Index key must be followed by a closing bracket.", Subject: &close.Range, Context: hcl.RangeBetween(open.Range, close.Range).Ptr(), }) } ret = append(ret, hcl.TraverseIndex{ Key: cty.StringVal(str), SrcRange: hcl.RangeBetween(open.Range, close.Range), }) if diags.HasErrors() { return ret, diags } default: if next.Type == TokenStar { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Attribute name required", Detail: "Splat expressions ([*]) may not be used here.", Subject: &next.Range, Context: hcl.RangeBetween(varTok.Range, next.Range).Ptr(), }) } else { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Index value required", Detail: "Index brackets must contain either a literal number or a literal string.", Subject: &next.Range, Context: hcl.RangeBetween(varTok.Range, next.Range).Ptr(), }) } return ret, diags } default: diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid character", Detail: "Expected an attribute access or an index operator.", Subject: &next.Range, Context: hcl.RangeBetween(varTok.Range, next.Range).Ptr(), }) return ret, diags } } } hcl-2.14.1/hclsyntax/peeker.go000066400000000000000000000136311431334125700161730ustar00rootroot00000000000000package hclsyntax import ( "bytes" "fmt" "path/filepath" "runtime" "strings" "github.com/hashicorp/hcl/v2" ) // This is set to true at init() time in tests, to enable more useful output // if a stack discipline error is detected. It should not be enabled in // normal mode since there is a performance penalty from accessing the // runtime stack to produce the traces, but could be temporarily set to // true for debugging if desired. var tracePeekerNewlinesStack = false type peeker struct { Tokens Tokens NextIndex int IncludeComments bool IncludeNewlinesStack []bool // used only when tracePeekerNewlinesStack is set newlineStackChanges []peekerNewlineStackChange } // for use in debugging the stack usage only type peekerNewlineStackChange struct { Pushing bool // if false, then popping Frame runtime.Frame Include bool } func newPeeker(tokens Tokens, includeComments bool) *peeker { return &peeker{ Tokens: tokens, IncludeComments: includeComments, IncludeNewlinesStack: []bool{true}, } } func (p *peeker) Peek() Token { ret, _ := p.nextToken() return ret } func (p *peeker) Read() Token { ret, nextIdx := p.nextToken() p.NextIndex = nextIdx return ret } func (p *peeker) NextRange() hcl.Range { return p.Peek().Range } func (p *peeker) PrevRange() hcl.Range { if p.NextIndex == 0 { return p.NextRange() } return p.Tokens[p.NextIndex-1].Range } func (p *peeker) nextToken() (Token, int) { for i := p.NextIndex; i < len(p.Tokens); i++ { tok := p.Tokens[i] switch tok.Type { case TokenComment: if !p.IncludeComments { // Single-line comment tokens, starting with # or //, absorb // the trailing newline that terminates them as part of their // bytes. When we're filtering out comments, we must as a // special case transform these to newline tokens in order // to properly parse newline-terminated block items. if p.includingNewlines() { if len(tok.Bytes) > 0 && tok.Bytes[len(tok.Bytes)-1] == '\n' { fakeNewline := Token{ Type: TokenNewline, Bytes: tok.Bytes[len(tok.Bytes)-1 : len(tok.Bytes)], // We use the whole token range as the newline // range, even though that's a little... 
weird, // because otherwise we'd need to go count // characters again in order to figure out the // column of the newline, and that complexity // isn't justified when ranges of newlines are // so rarely printed anyway. Range: tok.Range, } return fakeNewline, i + 1 } } continue } case TokenNewline: if !p.includingNewlines() { continue } } return tok, i + 1 } // if we fall out here then we'll return the EOF token, and leave // our index pointed off the end of the array so we'll keep // returning EOF in future too. return p.Tokens[len(p.Tokens)-1], len(p.Tokens) } func (p *peeker) includingNewlines() bool { return p.IncludeNewlinesStack[len(p.IncludeNewlinesStack)-1] } func (p *peeker) PushIncludeNewlines(include bool) { if tracePeekerNewlinesStack { // Record who called us so that we can more easily track down any // mismanagement of the stack in the parser. callers := []uintptr{0} runtime.Callers(2, callers) frames := runtime.CallersFrames(callers) frame, _ := frames.Next() p.newlineStackChanges = append(p.newlineStackChanges, peekerNewlineStackChange{ true, frame, include, }) } p.IncludeNewlinesStack = append(p.IncludeNewlinesStack, include) } func (p *peeker) PopIncludeNewlines() bool { stack := p.IncludeNewlinesStack remain, ret := stack[:len(stack)-1], stack[len(stack)-1] p.IncludeNewlinesStack = remain if tracePeekerNewlinesStack { // Record who called us so that we can more easily track down any // mismanagement of the stack in the parser. callers := []uintptr{0} runtime.Callers(2, callers) frames := runtime.CallersFrames(callers) frame, _ := frames.Next() p.newlineStackChanges = append(p.newlineStackChanges, peekerNewlineStackChange{ false, frame, ret, }) } return ret } // AssertEmptyNewlinesStack checks if the IncludeNewlinesStack is empty, doing // panicking if it is not. This can be used to catch stack mismanagement that // might otherwise just cause confusing downstream errors. // // This function is a no-op if the stack is empty when called. // // If newlines stack tracing is enabled by setting the global variable // tracePeekerNewlinesStack at init time, a full log of all of the push/pop // calls will be produced to help identify which caller in the parser is // misbehaving. func (p *peeker) AssertEmptyIncludeNewlinesStack() { if len(p.IncludeNewlinesStack) != 1 { // Should never happen; indicates mismanagement of the stack inside // the parser. 
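// Added note for clarity: newPeeker seeds IncludeNewlinesStack with a single
// `true` entry, so a correctly balanced sequence of PushIncludeNewlines and
// PopIncludeNewlines calls leaves exactly one element on the stack.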
if p.newlineStackChanges != nil { // only if traceNewlinesStack is enabled above panic(fmt.Errorf( "non-empty IncludeNewlinesStack after parse with %d calls unaccounted for:\n%s", len(p.IncludeNewlinesStack)-1, formatPeekerNewlineStackChanges(p.newlineStackChanges), )) } else { panic(fmt.Errorf("non-empty IncludeNewlinesStack after parse: %#v", p.IncludeNewlinesStack)) } } } func formatPeekerNewlineStackChanges(changes []peekerNewlineStackChange) string { indent := 0 var buf bytes.Buffer for _, change := range changes { funcName := change.Frame.Function if idx := strings.LastIndexByte(funcName, '.'); idx != -1 { funcName = funcName[idx+1:] } filename := change.Frame.File if idx := strings.LastIndexByte(filename, filepath.Separator); idx != -1 { filename = filename[idx+1:] } switch change.Pushing { case true: buf.WriteString(strings.Repeat(" ", indent)) fmt.Fprintf(&buf, "PUSH %#v (%s at %s:%d)\n", change.Include, funcName, filename, change.Frame.Line) indent++ case false: indent-- buf.WriteString(strings.Repeat(" ", indent)) fmt.Fprintf(&buf, "POP %#v (%s at %s:%d)\n", change.Include, funcName, filename, change.Frame.Line) } } return buf.String() } hcl-2.14.1/hclsyntax/peeker_test.go000066400000000000000000000050331431334125700172270ustar00rootroot00000000000000package hclsyntax import ( "reflect" "testing" ) func init() { // see the documentation of this variable for more information tracePeekerNewlinesStack = true } func TestPeeker(t *testing.T) { tokens := Tokens{ { Type: TokenIdent, }, { Type: TokenComment, }, { Type: TokenIdent, }, { Type: TokenComment, }, { Type: TokenIdent, }, { Type: TokenNewline, }, { Type: TokenIdent, }, { Type: TokenNewline, }, { Type: TokenIdent, }, { Type: TokenNewline, }, { Type: TokenEOF, }, } { peeker := newPeeker(tokens, true) wantTypes := []TokenType{ TokenIdent, TokenComment, TokenIdent, TokenComment, TokenIdent, TokenNewline, TokenIdent, TokenNewline, TokenIdent, TokenNewline, TokenEOF, } var gotTypes []TokenType for { peeked := peeker.Peek() read := peeker.Read() if peeked.Type != read.Type { t.Errorf("mismatched Peek %s and Read %s", peeked, read) } gotTypes = append(gotTypes, read.Type) if read.Type == TokenEOF { break } } if !reflect.DeepEqual(gotTypes, wantTypes) { t.Errorf("wrong types\ngot: %#v\nwant: %#v", gotTypes, wantTypes) } } { peeker := newPeeker(tokens, false) wantTypes := []TokenType{ TokenIdent, TokenIdent, TokenIdent, TokenNewline, TokenIdent, TokenNewline, TokenIdent, TokenNewline, TokenEOF, } var gotTypes []TokenType for { peeked := peeker.Peek() read := peeker.Read() if peeked.Type != read.Type { t.Errorf("mismatched Peek %s and Read %s", peeked, read) } gotTypes = append(gotTypes, read.Type) if read.Type == TokenEOF { break } } if !reflect.DeepEqual(gotTypes, wantTypes) { t.Errorf("wrong types\ngot: %#v\nwant: %#v", gotTypes, wantTypes) } } { peeker := newPeeker(tokens, false) peeker.PushIncludeNewlines(false) wantTypes := []TokenType{ TokenIdent, TokenIdent, TokenIdent, TokenIdent, TokenIdent, TokenNewline, // we'll pop off the PushIncludeNewlines before we get here TokenEOF, } var gotTypes []TokenType idx := 0 for { peeked := peeker.Peek() read := peeker.Read() if peeked.Type != read.Type { t.Errorf("mismatched Peek %s and Read %s", peeked, read) } gotTypes = append(gotTypes, read.Type) if read.Type == TokenEOF { break } if idx == 4 { peeker.PopIncludeNewlines() } idx++ } if !reflect.DeepEqual(gotTypes, wantTypes) { t.Errorf("wrong types\ngot: %#v\nwant: %#v", gotTypes, wantTypes) } } } 
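// Illustrative usage sketch (an addition, not part of the archived sources):
// a minimal program exercising the hclsyntax entry points defined in
// public.go, which follows. The file name "example.hcl" and the input
// source are arbitrary placeholders.
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
)

func main() {
	src := []byte("a = 1\n")

	// ParseConfig returns an *hcl.File whose Body has dynamic type
	// *hclsyntax.Body, plus any diagnostics produced while parsing.
	file, diags := hclsyntax.ParseConfig(src, "example.hcl", hcl.InitialPos)
	if diags.HasErrors() {
		fmt.Println(diags.Error())
		return
	}

	body := file.Body.(*hclsyntax.Body)
	fmt.Printf("parsed %d attribute(s)\n", len(body.Attributes))
}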
hcl-2.14.1/hclsyntax/public.go000066400000000000000000000152301431334125700161730ustar00rootroot00000000000000package hclsyntax import ( "github.com/hashicorp/hcl/v2" ) // ParseConfig parses the given buffer as a whole HCL config file, returning // a *hcl.File representing its contents. If HasErrors called on the returned // diagnostics returns true, the returned body is likely to be incomplete // and should therefore be used with care. // // The body in the returned file has dynamic type *hclsyntax.Body, so callers // may freely type-assert this to get access to the full hclsyntax API in // situations where detailed access is required. However, most common use-cases // should be served using the hcl.Body interface to ensure compatibility with // other configurationg syntaxes, such as JSON. func ParseConfig(src []byte, filename string, start hcl.Pos) (*hcl.File, hcl.Diagnostics) { tokens, diags := LexConfig(src, filename, start) peeker := newPeeker(tokens, false) parser := &parser{peeker: peeker} body, parseDiags := parser.ParseBody(TokenEOF) diags = append(diags, parseDiags...) // Panic if the parser uses incorrect stack discipline with the peeker's // newlines stack, since otherwise it will produce confusing downstream // errors. peeker.AssertEmptyIncludeNewlinesStack() return &hcl.File{ Body: body, Bytes: src, Nav: navigation{ root: body, }, }, diags } // ParseExpression parses the given buffer as a standalone HCL expression, // returning it as an instance of Expression. func ParseExpression(src []byte, filename string, start hcl.Pos) (Expression, hcl.Diagnostics) { tokens, diags := LexExpression(src, filename, start) peeker := newPeeker(tokens, false) parser := &parser{peeker: peeker} // Bare expressions are always parsed in "ignore newlines" mode, as if // they were wrapped in parentheses. parser.PushIncludeNewlines(false) expr, parseDiags := parser.ParseExpression() diags = append(diags, parseDiags...) next := parser.Peek() if next.Type != TokenEOF && !parser.recovery { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Extra characters after expression", Detail: "An expression was successfully parsed, but extra characters were found after it.", Subject: &next.Range, }) } parser.PopIncludeNewlines() // Panic if the parser uses incorrect stack discipline with the peeker's // newlines stack, since otherwise it will produce confusing downstream // errors. peeker.AssertEmptyIncludeNewlinesStack() return expr, diags } // ParseTemplate parses the given buffer as a standalone HCL template, // returning it as an instance of Expression. func ParseTemplate(src []byte, filename string, start hcl.Pos) (Expression, hcl.Diagnostics) { tokens, diags := LexTemplate(src, filename, start) peeker := newPeeker(tokens, false) parser := &parser{peeker: peeker} expr, parseDiags := parser.ParseTemplate() diags = append(diags, parseDiags...) // Panic if the parser uses incorrect stack discipline with the peeker's // newlines stack, since otherwise it will produce confusing downstream // errors. peeker.AssertEmptyIncludeNewlinesStack() return expr, diags } // ParseTraversalAbs parses the given buffer as a standalone absolute traversal. // // Parsing as a traversal is more limited than parsing as an expession since // it allows only attribute and indexing operations on variables. Traverals // are useful as a syntax for referring to objects without necessarily // evaluating them. 
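//
// A hedged usage sketch (not in the original source; the file name and
// input are placeholders):
//
//	trav, diags := ParseTraversalAbs([]byte(`foo.bar[0]`), "example.hcl", hcl.InitialPos)
//
// On success, trav is an hcl.Traversal describing the variable access
// without evaluating it, as noted above; any parse problems are reported
// in diags.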
func ParseTraversalAbs(src []byte, filename string, start hcl.Pos) (hcl.Traversal, hcl.Diagnostics) { tokens, diags := LexExpression(src, filename, start) peeker := newPeeker(tokens, false) parser := &parser{peeker: peeker} // Bare traverals are always parsed in "ignore newlines" mode, as if // they were wrapped in parentheses. parser.PushIncludeNewlines(false) expr, parseDiags := parser.ParseTraversalAbs() diags = append(diags, parseDiags...) parser.PopIncludeNewlines() // Panic if the parser uses incorrect stack discipline with the peeker's // newlines stack, since otherwise it will produce confusing downstream // errors. peeker.AssertEmptyIncludeNewlinesStack() return expr, diags } // LexConfig performs lexical analysis on the given buffer, treating it as a // whole HCL config file, and returns the resulting tokens. // // Only minimal validation is done during lexical analysis, so the returned // diagnostics may include errors about lexical issues such as bad character // encodings or unrecognized characters, but full parsing is required to // detect _all_ syntax errors. func LexConfig(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) { tokens := scanTokens(src, filename, start, scanNormal) diags := checkInvalidTokens(tokens) return tokens, diags } // LexExpression performs lexical analysis on the given buffer, treating it as // a standalone HCL expression, and returns the resulting tokens. // // Only minimal validation is done during lexical analysis, so the returned // diagnostics may include errors about lexical issues such as bad character // encodings or unrecognized characters, but full parsing is required to // detect _all_ syntax errors. func LexExpression(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) { // This is actually just the same thing as LexConfig, since configs // and expressions lex in the same way. tokens := scanTokens(src, filename, start, scanNormal) diags := checkInvalidTokens(tokens) return tokens, diags } // LexTemplate performs lexical analysis on the given buffer, treating it as a // standalone HCL template, and returns the resulting tokens. // // Only minimal validation is done during lexical analysis, so the returned // diagnostics may include errors about lexical issues such as bad character // encodings or unrecognized characters, but full parsing is required to // detect _all_ syntax errors. func LexTemplate(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) { tokens := scanTokens(src, filename, start, scanTemplate) diags := checkInvalidTokens(tokens) return tokens, diags } // ValidIdentifier tests if the given string could be a valid identifier in // a native syntax expression. // // This is useful when accepting names from the user that will be used as // variable or attribute names in the scope, to ensure that any name chosen // will be traversable using the variable or attribute traversal syntax. func ValidIdentifier(s string) bool { // This is a kinda-expensive way to do something pretty simple, but it // is easiest to do with our existing scanner-related infrastructure here // and nobody should be validating identifiers in a tight loop. 
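// For example, per TestValidIdentifier in public_test.go: "aws_instance",
// "foo-bar" and "héllo" are valid identifiers, while "", "hello world" and
// "1blah1blah" are not.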
tokens := scanTokens([]byte(s), "", hcl.Pos{}, scanIdentOnly) return len(tokens) == 2 && tokens[0].Type == TokenIdent && tokens[1].Type == TokenEOF } hcl-2.14.1/hclsyntax/public_test.go000066400000000000000000000025731431334125700172400ustar00rootroot00000000000000package hclsyntax import ( "testing" "github.com/hashicorp/hcl/v2" ) func TestValidIdentifier(t *testing.T) { tests := []struct { Input string Want bool }{ {"", false}, {"hello", true}, {"hello.world", false}, {"hello ", false}, {" hello", false}, {"hello\n", false}, {"hello world", false}, {"aws_instance", true}, {"aws.instance", false}, {"foo-bar", true}, {"foo--bar", true}, {"foo_", true}, {"foo-", true}, {"_foobar", true}, {"-foobar", false}, {"blah1", true}, {"blah1blah", true}, {"1blah1blah", false}, {"héllo", true}, // combining acute accent {"Χαίρετε", true}, {"звать", true}, {"今日は", true}, {"\x80", false}, // UTF-8 continuation without an introducer {"a\x80", false}, // UTF-8 continuation after a non-introducer } for _, test := range tests { t.Run(test.Input, func(t *testing.T) { got := ValidIdentifier(test.Input) if got != test.Want { t.Errorf("wrong result %#v; want %#v", got, test.Want) } }) } } var T Tokens func BenchmarkLexConfig(b *testing.B) { src := []byte("module \"once\" {\n source = \"../modules/foo\"\n}\n\nmodule \"twice\" {\n source = \"../modules/foo\"\n}\n") filename := "testdata/dave/main.tf" start := hcl.Pos{Line: 1, Column: 1, Byte: 0} var tokens Tokens for i := 0; i < b.N; i++ { tokens, _ = LexConfig(src, filename, start) } T = tokens } hcl-2.14.1/hclsyntax/scan_string_lit.go000066400000000000000000000142401431334125700200770ustar00rootroot00000000000000//line scan_string_lit.rl:1 package hclsyntax // This file is generated from scan_string_lit.rl. DO NOT EDIT. 
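// Behavior sketch (derived from scan_string_lit_test.go, added here for
// reference): scanStringLit splits a string literal body into runs of
// literal characters and escape/interpolation-introducer tokens. For a
// quoted literal, `hello\nworld` yields the slices `hello`, `\n`, `world`;
// in unquoted mode the backslash escape is not special, so the same input
// is returned as the single slice `hello\nworld`.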
//line scan_string_lit.go:9 var _hclstrtok_actions []byte = []byte{ 0, 1, 0, 1, 1, 2, 1, 0, } var _hclstrtok_key_offsets []byte = []byte{ 0, 0, 2, 4, 6, 10, 14, 18, 22, 27, 31, 36, 41, 46, 51, 57, 62, 74, 85, 96, 107, 118, 129, 140, 151, } var _hclstrtok_trans_keys []byte = []byte{ 128, 191, 128, 191, 128, 191, 10, 13, 36, 37, 10, 13, 36, 37, 10, 13, 36, 37, 10, 13, 36, 37, 10, 13, 36, 37, 123, 10, 13, 36, 37, 10, 13, 36, 37, 92, 10, 13, 36, 37, 92, 10, 13, 36, 37, 92, 10, 13, 36, 37, 92, 10, 13, 36, 37, 92, 123, 10, 13, 36, 37, 92, 85, 117, 128, 191, 192, 223, 224, 239, 240, 247, 248, 255, 10, 13, 36, 37, 92, 48, 57, 65, 70, 97, 102, 10, 13, 36, 37, 92, 48, 57, 65, 70, 97, 102, 10, 13, 36, 37, 92, 48, 57, 65, 70, 97, 102, 10, 13, 36, 37, 92, 48, 57, 65, 70, 97, 102, 10, 13, 36, 37, 92, 48, 57, 65, 70, 97, 102, 10, 13, 36, 37, 92, 48, 57, 65, 70, 97, 102, 10, 13, 36, 37, 92, 48, 57, 65, 70, 97, 102, 10, 13, 36, 37, 92, 48, 57, 65, 70, 97, 102, } var _hclstrtok_single_lengths []byte = []byte{ 0, 0, 0, 0, 4, 4, 4, 4, 5, 4, 5, 5, 5, 5, 6, 5, 2, 5, 5, 5, 5, 5, 5, 5, 5, } var _hclstrtok_range_lengths []byte = []byte{ 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 3, 3, 3, 3, 3, 3, 3, 3, } var _hclstrtok_index_offsets []byte = []byte{ 0, 0, 2, 4, 6, 11, 16, 21, 26, 32, 37, 43, 49, 55, 61, 68, 74, 82, 91, 100, 109, 118, 127, 136, 145, } var _hclstrtok_indicies []byte = []byte{ 0, 1, 2, 1, 3, 1, 5, 6, 7, 8, 4, 10, 11, 12, 13, 9, 14, 11, 12, 13, 9, 10, 11, 15, 13, 9, 10, 11, 12, 13, 14, 9, 10, 11, 12, 15, 9, 17, 18, 19, 20, 21, 16, 23, 24, 25, 26, 27, 22, 0, 24, 25, 26, 27, 22, 23, 24, 28, 26, 27, 22, 23, 24, 25, 26, 27, 0, 22, 23, 24, 25, 28, 27, 22, 29, 30, 22, 2, 3, 31, 22, 0, 23, 24, 25, 26, 27, 32, 32, 32, 22, 23, 24, 25, 26, 27, 33, 33, 33, 22, 23, 24, 25, 26, 27, 34, 34, 34, 22, 23, 24, 25, 26, 27, 30, 30, 30, 22, 23, 24, 25, 26, 27, 35, 35, 35, 22, 23, 24, 25, 26, 27, 36, 36, 36, 22, 23, 24, 25, 26, 27, 37, 37, 37, 22, 23, 24, 25, 26, 27, 0, 0, 0, 22, } var _hclstrtok_trans_targs []byte = []byte{ 11, 0, 1, 2, 4, 5, 6, 7, 9, 4, 5, 6, 7, 9, 5, 8, 10, 11, 12, 13, 15, 16, 10, 11, 12, 13, 15, 16, 14, 17, 21, 3, 18, 19, 20, 22, 23, 24, } var _hclstrtok_trans_actions []byte = []byte{ 0, 0, 0, 0, 0, 1, 1, 1, 1, 3, 5, 5, 5, 5, 0, 0, 0, 1, 1, 1, 1, 1, 3, 5, 5, 5, 5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, } var _hclstrtok_eof_actions []byte = []byte{ 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, } const hclstrtok_start int = 4 const hclstrtok_first_final int = 4 const hclstrtok_error int = 0 const hclstrtok_en_quoted int = 10 const hclstrtok_en_unquoted int = 4 //line scan_string_lit.rl:10 func scanStringLit(data []byte, quoted bool) [][]byte { var ret [][]byte //line scan_string_lit.rl:61 // Ragel state p := 0 // "Pointer" into data pe := len(data) // End-of-data "pointer" ts := 0 te := 0 eof := pe var cs int // current state switch { case quoted: cs = hclstrtok_en_quoted default: cs = hclstrtok_en_unquoted } // Make Go compiler happy _ = ts _ = eof /*token := func () { ret = append(ret, data[ts:te]) }*/ //line scan_string_lit.go:154 { } //line scan_string_lit.go:158 { var _klen int var _trans int var _acts int var _nacts uint var _keys int if p == pe { goto _test_eof } if cs == 0 { goto _out } _resume: _keys = int(_hclstrtok_key_offsets[cs]) _trans = int(_hclstrtok_index_offsets[cs]) _klen = int(_hclstrtok_single_lengths[cs]) if _klen > 0 { _lower := int(_keys) var _mid int _upper := int(_keys + _klen - 1) for { if _upper < _lower { break } _mid = _lower + ((_upper - 
_lower) >> 1) switch { case data[p] < _hclstrtok_trans_keys[_mid]: _upper = _mid - 1 case data[p] > _hclstrtok_trans_keys[_mid]: _lower = _mid + 1 default: _trans += int(_mid - int(_keys)) goto _match } } _keys += _klen _trans += _klen } _klen = int(_hclstrtok_range_lengths[cs]) if _klen > 0 { _lower := int(_keys) var _mid int _upper := int(_keys + (_klen << 1) - 2) for { if _upper < _lower { break } _mid = _lower + (((_upper - _lower) >> 1) & ^1) switch { case data[p] < _hclstrtok_trans_keys[_mid]: _upper = _mid - 2 case data[p] > _hclstrtok_trans_keys[_mid+1]: _lower = _mid + 2 default: _trans += int((_mid - int(_keys)) >> 1) goto _match } } _trans += _klen } _match: _trans = int(_hclstrtok_indicies[_trans]) cs = int(_hclstrtok_trans_targs[_trans]) if _hclstrtok_trans_actions[_trans] == 0 { goto _again } _acts = int(_hclstrtok_trans_actions[_trans]) _nacts = uint(_hclstrtok_actions[_acts]) _acts++ for ; _nacts > 0; _nacts-- { _acts++ switch _hclstrtok_actions[_acts-1] { case 0: //line scan_string_lit.rl:40 // If te is behind p then we've skipped over some literal // characters which we must now return. if te < p { ret = append(ret, data[te:p]) } ts = p case 1: //line scan_string_lit.rl:48 te = p ret = append(ret, data[ts:te]) //line scan_string_lit.go:253 } } _again: if cs == 0 { goto _out } p++ if p != pe { goto _resume } _test_eof: { } if p == eof { __acts := _hclstrtok_eof_actions[cs] __nacts := uint(_hclstrtok_actions[__acts]) __acts++ for ; __nacts > 0; __nacts-- { __acts++ switch _hclstrtok_actions[__acts-1] { case 1: //line scan_string_lit.rl:48 te = p ret = append(ret, data[ts:te]) //line scan_string_lit.go:278 } } } _out: { } } //line scan_string_lit.rl:89 if te < p { // Collect any leftover literal characters at the end of the input ret = append(ret, data[te:p]) } // If we fall out here without being in a final state then we've // encountered something that the scanner can't match, which should // be impossible (the scanner matches all bytes _somehow_) but we'll // tolerate it and let the caller deal with it. if cs < hclstrtok_first_final { ret = append(ret, data[p:len(data)]) } return ret } hcl-2.14.1/hclsyntax/scan_string_lit.rl000066400000000000000000000056261431334125700201170ustar00rootroot00000000000000 package hclsyntax // This file is generated from scan_string_lit.rl. DO NOT EDIT. %%{ # (except you are actually in scan_string_lit.rl here, so edit away!) machine hclstrtok; write data; }%% func scanStringLit(data []byte, quoted bool) [][]byte { var ret [][]byte %%{ include UnicodeDerived "unicode_derived.rl"; UTF8Cont = 0x80 .. 0xBF; AnyUTF8 = ( 0x00..0x7F | 0xC0..0xDF . UTF8Cont | 0xE0..0xEF . UTF8Cont . UTF8Cont | 0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont ); BadUTF8 = any - AnyUTF8; Hex = ('0'..'9' | 'a'..'f' | 'A'..'F'); # Our goal with this patterns is to capture user intent as best as # possible, even if the input is invalid. The caller will then verify # whether each token is valid and generate suitable error messages # if not. UnicodeEscapeShort = "\\u" . Hex{0,4}; UnicodeEscapeLong = "\\U" . Hex{0,8}; UnicodeEscape = (UnicodeEscapeShort | UnicodeEscapeLong); SimpleEscape = "\\" . (AnyUTF8 - ('U'|'u'))?; TemplateEscape = ("$" . ("$" . ("{"?))?) | ("%" . ("%" . ("{"?))?); Newline = ("\r\n" | "\r" | "\n"); action Begin { // If te is behind p then we've skipped over some literal // characters which we must now return. 
if te < p { ret = append(ret, data[te:p]) } ts = p; } action End { te = p; ret = append(ret, data[ts:te]); } QuotedToken = (UnicodeEscape | SimpleEscape | TemplateEscape | Newline) >Begin %End; UnquotedToken = (TemplateEscape | Newline) >Begin %End; QuotedLiteral = (any - ("\\" | "$" | "%" | "\r" | "\n")); UnquotedLiteral = (any - ("$" | "%" | "\r" | "\n")); quoted := (QuotedToken | QuotedLiteral)**; unquoted := (UnquotedToken | UnquotedLiteral)**; }%% // Ragel state p := 0 // "Pointer" into data pe := len(data) // End-of-data "pointer" ts := 0 te := 0 eof := pe var cs int // current state switch { case quoted: cs = hclstrtok_en_quoted default: cs = hclstrtok_en_unquoted } // Make Go compiler happy _ = ts _ = eof /*token := func () { ret = append(ret, data[ts:te]) }*/ %%{ write init nocs; write exec; }%% if te < p { // Collect any leftover literal characters at the end of the input ret = append(ret, data[te:p]) } // If we fall out here without being in a final state then we've // encountered something that the scanner can't match, which should // be impossible (the scanner matches all bytes _somehow_) but we'll // tolerate it and let the caller deal with it. if cs < hclstrtok_first_final { ret = append(ret, data[p:len(data)]) } return ret } hcl-2.14.1/hclsyntax/scan_string_lit_test.go000066400000000000000000000077541431334125700211520ustar00rootroot00000000000000package hclsyntax import ( "reflect" "testing" "github.com/davecgh/go-spew/spew" ) func TestScanStringLit(t *testing.T) { tests := []struct { Input string WantQuoted []string WantUnquoted []string }{ { ``, []string{}, []string{}, }, { `hello`, []string{`hello`}, []string{`hello`}, }, { `hello world`, []string{`hello world`}, []string{`hello world`}, }, { `hello\nworld`, []string{`hello`, `\n`, `world`}, []string{`hello\nworld`}, }, { `hello\🥁world`, []string{`hello`, `\🥁`, `world`}, []string{`hello\🥁world`}, }, { `hello\uabcdworld`, []string{`hello`, `\uabcd`, `world`}, []string{`hello\uabcdworld`}, }, { `hello\uabcdabcdworld`, []string{`hello`, `\uabcd`, `abcdworld`}, []string{`hello\uabcdabcdworld`}, }, { `hello\uabcworld`, []string{`hello`, `\uabc`, `world`}, []string{`hello\uabcworld`}, }, { `hello\U01234567world`, []string{`hello`, `\U01234567`, `world`}, []string{`hello\U01234567world`}, }, { `hello\U012345670123world`, []string{`hello`, `\U01234567`, `0123world`}, []string{`hello\U012345670123world`}, }, { `hello\Uabcdworld`, []string{`hello`, `\Uabcd`, `world`}, []string{`hello\Uabcdworld`}, }, { `hello\Uabcworld`, []string{`hello`, `\Uabc`, `world`}, []string{`hello\Uabcworld`}, }, { `hello\uworld`, []string{`hello`, `\u`, `world`}, []string{`hello\uworld`}, }, { `hello\Uworld`, []string{`hello`, `\U`, `world`}, []string{`hello\Uworld`}, }, { `hello\u`, []string{`hello`, `\u`}, []string{`hello\u`}, }, { `hello\U`, []string{`hello`, `\U`}, []string{`hello\U`}, }, { `hello\`, []string{`hello`, `\`}, []string{`hello\`}, }, { `hello$${world}`, []string{`hello`, `$${`, `world}`}, []string{`hello`, `$${`, `world}`}, }, { `hello$$world`, []string{`hello`, `$$`, `world`}, []string{`hello`, `$$`, `world`}, }, { `hello$world`, []string{`hello`, `$`, `world`}, []string{`hello`, `$`, `world`}, }, { `hello$`, []string{`hello`, `$`}, []string{`hello`, `$`}, }, { `hello$${`, []string{`hello`, `$${`}, []string{`hello`, `$${`}, }, { `hello%%{world}`, []string{`hello`, `%%{`, `world}`}, []string{`hello`, `%%{`, `world}`}, }, { `hello%%world`, []string{`hello`, `%%`, `world`}, []string{`hello`, `%%`, `world`}, }, { `hello%world`, 
[]string{`hello`, `%`, `world`}, []string{`hello`, `%`, `world`}, }, { `hello%`, []string{`hello`, `%`}, []string{`hello`, `%`}, }, { `hello%%{`, []string{`hello`, `%%{`}, []string{`hello`, `%%{`}, }, { `hello\${world}`, []string{`hello`, `\$`, `{world}`}, []string{`hello\`, `$`, `{world}`}, }, { `hello\%{world}`, []string{`hello`, `\%`, `{world}`}, []string{`hello\`, `%`, `{world}`}, }, { "hello\nworld", []string{`hello`, "\n", `world`}, []string{`hello`, "\n", `world`}, }, { "hello\rworld", []string{`hello`, "\r", `world`}, []string{`hello`, "\r", `world`}, }, { "hello\r\nworld", []string{`hello`, "\r\n", `world`}, []string{`hello`, "\r\n", `world`}, }, } for _, test := range tests { t.Run(test.Input, func(t *testing.T) { t.Run("quoted", func(t *testing.T) { slices := scanStringLit([]byte(test.Input), true) got := make([]string, len(slices)) for i, slice := range slices { got[i] = string(slice) } if !reflect.DeepEqual(got, test.WantQuoted) { t.Errorf("wrong result\ngot: %swant: %s", spew.Sdump(got), spew.Sdump(test.WantQuoted)) } }) t.Run("unquoted", func(t *testing.T) { slices := scanStringLit([]byte(test.Input), false) got := make([]string, len(slices)) for i, slice := range slices { got[i] = string(slice) } if !reflect.DeepEqual(got, test.WantUnquoted) { t.Errorf("wrong result\ngot: %swant: %s", spew.Sdump(got), spew.Sdump(test.WantUnquoted)) } }) }) } } hcl-2.14.1/hclsyntax/scan_tokens.go000066400000000000000000005312231431334125700172310ustar00rootroot00000000000000//line scan_tokens.rl:1 package hclsyntax import ( "bytes" "github.com/hashicorp/hcl/v2" ) // This file is generated from scan_tokens.rl. DO NOT EDIT. //line scan_tokens.go:15 var _hcltok_actions []byte = []byte{ 0, 1, 0, 1, 1, 1, 3, 1, 4, 1, 7, 1, 8, 1, 9, 1, 10, 1, 11, 1, 12, 1, 13, 1, 14, 1, 15, 1, 16, 1, 17, 1, 18, 1, 19, 1, 20, 1, 23, 1, 24, 1, 25, 1, 26, 1, 27, 1, 28, 1, 29, 1, 30, 1, 31, 1, 32, 1, 35, 1, 36, 1, 37, 1, 38, 1, 39, 1, 40, 1, 41, 1, 42, 1, 43, 1, 44, 1, 47, 1, 48, 1, 49, 1, 50, 1, 51, 1, 52, 1, 53, 1, 56, 1, 57, 1, 58, 1, 59, 1, 60, 1, 61, 1, 62, 1, 63, 1, 64, 1, 65, 1, 66, 1, 67, 1, 68, 1, 69, 1, 70, 1, 71, 1, 72, 1, 73, 1, 74, 1, 75, 1, 76, 1, 77, 1, 78, 1, 79, 1, 80, 1, 81, 1, 82, 1, 83, 1, 84, 1, 85, 2, 0, 14, 2, 0, 25, 2, 0, 29, 2, 0, 37, 2, 0, 41, 2, 1, 2, 2, 4, 5, 2, 4, 6, 2, 4, 21, 2, 4, 22, 2, 4, 33, 2, 4, 34, 2, 4, 45, 2, 4, 46, 2, 4, 54, 2, 4, 55, } var _hcltok_key_offsets []int16 = []int16{ 0, 0, 1, 2, 4, 9, 13, 15, 57, 98, 144, 145, 149, 155, 155, 157, 159, 168, 174, 181, 182, 185, 186, 190, 195, 204, 208, 212, 220, 222, 224, 226, 229, 261, 263, 265, 269, 273, 276, 287, 300, 319, 332, 348, 360, 376, 391, 412, 422, 434, 445, 459, 474, 484, 496, 505, 517, 519, 523, 544, 553, 563, 569, 575, 576, 625, 627, 631, 633, 639, 646, 654, 661, 664, 670, 674, 678, 680, 684, 688, 692, 698, 706, 714, 720, 722, 726, 728, 734, 738, 742, 746, 750, 755, 762, 768, 770, 772, 776, 778, 784, 788, 792, 802, 807, 821, 836, 838, 846, 848, 853, 867, 872, 874, 878, 879, 883, 889, 895, 905, 915, 926, 934, 937, 940, 944, 948, 950, 953, 953, 956, 958, 988, 990, 992, 996, 1001, 1005, 1010, 1012, 1014, 1016, 1025, 1029, 1033, 1039, 1041, 1049, 1057, 1069, 1072, 1078, 1082, 1084, 1088, 1108, 1110, 1112, 1123, 1129, 1131, 1133, 1135, 1139, 1145, 1151, 1153, 1158, 1162, 1164, 1172, 1190, 1230, 1240, 1244, 1246, 1248, 1249, 1253, 1257, 1261, 1265, 1269, 1274, 1278, 1282, 1286, 1288, 1290, 1294, 1304, 1308, 1310, 1314, 1318, 1322, 1335, 1337, 1339, 1343, 1345, 1349, 1351, 1353, 1383, 1387, 1391, 1395, 1398, 1405, 1410, 1421, 
1425, 1441, 1455, 1459, 1464, 1468, 1472, 1478, 1480, 1486, 1488, 1492, 1494, 1500, 1505, 1510, 1520, 1522, 1524, 1528, 1532, 1534, 1547, 1549, 1553, 1557, 1565, 1567, 1571, 1573, 1574, 1577, 1582, 1584, 1586, 1590, 1592, 1596, 1602, 1622, 1628, 1634, 1636, 1637, 1647, 1648, 1656, 1663, 1665, 1668, 1670, 1672, 1674, 1679, 1683, 1687, 1692, 1702, 1712, 1716, 1720, 1734, 1760, 1770, 1772, 1774, 1777, 1779, 1782, 1784, 1788, 1790, 1791, 1795, 1797, 1800, 1807, 1815, 1817, 1819, 1823, 1825, 1831, 1842, 1845, 1847, 1851, 1856, 1886, 1891, 1893, 1896, 1901, 1915, 1922, 1936, 1941, 1954, 1958, 1971, 1976, 1994, 1995, 2004, 2008, 2020, 2025, 2032, 2039, 2046, 2048, 2052, 2074, 2079, 2080, 2084, 2086, 2136, 2139, 2150, 2154, 2156, 2162, 2168, 2170, 2175, 2177, 2181, 2183, 2184, 2186, 2188, 2194, 2196, 2198, 2202, 2208, 2221, 2223, 2229, 2233, 2241, 2252, 2260, 2263, 2293, 2299, 2302, 2307, 2309, 2313, 2317, 2321, 2323, 2330, 2332, 2341, 2348, 2356, 2358, 2378, 2390, 2394, 2396, 2414, 2453, 2455, 2459, 2461, 2468, 2472, 2500, 2502, 2504, 2506, 2508, 2511, 2513, 2517, 2521, 2523, 2526, 2528, 2530, 2533, 2535, 2537, 2538, 2540, 2542, 2546, 2550, 2553, 2566, 2568, 2574, 2578, 2580, 2584, 2588, 2602, 2605, 2614, 2616, 2620, 2626, 2626, 2628, 2630, 2639, 2645, 2652, 2653, 2656, 2657, 2661, 2666, 2675, 2679, 2683, 2691, 2693, 2695, 2697, 2700, 2732, 2734, 2736, 2740, 2744, 2747, 2758, 2771, 2790, 2803, 2819, 2831, 2847, 2862, 2883, 2893, 2905, 2916, 2930, 2945, 2955, 2967, 2976, 2988, 2990, 2994, 3015, 3024, 3034, 3040, 3046, 3047, 3096, 3098, 3102, 3104, 3110, 3117, 3125, 3132, 3135, 3141, 3145, 3149, 3151, 3155, 3159, 3163, 3169, 3177, 3185, 3191, 3193, 3197, 3199, 3205, 3209, 3213, 3217, 3221, 3226, 3233, 3239, 3241, 3243, 3247, 3249, 3255, 3259, 3263, 3273, 3278, 3292, 3307, 3309, 3317, 3319, 3324, 3338, 3343, 3345, 3349, 3350, 3354, 3360, 3366, 3376, 3386, 3397, 3405, 3408, 3411, 3415, 3419, 3421, 3424, 3424, 3427, 3429, 3459, 3461, 3463, 3467, 3472, 3476, 3481, 3483, 3485, 3487, 3496, 3500, 3504, 3510, 3512, 3520, 3528, 3540, 3543, 3549, 3553, 3555, 3559, 3579, 3581, 3583, 3594, 3600, 3602, 3604, 3606, 3610, 3616, 3622, 3624, 3629, 3633, 3635, 3643, 3661, 3701, 3711, 3715, 3717, 3719, 3720, 3724, 3728, 3732, 3736, 3740, 3745, 3749, 3753, 3757, 3759, 3761, 3765, 3775, 3779, 3781, 3785, 3789, 3793, 3806, 3808, 3810, 3814, 3816, 3820, 3822, 3824, 3854, 3858, 3862, 3866, 3869, 3876, 3881, 3892, 3896, 3912, 3926, 3930, 3935, 3939, 3943, 3949, 3951, 3957, 3959, 3963, 3965, 3971, 3976, 3981, 3991, 3993, 3995, 3999, 4003, 4005, 4018, 4020, 4024, 4028, 4036, 4038, 4042, 4044, 4045, 4048, 4053, 4055, 4057, 4061, 4063, 4067, 4073, 4093, 4099, 4105, 4107, 4108, 4118, 4119, 4127, 4134, 4136, 4139, 4141, 4143, 4145, 4150, 4154, 4158, 4163, 4173, 4183, 4187, 4191, 4205, 4231, 4241, 4243, 4245, 4248, 4250, 4253, 4255, 4259, 4261, 4262, 4266, 4268, 4270, 4277, 4281, 4288, 4295, 4304, 4320, 4332, 4350, 4361, 4373, 4381, 4399, 4407, 4437, 4440, 4450, 4460, 4472, 4483, 4492, 4505, 4517, 4521, 4527, 4554, 4563, 4566, 4571, 4577, 4582, 4603, 4607, 4613, 4613, 4620, 4629, 4637, 4640, 4644, 4650, 4656, 4659, 4663, 4670, 4676, 4685, 4694, 4698, 4702, 4706, 4710, 4717, 4721, 4725, 4735, 4741, 4745, 4751, 4755, 4758, 4764, 4770, 4782, 4786, 4790, 4800, 4804, 4815, 4817, 4819, 4823, 4835, 4840, 4864, 4868, 4874, 4896, 4905, 4909, 4912, 4913, 4921, 4929, 4935, 4945, 4952, 4970, 4973, 4976, 4984, 4990, 4994, 4998, 5002, 5008, 5016, 5021, 5027, 5031, 5039, 5046, 5050, 5057, 5063, 5071, 5079, 5085, 5091, 5102, 5106, 5118, 5127, 
5144, 5161, 5164, 5168, 5170, 5176, 5178, 5182, 5197, 5201, 5205, 5209, 5213, 5217, 5219, 5225, 5230, 5234, 5240, 5247, 5250, 5268, 5270, 5315, 5321, 5327, 5331, 5335, 5341, 5345, 5351, 5357, 5364, 5366, 5372, 5378, 5382, 5386, 5394, 5407, 5413, 5420, 5428, 5434, 5443, 5449, 5453, 5458, 5462, 5470, 5474, 5478, 5508, 5514, 5520, 5526, 5532, 5539, 5545, 5552, 5557, 5567, 5571, 5578, 5584, 5588, 5595, 5599, 5605, 5608, 5612, 5616, 5620, 5624, 5629, 5634, 5638, 5649, 5653, 5657, 5663, 5671, 5675, 5692, 5696, 5702, 5712, 5718, 5724, 5727, 5732, 5741, 5745, 5749, 5755, 5759, 5765, 5773, 5791, 5792, 5802, 5803, 5812, 5820, 5822, 5825, 5827, 5829, 5831, 5836, 5849, 5853, 5868, 5897, 5908, 5910, 5914, 5918, 5923, 5927, 5929, 5936, 5940, 5948, 5952, 5964, 5966, 5968, 5970, 5972, 5974, 5975, 5977, 5979, 5981, 5983, 5985, 5986, 5988, 5990, 5992, 5994, 5996, 6000, 6006, 6006, 6008, 6010, 6019, 6025, 6032, 6033, 6036, 6037, 6041, 6046, 6055, 6059, 6063, 6071, 6073, 6075, 6077, 6080, 6112, 6114, 6116, 6120, 6124, 6127, 6138, 6151, 6170, 6183, 6199, 6211, 6227, 6242, 6263, 6273, 6285, 6296, 6310, 6325, 6335, 6347, 6356, 6368, 6370, 6374, 6395, 6404, 6414, 6420, 6426, 6427, 6476, 6478, 6482, 6484, 6490, 6497, 6505, 6512, 6515, 6521, 6525, 6529, 6531, 6535, 6539, 6543, 6549, 6557, 6565, 6571, 6573, 6577, 6579, 6585, 6589, 6593, 6597, 6601, 6606, 6613, 6619, 6621, 6623, 6627, 6629, 6635, 6639, 6643, 6653, 6658, 6672, 6687, 6689, 6697, 6699, 6704, 6718, 6723, 6725, 6729, 6730, 6734, 6740, 6746, 6756, 6766, 6777, 6785, 6788, 6791, 6795, 6799, 6801, 6804, 6804, 6807, 6809, 6839, 6841, 6843, 6847, 6852, 6856, 6861, 6863, 6865, 6867, 6876, 6880, 6884, 6890, 6892, 6900, 6908, 6920, 6923, 6929, 6933, 6935, 6939, 6959, 6961, 6963, 6974, 6980, 6982, 6984, 6986, 6990, 6996, 7002, 7004, 7009, 7013, 7015, 7023, 7041, 7081, 7091, 7095, 7097, 7099, 7100, 7104, 7108, 7112, 7116, 7120, 7125, 7129, 7133, 7137, 7139, 7141, 7145, 7155, 7159, 7161, 7165, 7169, 7173, 7186, 7188, 7190, 7194, 7196, 7200, 7202, 7204, 7234, 7238, 7242, 7246, 7249, 7256, 7261, 7272, 7276, 7292, 7306, 7310, 7315, 7319, 7323, 7329, 7331, 7337, 7339, 7343, 7345, 7351, 7356, 7361, 7371, 7373, 7375, 7379, 7383, 7385, 7398, 7400, 7404, 7408, 7416, 7418, 7422, 7424, 7425, 7428, 7433, 7435, 7437, 7441, 7443, 7447, 7453, 7473, 7479, 7485, 7487, 7488, 7498, 7499, 7507, 7514, 7516, 7519, 7521, 7523, 7525, 7530, 7534, 7538, 7543, 7553, 7563, 7567, 7571, 7585, 7611, 7621, 7623, 7625, 7628, 7630, 7633, 7635, 7639, 7641, 7642, 7646, 7648, 7650, 7657, 7661, 7668, 7675, 7684, 7700, 7712, 7730, 7741, 7753, 7761, 7779, 7787, 7817, 7820, 7830, 7840, 7852, 7863, 7872, 7885, 7897, 7901, 7907, 7934, 7943, 7946, 7951, 7957, 7962, 7983, 7987, 7993, 7993, 8000, 8009, 8017, 8020, 8024, 8030, 8036, 8039, 8043, 8050, 8056, 8065, 8074, 8078, 8082, 8086, 8090, 8097, 8101, 8105, 8115, 8121, 8125, 8131, 8135, 8138, 8144, 8150, 8162, 8166, 8170, 8180, 8184, 8195, 8197, 8199, 8203, 8215, 8220, 8244, 8248, 8254, 8276, 8285, 8289, 8292, 8293, 8301, 8309, 8315, 8325, 8332, 8350, 8353, 8356, 8364, 8370, 8374, 8378, 8382, 8388, 8396, 8401, 8407, 8411, 8419, 8426, 8430, 8437, 8443, 8451, 8459, 8465, 8471, 8482, 8486, 8498, 8507, 8524, 8541, 8544, 8548, 8550, 8556, 8558, 8562, 8577, 8581, 8585, 8589, 8593, 8597, 8599, 8605, 8610, 8614, 8620, 8627, 8630, 8648, 8650, 8695, 8701, 8707, 8711, 8715, 8721, 8725, 8731, 8737, 8744, 8746, 8752, 8758, 8762, 8766, 8774, 8787, 8793, 8800, 8808, 8814, 8823, 8829, 8833, 8838, 8842, 8850, 8854, 8858, 8888, 8894, 8900, 8906, 8912, 8919, 8925, 8932, 8937, 
8947, 8951, 8958, 8964, 8968, 8975, 8979, 8985, 8988, 8992, 8996, 9000, 9004, 9009, 9014, 9018, 9029, 9033, 9037, 9043, 9051, 9055, 9072, 9076, 9082, 9092, 9098, 9104, 9107, 9112, 9121, 9125, 9129, 9135, 9139, 9145, 9153, 9171, 9172, 9182, 9183, 9192, 9200, 9202, 9205, 9207, 9209, 9211, 9216, 9229, 9233, 9248, 9277, 9288, 9290, 9294, 9298, 9303, 9307, 9309, 9316, 9320, 9328, 9332, 9407, 9409, 9410, 9411, 9412, 9413, 9414, 9416, 9421, 9423, 9425, 9426, 9470, 9471, 9472, 9474, 9479, 9483, 9483, 9485, 9487, 9498, 9508, 9516, 9517, 9519, 9520, 9524, 9528, 9538, 9542, 9549, 9560, 9567, 9571, 9577, 9588, 9620, 9669, 9684, 9699, 9704, 9706, 9711, 9743, 9751, 9753, 9775, 9797, 9799, 9815, 9831, 9833, 9835, 9835, 9836, 9837, 9838, 9840, 9841, 9853, 9855, 9857, 9859, 9873, 9887, 9889, 9892, 9895, 9897, 9898, 9899, 9901, 9903, 9905, 9919, 9933, 9935, 9938, 9941, 9943, 9944, 9945, 9947, 9949, 9951, 10000, 10044, 10046, 10051, 10055, 10055, 10057, 10059, 10070, 10080, 10088, 10089, 10091, 10092, 10096, 10100, 10110, 10114, 10121, 10132, 10139, 10143, 10149, 10160, 10192, 10241, 10256, 10271, 10276, 10278, 10283, 10315, 10323, 10325, 10347, 10369, } var _hcltok_trans_keys []byte = []byte{ 46, 42, 42, 47, 46, 69, 101, 48, 57, 43, 45, 48, 57, 48, 57, 45, 95, 194, 195, 198, 199, 203, 205, 206, 207, 210, 212, 213, 214, 215, 216, 217, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 233, 234, 237, 239, 240, 65, 90, 97, 122, 196, 202, 208, 218, 229, 236, 95, 194, 195, 198, 199, 203, 205, 206, 207, 210, 212, 213, 214, 215, 216, 217, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 233, 234, 237, 239, 240, 65, 90, 97, 122, 196, 202, 208, 218, 229, 236, 10, 13, 45, 95, 194, 195, 198, 199, 203, 204, 205, 206, 207, 210, 212, 213, 214, 215, 216, 217, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 233, 234, 237, 239, 240, 243, 48, 57, 65, 90, 97, 122, 196, 218, 229, 236, 10, 170, 181, 183, 186, 128, 150, 152, 182, 184, 255, 192, 255, 128, 255, 173, 130, 133, 146, 159, 165, 171, 175, 255, 181, 190, 184, 185, 192, 255, 140, 134, 138, 142, 161, 163, 255, 182, 130, 136, 137, 176, 151, 152, 154, 160, 190, 136, 144, 192, 255, 135, 129, 130, 132, 133, 144, 170, 176, 178, 144, 154, 160, 191, 128, 169, 174, 255, 148, 169, 157, 158, 189, 190, 192, 255, 144, 255, 139, 140, 178, 255, 186, 128, 181, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 128, 173, 128, 155, 160, 180, 182, 189, 148, 161, 163, 255, 176, 164, 165, 132, 169, 177, 141, 142, 145, 146, 179, 181, 186, 187, 158, 133, 134, 137, 138, 143, 150, 152, 155, 164, 165, 178, 255, 188, 129, 131, 133, 138, 143, 144, 147, 168, 170, 176, 178, 179, 181, 182, 184, 185, 190, 255, 157, 131, 134, 137, 138, 142, 144, 146, 152, 159, 165, 182, 255, 129, 131, 133, 141, 143, 145, 147, 168, 170, 176, 178, 179, 181, 185, 188, 255, 134, 138, 142, 143, 145, 159, 164, 165, 176, 184, 186, 255, 129, 131, 133, 140, 143, 144, 147, 168, 170, 176, 178, 179, 181, 185, 188, 191, 177, 128, 132, 135, 136, 139, 141, 150, 151, 156, 157, 159, 163, 166, 175, 156, 130, 131, 133, 138, 142, 144, 146, 149, 153, 154, 158, 159, 163, 164, 168, 170, 174, 185, 190, 191, 144, 151, 128, 130, 134, 136, 138, 141, 166, 175, 128, 131, 133, 140, 142, 144, 146, 168, 170, 185, 189, 255, 133, 137, 151, 142, 148, 155, 159, 164, 165, 176, 255, 128, 131, 133, 140, 142, 144, 146, 168, 170, 179, 181, 185, 188, 191, 158, 128, 132, 134, 136, 138, 141, 149, 150, 160, 163, 166, 175, 177, 178, 129, 131, 133, 140, 
142, 144, 146, 186, 189, 255, 133, 137, 143, 147, 152, 158, 164, 165, 176, 185, 192, 255, 189, 130, 131, 133, 150, 154, 177, 179, 187, 138, 150, 128, 134, 143, 148, 152, 159, 166, 175, 178, 179, 129, 186, 128, 142, 144, 153, 132, 138, 141, 165, 167, 129, 130, 135, 136, 148, 151, 153, 159, 161, 163, 170, 171, 173, 185, 187, 189, 134, 128, 132, 136, 141, 144, 153, 156, 159, 128, 181, 183, 185, 152, 153, 160, 169, 190, 191, 128, 135, 137, 172, 177, 191, 128, 132, 134, 151, 153, 188, 134, 128, 129, 130, 131, 137, 138, 139, 140, 141, 142, 143, 144, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 173, 175, 176, 177, 178, 179, 181, 182, 183, 188, 189, 190, 191, 132, 152, 172, 184, 185, 187, 128, 191, 128, 137, 144, 255, 158, 159, 134, 187, 136, 140, 142, 143, 137, 151, 153, 142, 143, 158, 159, 137, 177, 142, 143, 182, 183, 191, 255, 128, 130, 133, 136, 150, 152, 255, 145, 150, 151, 155, 156, 160, 168, 178, 255, 128, 143, 160, 255, 182, 183, 190, 255, 129, 255, 173, 174, 192, 255, 129, 154, 160, 255, 171, 173, 185, 255, 128, 140, 142, 148, 160, 180, 128, 147, 160, 172, 174, 176, 178, 179, 148, 150, 152, 155, 158, 159, 170, 255, 139, 141, 144, 153, 160, 255, 184, 255, 128, 170, 176, 255, 182, 255, 128, 158, 160, 171, 176, 187, 134, 173, 176, 180, 128, 171, 176, 255, 138, 143, 155, 255, 128, 155, 160, 255, 159, 189, 190, 192, 255, 167, 128, 137, 144, 153, 176, 189, 140, 143, 154, 170, 180, 255, 180, 255, 128, 183, 128, 137, 141, 189, 128, 136, 144, 146, 148, 182, 184, 185, 128, 181, 187, 191, 150, 151, 158, 159, 152, 154, 156, 158, 134, 135, 142, 143, 190, 255, 190, 128, 180, 182, 188, 130, 132, 134, 140, 144, 147, 150, 155, 160, 172, 178, 180, 182, 188, 128, 129, 130, 131, 132, 133, 134, 176, 177, 178, 179, 180, 181, 182, 183, 191, 255, 129, 147, 149, 176, 178, 190, 192, 255, 144, 156, 161, 144, 156, 165, 176, 130, 135, 149, 164, 166, 168, 138, 147, 152, 157, 170, 185, 188, 191, 142, 133, 137, 160, 255, 137, 255, 128, 174, 176, 255, 159, 165, 170, 180, 255, 167, 173, 128, 165, 176, 255, 168, 174, 176, 190, 192, 255, 128, 150, 160, 166, 168, 174, 176, 182, 184, 190, 128, 134, 136, 142, 144, 150, 152, 158, 160, 191, 128, 129, 130, 131, 132, 133, 134, 135, 144, 145, 255, 133, 135, 161, 175, 177, 181, 184, 188, 160, 151, 152, 187, 192, 255, 133, 173, 177, 255, 143, 159, 187, 255, 176, 191, 182, 183, 184, 191, 192, 255, 150, 255, 128, 146, 147, 148, 152, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 129, 255, 141, 255, 144, 189, 141, 143, 172, 255, 191, 128, 175, 180, 189, 151, 159, 162, 255, 175, 137, 138, 184, 255, 183, 255, 168, 255, 128, 179, 188, 134, 143, 154, 159, 184, 186, 190, 255, 128, 173, 176, 255, 148, 159, 189, 255, 129, 142, 154, 159, 191, 255, 128, 182, 128, 141, 144, 153, 160, 182, 186, 255, 128, 130, 155, 157, 160, 175, 178, 182, 129, 134, 137, 142, 145, 150, 160, 166, 168, 174, 176, 255, 155, 166, 175, 128, 170, 172, 173, 176, 185, 158, 159, 160, 255, 164, 175, 135, 138, 188, 255, 164, 169, 171, 172, 173, 174, 175, 180, 181, 182, 183, 184, 185, 187, 188, 189, 190, 191, 165, 186, 174, 175, 154, 255, 190, 128, 134, 147, 151, 157, 168, 170, 182, 184, 188, 128, 129, 131, 132, 134, 255, 147, 255, 190, 255, 144, 145, 136, 175, 188, 255, 128, 143, 160, 175, 179, 180, 141, 143, 176, 180, 182, 255, 189, 255, 191, 144, 153, 161, 186, 129, 154, 166, 255, 191, 255, 130, 135, 138, 143, 146, 151, 154, 156, 144, 145, 146, 147, 148, 150, 151, 152, 155, 157, 158, 160, 170, 171, 172, 175, 161, 
169, 128, 129, 130, 131, 133, 135, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 152, 156, 157, 160, 161, 162, 163, 164, 166, 168, 169, 170, 171, 172, 173, 174, 176, 177, 153, 155, 178, 179, 128, 139, 141, 166, 168, 186, 188, 189, 191, 255, 142, 143, 158, 255, 187, 255, 128, 180, 189, 128, 156, 160, 255, 145, 159, 161, 255, 128, 159, 176, 255, 139, 143, 187, 255, 128, 157, 160, 255, 144, 132, 135, 150, 255, 158, 159, 170, 175, 148, 151, 188, 255, 128, 167, 176, 255, 164, 255, 183, 255, 128, 149, 160, 167, 136, 188, 128, 133, 138, 181, 183, 184, 191, 255, 150, 159, 183, 255, 128, 158, 160, 178, 180, 181, 128, 149, 160, 185, 128, 183, 190, 191, 191, 128, 131, 133, 134, 140, 147, 149, 151, 153, 179, 184, 186, 160, 188, 128, 156, 128, 135, 137, 166, 128, 181, 128, 149, 160, 178, 128, 145, 128, 178, 129, 130, 131, 132, 133, 135, 136, 138, 139, 140, 141, 144, 145, 146, 147, 150, 151, 152, 153, 154, 155, 156, 162, 163, 171, 176, 177, 178, 128, 134, 135, 165, 176, 190, 144, 168, 176, 185, 128, 180, 182, 191, 182, 144, 179, 155, 133, 137, 141, 143, 157, 255, 190, 128, 145, 147, 183, 136, 128, 134, 138, 141, 143, 157, 159, 168, 176, 255, 171, 175, 186, 255, 128, 131, 133, 140, 143, 144, 147, 168, 170, 176, 178, 179, 181, 185, 188, 191, 144, 151, 128, 132, 135, 136, 139, 141, 157, 163, 166, 172, 176, 180, 128, 138, 144, 153, 134, 136, 143, 154, 255, 128, 181, 184, 255, 129, 151, 158, 255, 129, 131, 133, 143, 154, 255, 128, 137, 128, 153, 157, 171, 176, 185, 160, 255, 170, 190, 192, 255, 128, 184, 128, 136, 138, 182, 184, 191, 128, 144, 153, 178, 255, 168, 144, 145, 183, 255, 128, 142, 145, 149, 129, 141, 144, 146, 147, 148, 175, 255, 132, 255, 128, 144, 129, 143, 144, 153, 145, 152, 135, 255, 160, 168, 169, 171, 172, 173, 174, 188, 189, 190, 191, 161, 167, 185, 255, 128, 158, 160, 169, 144, 173, 176, 180, 128, 131, 144, 153, 163, 183, 189, 255, 144, 255, 133, 143, 191, 255, 143, 159, 160, 128, 129, 255, 159, 160, 171, 172, 255, 173, 255, 179, 255, 128, 176, 177, 178, 128, 129, 171, 175, 189, 255, 128, 136, 144, 153, 157, 158, 133, 134, 137, 144, 145, 146, 147, 148, 149, 154, 155, 156, 157, 158, 159, 168, 169, 170, 150, 153, 165, 169, 173, 178, 187, 255, 131, 132, 140, 169, 174, 255, 130, 132, 149, 157, 173, 186, 188, 160, 161, 163, 164, 167, 168, 132, 134, 149, 157, 186, 139, 140, 191, 255, 134, 128, 132, 138, 144, 146, 255, 166, 167, 129, 155, 187, 149, 181, 143, 175, 137, 169, 131, 140, 141, 192, 255, 128, 182, 187, 255, 173, 180, 182, 255, 132, 155, 159, 161, 175, 128, 160, 163, 164, 165, 184, 185, 186, 161, 162, 128, 134, 136, 152, 155, 161, 163, 164, 166, 170, 133, 143, 151, 255, 139, 143, 154, 255, 164, 167, 185, 187, 128, 131, 133, 159, 161, 162, 169, 178, 180, 183, 130, 135, 137, 139, 148, 151, 153, 155, 157, 159, 164, 190, 141, 143, 145, 146, 161, 162, 167, 170, 172, 178, 180, 183, 185, 188, 128, 137, 139, 155, 161, 163, 165, 169, 171, 187, 155, 156, 151, 255, 156, 157, 160, 181, 255, 186, 187, 255, 162, 255, 160, 168, 161, 167, 158, 255, 160, 132, 135, 133, 134, 176, 255, 170, 181, 186, 191, 176, 180, 182, 183, 186, 189, 134, 140, 136, 138, 142, 161, 163, 255, 130, 137, 136, 255, 144, 170, 176, 178, 160, 191, 128, 138, 174, 175, 177, 255, 148, 150, 164, 167, 173, 176, 185, 189, 190, 192, 255, 144, 146, 175, 141, 255, 166, 176, 178, 255, 186, 138, 170, 180, 181, 160, 161, 162, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 184, 186, 187, 188, 189, 190, 183, 185, 154, 164, 168, 128, 149, 128, 152, 189, 132, 185, 144, 152, 161, 177, 
255, 169, 177, 129, 132, 141, 142, 145, 146, 179, 181, 186, 188, 190, 255, 142, 156, 157, 159, 161, 176, 177, 133, 138, 143, 144, 147, 168, 170, 176, 178, 179, 181, 182, 184, 185, 158, 153, 156, 178, 180, 189, 133, 141, 143, 145, 147, 168, 170, 176, 178, 179, 181, 185, 144, 185, 160, 161, 189, 133, 140, 143, 144, 147, 168, 170, 176, 178, 179, 181, 185, 177, 156, 157, 159, 161, 131, 156, 133, 138, 142, 144, 146, 149, 153, 154, 158, 159, 163, 164, 168, 170, 174, 185, 144, 189, 133, 140, 142, 144, 146, 168, 170, 185, 152, 154, 160, 161, 128, 189, 133, 140, 142, 144, 146, 168, 170, 179, 181, 185, 158, 160, 161, 177, 178, 189, 133, 140, 142, 144, 146, 186, 142, 148, 150, 159, 161, 186, 191, 189, 133, 150, 154, 177, 179, 187, 128, 134, 129, 176, 178, 179, 132, 138, 141, 165, 167, 189, 129, 130, 135, 136, 148, 151, 153, 159, 161, 163, 170, 171, 173, 176, 178, 179, 134, 128, 132, 156, 159, 128, 128, 135, 137, 172, 136, 140, 128, 129, 130, 131, 137, 138, 139, 140, 141, 142, 143, 144, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 184, 188, 189, 190, 191, 132, 152, 185, 187, 191, 128, 170, 161, 144, 149, 154, 157, 165, 166, 174, 176, 181, 255, 130, 141, 143, 159, 155, 255, 128, 140, 142, 145, 160, 177, 128, 145, 160, 172, 174, 176, 151, 156, 170, 128, 168, 176, 255, 138, 255, 128, 150, 160, 255, 149, 255, 167, 133, 179, 133, 139, 131, 160, 174, 175, 186, 255, 166, 255, 128, 163, 141, 143, 154, 189, 169, 172, 174, 177, 181, 182, 129, 130, 132, 133, 134, 176, 177, 178, 179, 180, 181, 182, 183, 177, 191, 165, 170, 175, 177, 180, 255, 168, 174, 176, 255, 128, 134, 136, 142, 144, 150, 152, 158, 128, 129, 130, 131, 132, 133, 134, 135, 144, 145, 255, 133, 135, 161, 169, 177, 181, 184, 188, 160, 151, 154, 128, 146, 147, 148, 152, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 129, 255, 141, 143, 160, 169, 172, 255, 191, 128, 174, 130, 134, 139, 163, 255, 130, 179, 187, 189, 178, 183, 138, 165, 176, 255, 135, 159, 189, 255, 132, 178, 143, 160, 164, 166, 175, 186, 190, 128, 168, 186, 128, 130, 132, 139, 160, 182, 190, 255, 176, 178, 180, 183, 184, 190, 255, 128, 130, 155, 157, 160, 170, 178, 180, 128, 162, 164, 169, 171, 172, 173, 174, 175, 180, 181, 182, 183, 185, 186, 187, 188, 189, 190, 191, 165, 179, 157, 190, 128, 134, 147, 151, 159, 168, 170, 182, 184, 188, 176, 180, 182, 255, 161, 186, 144, 145, 146, 147, 148, 150, 151, 152, 155, 157, 158, 160, 170, 171, 172, 175, 161, 169, 128, 129, 130, 131, 133, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 152, 156, 157, 160, 161, 162, 163, 164, 166, 168, 169, 170, 171, 172, 173, 174, 176, 177, 153, 155, 178, 179, 145, 255, 139, 143, 182, 255, 158, 175, 128, 144, 147, 149, 151, 153, 179, 128, 135, 137, 164, 128, 130, 131, 132, 133, 134, 135, 136, 138, 139, 140, 141, 144, 145, 146, 147, 150, 151, 152, 153, 154, 156, 162, 163, 171, 176, 177, 178, 131, 183, 131, 175, 144, 168, 131, 166, 182, 144, 178, 131, 178, 154, 156, 129, 132, 128, 145, 147, 171, 159, 255, 144, 157, 161, 135, 138, 128, 175, 135, 132, 133, 128, 174, 152, 155, 132, 128, 170, 128, 153, 160, 190, 192, 255, 128, 136, 138, 174, 128, 178, 255, 160, 168, 169, 171, 172, 173, 174, 188, 189, 190, 191, 161, 167, 144, 173, 128, 131, 163, 183, 189, 255, 133, 143, 145, 255, 147, 159, 128, 176, 177, 178, 128, 136, 144, 153, 144, 145, 146, 147, 148, 149, 154, 155, 156, 157, 158, 159, 150, 153, 131, 140, 255, 160, 163, 164, 165, 184, 185, 
186, 161, 162, 133, 255, 170, 181, 183, 186, 128, 150, 152, 182, 184, 255, 192, 255, 128, 255, 173, 130, 133, 146, 159, 165, 171, 175, 255, 181, 190, 184, 185, 192, 255, 140, 134, 138, 142, 161, 163, 255, 182, 130, 136, 137, 176, 151, 152, 154, 160, 190, 136, 144, 192, 255, 135, 129, 130, 132, 133, 144, 170, 176, 178, 144, 154, 160, 191, 128, 169, 174, 255, 148, 169, 157, 158, 189, 190, 192, 255, 144, 255, 139, 140, 178, 255, 186, 128, 181, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 128, 173, 128, 155, 160, 180, 182, 189, 148, 161, 163, 255, 176, 164, 165, 132, 169, 177, 141, 142, 145, 146, 179, 181, 186, 187, 158, 133, 134, 137, 138, 143, 150, 152, 155, 164, 165, 178, 255, 188, 129, 131, 133, 138, 143, 144, 147, 168, 170, 176, 178, 179, 181, 182, 184, 185, 190, 255, 157, 131, 134, 137, 138, 142, 144, 146, 152, 159, 165, 182, 255, 129, 131, 133, 141, 143, 145, 147, 168, 170, 176, 178, 179, 181, 185, 188, 255, 134, 138, 142, 143, 145, 159, 164, 165, 176, 184, 186, 255, 129, 131, 133, 140, 143, 144, 147, 168, 170, 176, 178, 179, 181, 185, 188, 191, 177, 128, 132, 135, 136, 139, 141, 150, 151, 156, 157, 159, 163, 166, 175, 156, 130, 131, 133, 138, 142, 144, 146, 149, 153, 154, 158, 159, 163, 164, 168, 170, 174, 185, 190, 191, 144, 151, 128, 130, 134, 136, 138, 141, 166, 175, 128, 131, 133, 140, 142, 144, 146, 168, 170, 185, 189, 255, 133, 137, 151, 142, 148, 155, 159, 164, 165, 176, 255, 128, 131, 133, 140, 142, 144, 146, 168, 170, 179, 181, 185, 188, 191, 158, 128, 132, 134, 136, 138, 141, 149, 150, 160, 163, 166, 175, 177, 178, 129, 131, 133, 140, 142, 144, 146, 186, 189, 255, 133, 137, 143, 147, 152, 158, 164, 165, 176, 185, 192, 255, 189, 130, 131, 133, 150, 154, 177, 179, 187, 138, 150, 128, 134, 143, 148, 152, 159, 166, 175, 178, 179, 129, 186, 128, 142, 144, 153, 132, 138, 141, 165, 167, 129, 130, 135, 136, 148, 151, 153, 159, 161, 163, 170, 171, 173, 185, 187, 189, 134, 128, 132, 136, 141, 144, 153, 156, 159, 128, 181, 183, 185, 152, 153, 160, 169, 190, 191, 128, 135, 137, 172, 177, 191, 128, 132, 134, 151, 153, 188, 134, 128, 129, 130, 131, 137, 138, 139, 140, 141, 142, 143, 144, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 173, 175, 176, 177, 178, 179, 181, 182, 183, 188, 189, 190, 191, 132, 152, 172, 184, 185, 187, 128, 191, 128, 137, 144, 255, 158, 159, 134, 187, 136, 140, 142, 143, 137, 151, 153, 142, 143, 158, 159, 137, 177, 142, 143, 182, 183, 191, 255, 128, 130, 133, 136, 150, 152, 255, 145, 150, 151, 155, 156, 160, 168, 178, 255, 128, 143, 160, 255, 182, 183, 190, 255, 129, 255, 173, 174, 192, 255, 129, 154, 160, 255, 171, 173, 185, 255, 128, 140, 142, 148, 160, 180, 128, 147, 160, 172, 174, 176, 178, 179, 148, 150, 152, 155, 158, 159, 170, 255, 139, 141, 144, 153, 160, 255, 184, 255, 128, 170, 176, 255, 182, 255, 128, 158, 160, 171, 176, 187, 134, 173, 176, 180, 128, 171, 176, 255, 138, 143, 155, 255, 128, 155, 160, 255, 159, 189, 190, 192, 255, 167, 128, 137, 144, 153, 176, 189, 140, 143, 154, 170, 180, 255, 180, 255, 128, 183, 128, 137, 141, 189, 128, 136, 144, 146, 148, 182, 184, 185, 128, 181, 187, 191, 150, 151, 158, 159, 152, 154, 156, 158, 134, 135, 142, 143, 190, 255, 190, 128, 180, 182, 188, 130, 132, 134, 140, 144, 147, 150, 155, 160, 172, 178, 180, 182, 188, 128, 129, 130, 131, 132, 133, 134, 176, 177, 178, 179, 180, 181, 182, 183, 191, 255, 129, 147, 149, 176, 178, 190, 192, 255, 144, 156, 161, 144, 156, 
165, 176, 130, 135, 149, 164, 166, 168, 138, 147, 152, 157, 170, 185, 188, 191, 142, 133, 137, 160, 255, 137, 255, 128, 174, 176, 255, 159, 165, 170, 180, 255, 167, 173, 128, 165, 176, 255, 168, 174, 176, 190, 192, 255, 128, 150, 160, 166, 168, 174, 176, 182, 184, 190, 128, 134, 136, 142, 144, 150, 152, 158, 160, 191, 128, 129, 130, 131, 132, 133, 134, 135, 144, 145, 255, 133, 135, 161, 175, 177, 181, 184, 188, 160, 151, 152, 187, 192, 255, 133, 173, 177, 255, 143, 159, 187, 255, 176, 191, 182, 183, 184, 191, 192, 255, 150, 255, 128, 146, 147, 148, 152, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 129, 255, 141, 255, 144, 189, 141, 143, 172, 255, 191, 128, 175, 180, 189, 151, 159, 162, 255, 175, 137, 138, 184, 255, 183, 255, 168, 255, 128, 179, 188, 134, 143, 154, 159, 184, 186, 190, 255, 128, 173, 176, 255, 148, 159, 189, 255, 129, 142, 154, 159, 191, 255, 128, 182, 128, 141, 144, 153, 160, 182, 186, 255, 128, 130, 155, 157, 160, 175, 178, 182, 129, 134, 137, 142, 145, 150, 160, 166, 168, 174, 176, 255, 155, 166, 175, 128, 170, 172, 173, 176, 185, 158, 159, 160, 255, 164, 175, 135, 138, 188, 255, 164, 169, 171, 172, 173, 174, 175, 180, 181, 182, 183, 184, 185, 187, 188, 189, 190, 191, 165, 186, 174, 175, 154, 255, 190, 128, 134, 147, 151, 157, 168, 170, 182, 184, 188, 128, 129, 131, 132, 134, 255, 147, 255, 190, 255, 144, 145, 136, 175, 188, 255, 128, 143, 160, 175, 179, 180, 141, 143, 176, 180, 182, 255, 189, 255, 191, 144, 153, 161, 186, 129, 154, 166, 255, 191, 255, 130, 135, 138, 143, 146, 151, 154, 156, 144, 145, 146, 147, 148, 150, 151, 152, 155, 157, 158, 160, 170, 171, 172, 175, 161, 169, 128, 129, 130, 131, 133, 135, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 152, 156, 157, 160, 161, 162, 163, 164, 166, 168, 169, 170, 171, 172, 173, 174, 176, 177, 153, 155, 178, 179, 128, 139, 141, 166, 168, 186, 188, 189, 191, 255, 142, 143, 158, 255, 187, 255, 128, 180, 189, 128, 156, 160, 255, 145, 159, 161, 255, 128, 159, 176, 255, 139, 143, 187, 255, 128, 157, 160, 255, 144, 132, 135, 150, 255, 158, 159, 170, 175, 148, 151, 188, 255, 128, 167, 176, 255, 164, 255, 183, 255, 128, 149, 160, 167, 136, 188, 128, 133, 138, 181, 183, 184, 191, 255, 150, 159, 183, 255, 128, 158, 160, 178, 180, 181, 128, 149, 160, 185, 128, 183, 190, 191, 191, 128, 131, 133, 134, 140, 147, 149, 151, 153, 179, 184, 186, 160, 188, 128, 156, 128, 135, 137, 166, 128, 181, 128, 149, 160, 178, 128, 145, 128, 178, 129, 130, 131, 132, 133, 135, 136, 138, 139, 140, 141, 144, 145, 146, 147, 150, 151, 152, 153, 154, 155, 156, 162, 163, 171, 176, 177, 178, 128, 134, 135, 165, 176, 190, 144, 168, 176, 185, 128, 180, 182, 191, 182, 144, 179, 155, 133, 137, 141, 143, 157, 255, 190, 128, 145, 147, 183, 136, 128, 134, 138, 141, 143, 157, 159, 168, 176, 255, 171, 175, 186, 255, 128, 131, 133, 140, 143, 144, 147, 168, 170, 176, 178, 179, 181, 185, 188, 191, 144, 151, 128, 132, 135, 136, 139, 141, 157, 163, 166, 172, 176, 180, 128, 138, 144, 153, 134, 136, 143, 154, 255, 128, 181, 184, 255, 129, 151, 158, 255, 129, 131, 133, 143, 154, 255, 128, 137, 128, 153, 157, 171, 176, 185, 160, 255, 170, 190, 192, 255, 128, 184, 128, 136, 138, 182, 184, 191, 128, 144, 153, 178, 255, 168, 144, 145, 183, 255, 128, 142, 145, 149, 129, 141, 144, 146, 147, 148, 175, 255, 132, 255, 128, 144, 129, 143, 144, 153, 145, 152, 135, 255, 160, 168, 169, 171, 172, 173, 174, 188, 189, 190, 191, 161, 167, 185, 255, 128, 158, 160, 169, 144, 173, 176, 180, 128, 131, 144, 153, 163, 
183, 189, 255, 144, 255, 133, 143, 191, 255, 143, 159, 160, 128, 129, 255, 159, 160, 171, 172, 255, 173, 255, 179, 255, 128, 176, 177, 178, 128, 129, 171, 175, 189, 255, 128, 136, 144, 153, 157, 158, 133, 134, 137, 144, 145, 146, 147, 148, 149, 154, 155, 156, 157, 158, 159, 168, 169, 170, 150, 153, 165, 169, 173, 178, 187, 255, 131, 132, 140, 169, 174, 255, 130, 132, 149, 157, 173, 186, 188, 160, 161, 163, 164, 167, 168, 132, 134, 149, 157, 186, 139, 140, 191, 255, 134, 128, 132, 138, 144, 146, 255, 166, 167, 129, 155, 187, 149, 181, 143, 175, 137, 169, 131, 140, 141, 192, 255, 128, 182, 187, 255, 173, 180, 182, 255, 132, 155, 159, 161, 175, 128, 160, 163, 164, 165, 184, 185, 186, 161, 162, 128, 134, 136, 152, 155, 161, 163, 164, 166, 170, 133, 143, 151, 255, 139, 143, 154, 255, 164, 167, 185, 187, 128, 131, 133, 159, 161, 162, 169, 178, 180, 183, 130, 135, 137, 139, 148, 151, 153, 155, 157, 159, 164, 190, 141, 143, 145, 146, 161, 162, 167, 170, 172, 178, 180, 183, 185, 188, 128, 137, 139, 155, 161, 163, 165, 169, 171, 187, 155, 156, 151, 255, 156, 157, 160, 181, 255, 186, 187, 255, 162, 255, 160, 168, 161, 167, 158, 255, 160, 132, 135, 133, 134, 176, 255, 128, 191, 154, 164, 168, 128, 149, 150, 191, 128, 152, 153, 191, 181, 128, 159, 160, 189, 190, 191, 189, 128, 131, 132, 185, 186, 191, 144, 128, 151, 152, 161, 162, 176, 177, 255, 169, 177, 129, 132, 141, 142, 145, 146, 179, 181, 186, 188, 190, 191, 192, 255, 142, 158, 128, 155, 156, 161, 162, 175, 176, 177, 178, 191, 169, 177, 180, 183, 128, 132, 133, 138, 139, 142, 143, 144, 145, 146, 147, 185, 186, 191, 157, 128, 152, 153, 158, 159, 177, 178, 180, 181, 191, 142, 146, 169, 177, 180, 189, 128, 132, 133, 185, 186, 191, 144, 185, 128, 159, 160, 161, 162, 191, 169, 177, 180, 189, 128, 132, 133, 140, 141, 142, 143, 144, 145, 146, 147, 185, 186, 191, 158, 177, 128, 155, 156, 161, 162, 191, 131, 145, 155, 157, 128, 132, 133, 138, 139, 141, 142, 149, 150, 152, 153, 159, 160, 162, 163, 164, 165, 167, 168, 170, 171, 173, 174, 185, 186, 191, 144, 128, 191, 141, 145, 169, 189, 128, 132, 133, 185, 186, 191, 128, 151, 152, 154, 155, 159, 160, 161, 162, 191, 128, 141, 145, 169, 180, 189, 129, 132, 133, 185, 186, 191, 158, 128, 159, 160, 161, 162, 176, 177, 178, 179, 191, 141, 145, 189, 128, 132, 133, 186, 187, 191, 142, 128, 147, 148, 150, 151, 158, 159, 161, 162, 185, 186, 191, 178, 188, 128, 132, 133, 150, 151, 153, 154, 189, 190, 191, 128, 134, 135, 191, 128, 177, 129, 179, 180, 191, 128, 131, 137, 141, 152, 160, 164, 166, 172, 177, 189, 129, 132, 133, 134, 135, 138, 139, 147, 148, 167, 168, 169, 170, 179, 180, 191, 133, 128, 134, 135, 155, 156, 159, 160, 191, 128, 129, 191, 136, 128, 172, 173, 191, 128, 135, 136, 140, 141, 191, 191, 128, 170, 171, 190, 161, 128, 143, 144, 149, 150, 153, 154, 157, 158, 164, 165, 166, 167, 173, 174, 176, 177, 180, 181, 255, 130, 141, 143, 159, 134, 187, 136, 140, 142, 143, 137, 151, 153, 142, 143, 158, 159, 137, 177, 191, 142, 143, 182, 183, 192, 255, 129, 151, 128, 133, 134, 135, 136, 255, 145, 150, 151, 155, 191, 192, 255, 128, 143, 144, 159, 160, 255, 182, 183, 190, 191, 192, 255, 128, 129, 255, 173, 174, 192, 255, 128, 129, 154, 155, 159, 160, 255, 171, 173, 185, 191, 192, 255, 141, 128, 145, 146, 159, 160, 177, 178, 191, 173, 128, 145, 146, 159, 160, 176, 177, 191, 128, 179, 180, 191, 151, 156, 128, 191, 128, 159, 160, 255, 184, 191, 192, 255, 169, 128, 170, 171, 175, 176, 255, 182, 191, 192, 255, 128, 158, 159, 191, 128, 143, 144, 173, 174, 175, 176, 180, 181, 191, 128, 171, 172, 175, 176, 255, 138, 191, 192, 
255, 128, 150, 151, 159, 160, 255, 149, 191, 192, 255, 167, 128, 191, 128, 132, 133, 179, 180, 191, 128, 132, 133, 139, 140, 191, 128, 130, 131, 160, 161, 173, 174, 175, 176, 185, 186, 255, 166, 191, 192, 255, 128, 163, 164, 191, 128, 140, 141, 143, 144, 153, 154, 189, 190, 191, 128, 136, 137, 191, 173, 128, 168, 169, 177, 178, 180, 181, 182, 183, 191, 0, 127, 192, 255, 150, 151, 158, 159, 152, 154, 156, 158, 134, 135, 142, 143, 190, 191, 192, 255, 181, 189, 191, 128, 190, 133, 181, 128, 129, 130, 140, 141, 143, 144, 147, 148, 149, 150, 155, 156, 159, 160, 172, 173, 177, 178, 188, 189, 191, 177, 191, 128, 190, 128, 143, 144, 156, 157, 191, 130, 135, 148, 164, 166, 168, 128, 137, 138, 149, 150, 151, 152, 157, 158, 169, 170, 185, 186, 187, 188, 191, 142, 128, 132, 133, 137, 138, 159, 160, 255, 137, 191, 192, 255, 175, 128, 255, 159, 165, 170, 175, 177, 180, 191, 192, 255, 166, 173, 128, 167, 168, 175, 176, 255, 168, 174, 176, 191, 192, 255, 167, 175, 183, 191, 128, 150, 151, 159, 160, 190, 135, 143, 151, 128, 158, 159, 191, 128, 132, 133, 135, 136, 160, 161, 169, 170, 176, 177, 181, 182, 183, 184, 188, 189, 191, 160, 151, 154, 187, 192, 255, 128, 132, 133, 173, 174, 176, 177, 255, 143, 159, 187, 191, 192, 255, 128, 175, 176, 191, 150, 191, 192, 255, 141, 191, 192, 255, 128, 143, 144, 189, 190, 191, 141, 143, 160, 169, 172, 191, 192, 255, 191, 128, 174, 175, 190, 128, 157, 158, 159, 160, 255, 176, 191, 192, 255, 128, 150, 151, 159, 160, 161, 162, 255, 175, 137, 138, 184, 191, 192, 255, 128, 182, 183, 255, 130, 134, 139, 163, 191, 192, 255, 128, 129, 130, 179, 180, 191, 187, 189, 128, 177, 178, 183, 184, 191, 128, 137, 138, 165, 166, 175, 176, 255, 135, 159, 189, 191, 192, 255, 128, 131, 132, 178, 179, 191, 143, 165, 191, 128, 159, 160, 175, 176, 185, 186, 190, 128, 168, 169, 191, 131, 186, 128, 139, 140, 159, 160, 182, 183, 189, 190, 255, 176, 178, 180, 183, 184, 190, 191, 192, 255, 129, 128, 130, 131, 154, 155, 157, 158, 159, 160, 170, 171, 177, 178, 180, 181, 191, 128, 167, 175, 129, 134, 135, 136, 137, 142, 143, 144, 145, 150, 151, 159, 160, 255, 155, 166, 175, 128, 162, 163, 191, 164, 175, 135, 138, 188, 191, 192, 255, 174, 175, 154, 191, 192, 255, 157, 169, 183, 189, 191, 128, 134, 135, 146, 147, 151, 152, 158, 159, 190, 130, 133, 128, 255, 178, 191, 192, 255, 128, 146, 147, 255, 190, 191, 192, 255, 128, 143, 144, 255, 144, 145, 136, 175, 188, 191, 192, 255, 181, 128, 175, 176, 255, 189, 191, 192, 255, 128, 160, 161, 186, 187, 191, 128, 129, 154, 155, 165, 166, 255, 191, 192, 255, 128, 129, 130, 135, 136, 137, 138, 143, 144, 145, 146, 151, 152, 153, 154, 156, 157, 191, 128, 191, 128, 129, 130, 131, 133, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 152, 156, 157, 160, 161, 162, 163, 164, 166, 168, 169, 170, 171, 172, 173, 174, 176, 177, 132, 151, 153, 155, 158, 175, 178, 179, 180, 191, 140, 167, 187, 190, 128, 255, 142, 143, 158, 191, 192, 255, 187, 191, 192, 255, 128, 180, 181, 191, 128, 156, 157, 159, 160, 255, 145, 191, 192, 255, 128, 159, 160, 175, 176, 255, 139, 143, 182, 191, 192, 255, 144, 132, 135, 150, 191, 192, 255, 158, 175, 148, 151, 188, 191, 192, 255, 128, 167, 168, 175, 176, 255, 164, 191, 192, 255, 183, 191, 192, 255, 128, 149, 150, 159, 160, 167, 168, 191, 136, 182, 188, 128, 133, 134, 137, 138, 184, 185, 190, 191, 255, 150, 159, 183, 191, 192, 255, 179, 128, 159, 160, 181, 182, 191, 128, 149, 150, 159, 160, 185, 186, 191, 128, 183, 184, 189, 190, 191, 128, 148, 152, 129, 143, 144, 179, 180, 191, 128, 159, 160, 188, 189, 191, 128, 156, 157, 191, 136, 128, 
164, 165, 191, 128, 181, 182, 191, 128, 149, 150, 159, 160, 178, 179, 191, 128, 145, 146, 191, 128, 178, 179, 191, 128, 130, 131, 132, 133, 134, 135, 136, 138, 139, 140, 141, 144, 145, 146, 147, 150, 151, 152, 153, 154, 156, 162, 163, 171, 176, 177, 178, 129, 191, 128, 130, 131, 183, 184, 191, 128, 130, 131, 175, 176, 191, 128, 143, 144, 168, 169, 191, 128, 130, 131, 166, 167, 191, 182, 128, 143, 144, 178, 179, 191, 128, 130, 131, 178, 179, 191, 128, 154, 156, 129, 132, 133, 191, 146, 128, 171, 172, 191, 135, 137, 142, 158, 128, 168, 169, 175, 176, 255, 159, 191, 192, 255, 144, 128, 156, 157, 161, 162, 191, 128, 134, 135, 138, 139, 191, 128, 175, 176, 191, 134, 128, 131, 132, 135, 136, 191, 128, 174, 175, 191, 128, 151, 152, 155, 156, 191, 132, 128, 191, 128, 170, 171, 191, 128, 153, 154, 191, 160, 190, 192, 255, 128, 184, 185, 191, 137, 128, 174, 175, 191, 128, 129, 177, 178, 255, 144, 191, 192, 255, 128, 142, 143, 144, 145, 146, 149, 129, 148, 150, 191, 175, 191, 192, 255, 132, 191, 192, 255, 128, 144, 129, 143, 145, 191, 144, 153, 128, 143, 145, 152, 154, 191, 135, 191, 192, 255, 160, 168, 169, 171, 172, 173, 174, 188, 189, 190, 191, 128, 159, 161, 167, 170, 187, 185, 191, 192, 255, 128, 143, 144, 173, 174, 191, 128, 131, 132, 162, 163, 183, 184, 188, 189, 255, 133, 143, 145, 191, 192, 255, 128, 146, 147, 159, 160, 191, 160, 128, 191, 128, 129, 191, 192, 255, 159, 160, 171, 128, 170, 172, 191, 192, 255, 173, 191, 192, 255, 179, 191, 192, 255, 128, 176, 177, 178, 129, 191, 128, 129, 130, 191, 171, 175, 189, 191, 192, 255, 128, 136, 137, 143, 144, 153, 154, 191, 144, 145, 146, 147, 148, 149, 154, 155, 156, 157, 158, 159, 128, 143, 150, 153, 160, 191, 149, 157, 173, 186, 188, 160, 161, 163, 164, 167, 168, 132, 134, 149, 157, 186, 191, 139, 140, 192, 255, 133, 145, 128, 134, 135, 137, 138, 255, 166, 167, 129, 155, 187, 149, 181, 143, 175, 137, 169, 131, 140, 191, 192, 255, 160, 163, 164, 165, 184, 185, 186, 128, 159, 161, 162, 166, 191, 133, 191, 192, 255, 132, 160, 163, 167, 179, 184, 186, 128, 164, 165, 168, 169, 187, 188, 191, 130, 135, 137, 139, 144, 147, 151, 153, 155, 157, 159, 163, 171, 179, 184, 189, 191, 128, 140, 141, 148, 149, 160, 161, 164, 165, 166, 167, 190, 138, 164, 170, 128, 155, 156, 160, 161, 187, 188, 191, 128, 191, 155, 156, 128, 191, 151, 191, 192, 255, 156, 157, 160, 128, 191, 181, 191, 192, 255, 158, 159, 186, 128, 185, 187, 191, 192, 255, 162, 191, 192, 255, 160, 168, 128, 159, 161, 167, 169, 191, 158, 191, 192, 255, 10, 13, 128, 191, 192, 223, 224, 239, 240, 247, 248, 255, 128, 191, 128, 191, 128, 191, 128, 191, 128, 191, 10, 128, 191, 128, 191, 128, 191, 36, 123, 37, 123, 10, 128, 191, 128, 191, 128, 191, 36, 123, 37, 123, 170, 181, 183, 186, 128, 150, 152, 182, 184, 255, 192, 255, 128, 255, 173, 130, 133, 146, 159, 165, 171, 175, 255, 181, 190, 184, 185, 192, 255, 140, 134, 138, 142, 161, 163, 255, 182, 130, 136, 137, 176, 151, 152, 154, 160, 190, 136, 144, 192, 255, 135, 129, 130, 132, 133, 144, 170, 176, 178, 144, 154, 160, 191, 128, 169, 174, 255, 148, 169, 157, 158, 189, 190, 192, 255, 144, 255, 139, 140, 178, 255, 186, 128, 181, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 128, 173, 128, 155, 160, 180, 182, 189, 148, 161, 163, 255, 176, 164, 165, 132, 169, 177, 141, 142, 145, 146, 179, 181, 186, 187, 158, 133, 134, 137, 138, 143, 150, 152, 155, 164, 165, 178, 255, 188, 129, 131, 133, 138, 143, 144, 147, 168, 170, 176, 178, 179, 181, 182, 184, 
185, 190, 255, 157, 131, 134, 137, 138, 142, 144, 146, 152, 159, 165, 182, 255, 129, 131, 133, 141, 143, 145, 147, 168, 170, 176, 178, 179, 181, 185, 188, 255, 134, 138, 142, 143, 145, 159, 164, 165, 176, 184, 186, 255, 129, 131, 133, 140, 143, 144, 147, 168, 170, 176, 178, 179, 181, 185, 188, 191, 177, 128, 132, 135, 136, 139, 141, 150, 151, 156, 157, 159, 163, 166, 175, 156, 130, 131, 133, 138, 142, 144, 146, 149, 153, 154, 158, 159, 163, 164, 168, 170, 174, 185, 190, 191, 144, 151, 128, 130, 134, 136, 138, 141, 166, 175, 128, 131, 133, 140, 142, 144, 146, 168, 170, 185, 189, 255, 133, 137, 151, 142, 148, 155, 159, 164, 165, 176, 255, 128, 131, 133, 140, 142, 144, 146, 168, 170, 179, 181, 185, 188, 191, 158, 128, 132, 134, 136, 138, 141, 149, 150, 160, 163, 166, 175, 177, 178, 129, 131, 133, 140, 142, 144, 146, 186, 189, 255, 133, 137, 143, 147, 152, 158, 164, 165, 176, 185, 192, 255, 189, 130, 131, 133, 150, 154, 177, 179, 187, 138, 150, 128, 134, 143, 148, 152, 159, 166, 175, 178, 179, 129, 186, 128, 142, 144, 153, 132, 138, 141, 165, 167, 129, 130, 135, 136, 148, 151, 153, 159, 161, 163, 170, 171, 173, 185, 187, 189, 134, 128, 132, 136, 141, 144, 153, 156, 159, 128, 181, 183, 185, 152, 153, 160, 169, 190, 191, 128, 135, 137, 172, 177, 191, 128, 132, 134, 151, 153, 188, 134, 128, 129, 130, 131, 137, 138, 139, 140, 141, 142, 143, 144, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 173, 175, 176, 177, 178, 179, 181, 182, 183, 188, 189, 190, 191, 132, 152, 172, 184, 185, 187, 128, 191, 128, 137, 144, 255, 158, 159, 134, 187, 136, 140, 142, 143, 137, 151, 153, 142, 143, 158, 159, 137, 177, 142, 143, 182, 183, 191, 255, 128, 130, 133, 136, 150, 152, 255, 145, 150, 151, 155, 156, 160, 168, 178, 255, 128, 143, 160, 255, 182, 183, 190, 255, 129, 255, 173, 174, 192, 255, 129, 154, 160, 255, 171, 173, 185, 255, 128, 140, 142, 148, 160, 180, 128, 147, 160, 172, 174, 176, 178, 179, 148, 150, 152, 155, 158, 159, 170, 255, 139, 141, 144, 153, 160, 255, 184, 255, 128, 170, 176, 255, 182, 255, 128, 158, 160, 171, 176, 187, 134, 173, 176, 180, 128, 171, 176, 255, 138, 143, 155, 255, 128, 155, 160, 255, 159, 189, 190, 192, 255, 167, 128, 137, 144, 153, 176, 189, 140, 143, 154, 170, 180, 255, 180, 255, 128, 183, 128, 137, 141, 189, 128, 136, 144, 146, 148, 182, 184, 185, 128, 181, 187, 191, 150, 151, 158, 159, 152, 154, 156, 158, 134, 135, 142, 143, 190, 255, 190, 128, 180, 182, 188, 130, 132, 134, 140, 144, 147, 150, 155, 160, 172, 178, 180, 182, 188, 128, 129, 130, 131, 132, 133, 134, 176, 177, 178, 179, 180, 181, 182, 183, 191, 255, 129, 147, 149, 176, 178, 190, 192, 255, 144, 156, 161, 144, 156, 165, 176, 130, 135, 149, 164, 166, 168, 138, 147, 152, 157, 170, 185, 188, 191, 142, 133, 137, 160, 255, 137, 255, 128, 174, 176, 255, 159, 165, 170, 180, 255, 167, 173, 128, 165, 176, 255, 168, 174, 176, 190, 192, 255, 128, 150, 160, 166, 168, 174, 176, 182, 184, 190, 128, 134, 136, 142, 144, 150, 152, 158, 160, 191, 128, 129, 130, 131, 132, 133, 134, 135, 144, 145, 255, 133, 135, 161, 175, 177, 181, 184, 188, 160, 151, 152, 187, 192, 255, 133, 173, 177, 255, 143, 159, 187, 255, 176, 191, 182, 183, 184, 191, 192, 255, 150, 255, 128, 146, 147, 148, 152, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 129, 255, 141, 255, 144, 189, 141, 143, 172, 255, 191, 128, 175, 180, 189, 151, 159, 162, 255, 175, 137, 138, 184, 255, 183, 255, 168, 255, 128, 179, 188, 134, 143, 154, 159, 184, 186, 190, 255, 128, 173, 
176, 255, 148, 159, 189, 255, 129, 142, 154, 159, 191, 255, 128, 182, 128, 141, 144, 153, 160, 182, 186, 255, 128, 130, 155, 157, 160, 175, 178, 182, 129, 134, 137, 142, 145, 150, 160, 166, 168, 174, 176, 255, 155, 166, 175, 128, 170, 172, 173, 176, 185, 158, 159, 160, 255, 164, 175, 135, 138, 188, 255, 164, 169, 171, 172, 173, 174, 175, 180, 181, 182, 183, 184, 185, 187, 188, 189, 190, 191, 165, 186, 174, 175, 154, 255, 190, 128, 134, 147, 151, 157, 168, 170, 182, 184, 188, 128, 129, 131, 132, 134, 255, 147, 255, 190, 255, 144, 145, 136, 175, 188, 255, 128, 143, 160, 175, 179, 180, 141, 143, 176, 180, 182, 255, 189, 255, 191, 144, 153, 161, 186, 129, 154, 166, 255, 191, 255, 130, 135, 138, 143, 146, 151, 154, 156, 144, 145, 146, 147, 148, 150, 151, 152, 155, 157, 158, 160, 170, 171, 172, 175, 161, 169, 128, 129, 130, 131, 133, 135, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 152, 156, 157, 160, 161, 162, 163, 164, 166, 168, 169, 170, 171, 172, 173, 174, 176, 177, 153, 155, 178, 179, 128, 139, 141, 166, 168, 186, 188, 189, 191, 255, 142, 143, 158, 255, 187, 255, 128, 180, 189, 128, 156, 160, 255, 145, 159, 161, 255, 128, 159, 176, 255, 139, 143, 187, 255, 128, 157, 160, 255, 144, 132, 135, 150, 255, 158, 159, 170, 175, 148, 151, 188, 255, 128, 167, 176, 255, 164, 255, 183, 255, 128, 149, 160, 167, 136, 188, 128, 133, 138, 181, 183, 184, 191, 255, 150, 159, 183, 255, 128, 158, 160, 178, 180, 181, 128, 149, 160, 185, 128, 183, 190, 191, 191, 128, 131, 133, 134, 140, 147, 149, 151, 153, 179, 184, 186, 160, 188, 128, 156, 128, 135, 137, 166, 128, 181, 128, 149, 160, 178, 128, 145, 128, 178, 129, 130, 131, 132, 133, 135, 136, 138, 139, 140, 141, 144, 145, 146, 147, 150, 151, 152, 153, 154, 155, 156, 162, 163, 171, 176, 177, 178, 128, 134, 135, 165, 176, 190, 144, 168, 176, 185, 128, 180, 182, 191, 182, 144, 179, 155, 133, 137, 141, 143, 157, 255, 190, 128, 145, 147, 183, 136, 128, 134, 138, 141, 143, 157, 159, 168, 176, 255, 171, 175, 186, 255, 128, 131, 133, 140, 143, 144, 147, 168, 170, 176, 178, 179, 181, 185, 188, 191, 144, 151, 128, 132, 135, 136, 139, 141, 157, 163, 166, 172, 176, 180, 128, 138, 144, 153, 134, 136, 143, 154, 255, 128, 181, 184, 255, 129, 151, 158, 255, 129, 131, 133, 143, 154, 255, 128, 137, 128, 153, 157, 171, 176, 185, 160, 255, 170, 190, 192, 255, 128, 184, 128, 136, 138, 182, 184, 191, 128, 144, 153, 178, 255, 168, 144, 145, 183, 255, 128, 142, 145, 149, 129, 141, 144, 146, 147, 148, 175, 255, 132, 255, 128, 144, 129, 143, 144, 153, 145, 152, 135, 255, 160, 168, 169, 171, 172, 173, 174, 188, 189, 190, 191, 161, 167, 185, 255, 128, 158, 160, 169, 144, 173, 176, 180, 128, 131, 144, 153, 163, 183, 189, 255, 144, 255, 133, 143, 191, 255, 143, 159, 160, 128, 129, 255, 159, 160, 171, 172, 255, 173, 255, 179, 255, 128, 176, 177, 178, 128, 129, 171, 175, 189, 255, 128, 136, 144, 153, 157, 158, 133, 134, 137, 144, 145, 146, 147, 148, 149, 154, 155, 156, 157, 158, 159, 168, 169, 170, 150, 153, 165, 169, 173, 178, 187, 255, 131, 132, 140, 169, 174, 255, 130, 132, 149, 157, 173, 186, 188, 160, 161, 163, 164, 167, 168, 132, 134, 149, 157, 186, 139, 140, 191, 255, 134, 128, 132, 138, 144, 146, 255, 166, 167, 129, 155, 187, 149, 181, 143, 175, 137, 169, 131, 140, 141, 192, 255, 128, 182, 187, 255, 173, 180, 182, 255, 132, 155, 159, 161, 175, 128, 160, 163, 164, 165, 184, 185, 186, 161, 162, 128, 134, 136, 152, 155, 161, 163, 164, 166, 170, 133, 143, 151, 255, 139, 143, 154, 255, 164, 167, 185, 187, 128, 131, 133, 159, 161, 162, 169, 178, 180, 183, 130, 135, 137, 139, 
148, 151, 153, 155, 157, 159, 164, 190, 141, 143, 145, 146, 161, 162, 167, 170, 172, 178, 180, 183, 185, 188, 128, 137, 139, 155, 161, 163, 165, 169, 171, 187, 155, 156, 151, 255, 156, 157, 160, 181, 255, 186, 187, 255, 162, 255, 160, 168, 161, 167, 158, 255, 160, 132, 135, 133, 134, 176, 255, 128, 191, 154, 164, 168, 128, 149, 150, 191, 128, 152, 153, 191, 181, 128, 159, 160, 189, 190, 191, 189, 128, 131, 132, 185, 186, 191, 144, 128, 151, 152, 161, 162, 176, 177, 255, 169, 177, 129, 132, 141, 142, 145, 146, 179, 181, 186, 188, 190, 191, 192, 255, 142, 158, 128, 155, 156, 161, 162, 175, 176, 177, 178, 191, 169, 177, 180, 183, 128, 132, 133, 138, 139, 142, 143, 144, 145, 146, 147, 185, 186, 191, 157, 128, 152, 153, 158, 159, 177, 178, 180, 181, 191, 142, 146, 169, 177, 180, 189, 128, 132, 133, 185, 186, 191, 144, 185, 128, 159, 160, 161, 162, 191, 169, 177, 180, 189, 128, 132, 133, 140, 141, 142, 143, 144, 145, 146, 147, 185, 186, 191, 158, 177, 128, 155, 156, 161, 162, 191, 131, 145, 155, 157, 128, 132, 133, 138, 139, 141, 142, 149, 150, 152, 153, 159, 160, 162, 163, 164, 165, 167, 168, 170, 171, 173, 174, 185, 186, 191, 144, 128, 191, 141, 145, 169, 189, 128, 132, 133, 185, 186, 191, 128, 151, 152, 154, 155, 159, 160, 161, 162, 191, 128, 141, 145, 169, 180, 189, 129, 132, 133, 185, 186, 191, 158, 128, 159, 160, 161, 162, 176, 177, 178, 179, 191, 141, 145, 189, 128, 132, 133, 186, 187, 191, 142, 128, 147, 148, 150, 151, 158, 159, 161, 162, 185, 186, 191, 178, 188, 128, 132, 133, 150, 151, 153, 154, 189, 190, 191, 128, 134, 135, 191, 128, 177, 129, 179, 180, 191, 128, 131, 137, 141, 152, 160, 164, 166, 172, 177, 189, 129, 132, 133, 134, 135, 138, 139, 147, 148, 167, 168, 169, 170, 179, 180, 191, 133, 128, 134, 135, 155, 156, 159, 160, 191, 128, 129, 191, 136, 128, 172, 173, 191, 128, 135, 136, 140, 141, 191, 191, 128, 170, 171, 190, 161, 128, 143, 144, 149, 150, 153, 154, 157, 158, 164, 165, 166, 167, 173, 174, 176, 177, 180, 181, 255, 130, 141, 143, 159, 134, 187, 136, 140, 142, 143, 137, 151, 153, 142, 143, 158, 159, 137, 177, 191, 142, 143, 182, 183, 192, 255, 129, 151, 128, 133, 134, 135, 136, 255, 145, 150, 151, 155, 191, 192, 255, 128, 143, 144, 159, 160, 255, 182, 183, 190, 191, 192, 255, 128, 129, 255, 173, 174, 192, 255, 128, 129, 154, 155, 159, 160, 255, 171, 173, 185, 191, 192, 255, 141, 128, 145, 146, 159, 160, 177, 178, 191, 173, 128, 145, 146, 159, 160, 176, 177, 191, 128, 179, 180, 191, 151, 156, 128, 191, 128, 159, 160, 255, 184, 191, 192, 255, 169, 128, 170, 171, 175, 176, 255, 182, 191, 192, 255, 128, 158, 159, 191, 128, 143, 144, 173, 174, 175, 176, 180, 181, 191, 128, 171, 172, 175, 176, 255, 138, 191, 192, 255, 128, 150, 151, 159, 160, 255, 149, 191, 192, 255, 167, 128, 191, 128, 132, 133, 179, 180, 191, 128, 132, 133, 139, 140, 191, 128, 130, 131, 160, 161, 173, 174, 175, 176, 185, 186, 255, 166, 191, 192, 255, 128, 163, 164, 191, 128, 140, 141, 143, 144, 153, 154, 189, 190, 191, 128, 136, 137, 191, 173, 128, 168, 169, 177, 178, 180, 181, 182, 183, 191, 0, 127, 192, 255, 150, 151, 158, 159, 152, 154, 156, 158, 134, 135, 142, 143, 190, 191, 192, 255, 181, 189, 191, 128, 190, 133, 181, 128, 129, 130, 140, 141, 143, 144, 147, 148, 149, 150, 155, 156, 159, 160, 172, 173, 177, 178, 188, 189, 191, 177, 191, 128, 190, 128, 143, 144, 156, 157, 191, 130, 135, 148, 164, 166, 168, 128, 137, 138, 149, 150, 151, 152, 157, 158, 169, 170, 185, 186, 187, 188, 191, 142, 128, 132, 133, 137, 138, 159, 160, 255, 137, 191, 192, 255, 175, 128, 255, 159, 165, 170, 175, 177, 180, 191, 192, 
255, 166, 173, 128, 167, 168, 175, 176, 255, 168, 174, 176, 191, 192, 255, 167, 175, 183, 191, 128, 150, 151, 159, 160, 190, 135, 143, 151, 128, 158, 159, 191, 128, 132, 133, 135, 136, 160, 161, 169, 170, 176, 177, 181, 182, 183, 184, 188, 189, 191, 160, 151, 154, 187, 192, 255, 128, 132, 133, 173, 174, 176, 177, 255, 143, 159, 187, 191, 192, 255, 128, 175, 176, 191, 150, 191, 192, 255, 141, 191, 192, 255, 128, 143, 144, 189, 190, 191, 141, 143, 160, 169, 172, 191, 192, 255, 191, 128, 174, 175, 190, 128, 157, 158, 159, 160, 255, 176, 191, 192, 255, 128, 150, 151, 159, 160, 161, 162, 255, 175, 137, 138, 184, 191, 192, 255, 128, 182, 183, 255, 130, 134, 139, 163, 191, 192, 255, 128, 129, 130, 179, 180, 191, 187, 189, 128, 177, 178, 183, 184, 191, 128, 137, 138, 165, 166, 175, 176, 255, 135, 159, 189, 191, 192, 255, 128, 131, 132, 178, 179, 191, 143, 165, 191, 128, 159, 160, 175, 176, 185, 186, 190, 128, 168, 169, 191, 131, 186, 128, 139, 140, 159, 160, 182, 183, 189, 190, 255, 176, 178, 180, 183, 184, 190, 191, 192, 255, 129, 128, 130, 131, 154, 155, 157, 158, 159, 160, 170, 171, 177, 178, 180, 181, 191, 128, 167, 175, 129, 134, 135, 136, 137, 142, 143, 144, 145, 150, 151, 159, 160, 255, 155, 166, 175, 128, 162, 163, 191, 164, 175, 135, 138, 188, 191, 192, 255, 174, 175, 154, 191, 192, 255, 157, 169, 183, 189, 191, 128, 134, 135, 146, 147, 151, 152, 158, 159, 190, 130, 133, 128, 255, 178, 191, 192, 255, 128, 146, 147, 255, 190, 191, 192, 255, 128, 143, 144, 255, 144, 145, 136, 175, 188, 191, 192, 255, 181, 128, 175, 176, 255, 189, 191, 192, 255, 128, 160, 161, 186, 187, 191, 128, 129, 154, 155, 165, 166, 255, 191, 192, 255, 128, 129, 130, 135, 136, 137, 138, 143, 144, 145, 146, 151, 152, 153, 154, 156, 157, 191, 128, 191, 128, 129, 130, 131, 133, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 152, 156, 157, 160, 161, 162, 163, 164, 166, 168, 169, 170, 171, 172, 173, 174, 176, 177, 132, 151, 153, 155, 158, 175, 178, 179, 180, 191, 140, 167, 187, 190, 128, 255, 142, 143, 158, 191, 192, 255, 187, 191, 192, 255, 128, 180, 181, 191, 128, 156, 157, 159, 160, 255, 145, 191, 192, 255, 128, 159, 160, 175, 176, 255, 139, 143, 182, 191, 192, 255, 144, 132, 135, 150, 191, 192, 255, 158, 175, 148, 151, 188, 191, 192, 255, 128, 167, 168, 175, 176, 255, 164, 191, 192, 255, 183, 191, 192, 255, 128, 149, 150, 159, 160, 167, 168, 191, 136, 182, 188, 128, 133, 134, 137, 138, 184, 185, 190, 191, 255, 150, 159, 183, 191, 192, 255, 179, 128, 159, 160, 181, 182, 191, 128, 149, 150, 159, 160, 185, 186, 191, 128, 183, 184, 189, 190, 191, 128, 148, 152, 129, 143, 144, 179, 180, 191, 128, 159, 160, 188, 189, 191, 128, 156, 157, 191, 136, 128, 164, 165, 191, 128, 181, 182, 191, 128, 149, 150, 159, 160, 178, 179, 191, 128, 145, 146, 191, 128, 178, 179, 191, 128, 130, 131, 132, 133, 134, 135, 136, 138, 139, 140, 141, 144, 145, 146, 147, 150, 151, 152, 153, 154, 156, 162, 163, 171, 176, 177, 178, 129, 191, 128, 130, 131, 183, 184, 191, 128, 130, 131, 175, 176, 191, 128, 143, 144, 168, 169, 191, 128, 130, 131, 166, 167, 191, 182, 128, 143, 144, 178, 179, 191, 128, 130, 131, 178, 179, 191, 128, 154, 156, 129, 132, 133, 191, 146, 128, 171, 172, 191, 135, 137, 142, 158, 128, 168, 169, 175, 176, 255, 159, 191, 192, 255, 144, 128, 156, 157, 161, 162, 191, 128, 134, 135, 138, 139, 191, 128, 175, 176, 191, 134, 128, 131, 132, 135, 136, 191, 128, 174, 175, 191, 128, 151, 152, 155, 156, 191, 132, 128, 191, 128, 170, 171, 191, 128, 153, 154, 191, 160, 190, 192, 255, 128, 184, 185, 191, 137, 128, 174, 175, 191, 128, 129, 
177, 178, 255, 144, 191, 192, 255, 128, 142, 143, 144, 145, 146, 149, 129, 148, 150, 191, 175, 191, 192, 255, 132, 191, 192, 255, 128, 144, 129, 143, 145, 191, 144, 153, 128, 143, 145, 152, 154, 191, 135, 191, 192, 255, 160, 168, 169, 171, 172, 173, 174, 188, 189, 190, 191, 128, 159, 161, 167, 170, 187, 185, 191, 192, 255, 128, 143, 144, 173, 174, 191, 128, 131, 132, 162, 163, 183, 184, 188, 189, 255, 133, 143, 145, 191, 192, 255, 128, 146, 147, 159, 160, 191, 160, 128, 191, 128, 129, 191, 192, 255, 159, 160, 171, 128, 170, 172, 191, 192, 255, 173, 191, 192, 255, 179, 191, 192, 255, 128, 176, 177, 178, 129, 191, 128, 129, 130, 191, 171, 175, 189, 191, 192, 255, 128, 136, 137, 143, 144, 153, 154, 191, 144, 145, 146, 147, 148, 149, 154, 155, 156, 157, 158, 159, 128, 143, 150, 153, 160, 191, 149, 157, 173, 186, 188, 160, 161, 163, 164, 167, 168, 132, 134, 149, 157, 186, 191, 139, 140, 192, 255, 133, 145, 128, 134, 135, 137, 138, 255, 166, 167, 129, 155, 187, 149, 181, 143, 175, 137, 169, 131, 140, 191, 192, 255, 160, 163, 164, 165, 184, 185, 186, 128, 159, 161, 162, 166, 191, 133, 191, 192, 255, 132, 160, 163, 167, 179, 184, 186, 128, 164, 165, 168, 169, 187, 188, 191, 130, 135, 137, 139, 144, 147, 151, 153, 155, 157, 159, 163, 171, 179, 184, 189, 191, 128, 140, 141, 148, 149, 160, 161, 164, 165, 166, 167, 190, 138, 164, 170, 128, 155, 156, 160, 161, 187, 188, 191, 128, 191, 155, 156, 128, 191, 151, 191, 192, 255, 156, 157, 160, 128, 191, 181, 191, 192, 255, 158, 159, 186, 128, 185, 187, 191, 192, 255, 162, 191, 192, 255, 160, 168, 128, 159, 161, 167, 169, 191, 158, 191, 192, 255, 9, 10, 13, 32, 33, 34, 35, 38, 46, 47, 60, 61, 62, 64, 92, 95, 123, 124, 125, 126, 127, 194, 195, 198, 199, 203, 204, 205, 206, 207, 210, 212, 213, 214, 215, 216, 217, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 233, 234, 237, 238, 239, 240, 0, 36, 37, 45, 48, 57, 58, 63, 65, 90, 91, 96, 97, 122, 192, 193, 196, 218, 229, 236, 241, 247, 9, 32, 10, 61, 10, 38, 46, 42, 47, 46, 69, 101, 48, 57, 60, 61, 61, 62, 61, 45, 95, 194, 195, 198, 199, 203, 204, 205, 206, 207, 210, 212, 213, 214, 215, 216, 217, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 233, 234, 237, 239, 240, 243, 48, 57, 65, 90, 97, 122, 196, 218, 229, 236, 124, 125, 128, 191, 170, 181, 186, 128, 191, 151, 183, 128, 255, 192, 255, 0, 127, 173, 130, 133, 146, 159, 165, 171, 175, 191, 192, 255, 181, 190, 128, 175, 176, 183, 184, 185, 186, 191, 134, 139, 141, 162, 128, 135, 136, 255, 182, 130, 137, 176, 151, 152, 154, 160, 136, 191, 192, 255, 128, 143, 144, 170, 171, 175, 176, 178, 179, 191, 128, 159, 160, 191, 176, 128, 138, 139, 173, 174, 255, 148, 150, 164, 167, 173, 176, 185, 189, 190, 192, 255, 144, 128, 145, 146, 175, 176, 191, 128, 140, 141, 255, 166, 176, 178, 191, 192, 255, 186, 128, 137, 138, 170, 171, 179, 180, 181, 182, 191, 160, 161, 162, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 128, 191, 128, 129, 130, 131, 137, 138, 139, 140, 141, 142, 143, 144, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 182, 183, 184, 188, 189, 190, 191, 132, 187, 129, 130, 132, 133, 134, 176, 177, 178, 179, 180, 181, 182, 183, 128, 191, 128, 129, 130, 131, 132, 133, 134, 135, 144, 136, 143, 145, 191, 192, 255, 182, 183, 184, 128, 191, 128, 191, 191, 128, 190, 192, 255, 128, 146, 147, 148, 152, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 
171, 172, 173, 174, 175, 176, 129, 191, 192, 255, 158, 159, 128, 157, 160, 191, 192, 255, 128, 191, 164, 169, 171, 172, 173, 174, 175, 180, 181, 182, 183, 184, 185, 187, 188, 189, 190, 191, 128, 163, 165, 186, 144, 145, 146, 147, 148, 150, 151, 152, 155, 157, 158, 160, 170, 171, 172, 175, 128, 159, 161, 169, 173, 191, 128, 191, 10, 13, 34, 36, 37, 92, 128, 191, 192, 223, 224, 239, 240, 247, 248, 255, 10, 13, 34, 92, 36, 37, 128, 191, 192, 223, 224, 239, 240, 247, 248, 255, 10, 13, 36, 123, 123, 126, 126, 37, 123, 126, 10, 13, 128, 191, 192, 223, 224, 239, 240, 247, 248, 255, 128, 191, 128, 191, 128, 191, 10, 13, 36, 37, 128, 191, 192, 223, 224, 239, 240, 247, 248, 255, 10, 13, 36, 37, 128, 191, 192, 223, 224, 239, 240, 247, 248, 255, 10, 13, 10, 13, 123, 10, 13, 126, 10, 13, 126, 126, 128, 191, 128, 191, 128, 191, 10, 13, 36, 37, 128, 191, 192, 223, 224, 239, 240, 247, 248, 255, 10, 13, 36, 37, 128, 191, 192, 223, 224, 239, 240, 247, 248, 255, 10, 13, 10, 13, 123, 10, 13, 126, 10, 13, 126, 126, 128, 191, 128, 191, 128, 191, 95, 194, 195, 198, 199, 203, 204, 205, 206, 207, 210, 212, 213, 214, 215, 216, 217, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 233, 234, 237, 238, 239, 240, 65, 90, 97, 122, 128, 191, 192, 193, 196, 218, 229, 236, 241, 247, 248, 255, 45, 95, 194, 195, 198, 199, 203, 204, 205, 206, 207, 210, 212, 213, 214, 215, 216, 217, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 233, 234, 237, 239, 240, 243, 48, 57, 65, 90, 97, 122, 196, 218, 229, 236, 128, 191, 170, 181, 186, 128, 191, 151, 183, 128, 255, 192, 255, 0, 127, 173, 130, 133, 146, 159, 165, 171, 175, 191, 192, 255, 181, 190, 128, 175, 176, 183, 184, 185, 186, 191, 134, 139, 141, 162, 128, 135, 136, 255, 182, 130, 137, 176, 151, 152, 154, 160, 136, 191, 192, 255, 128, 143, 144, 170, 171, 175, 176, 178, 179, 191, 128, 159, 160, 191, 176, 128, 138, 139, 173, 174, 255, 148, 150, 164, 167, 173, 176, 185, 189, 190, 192, 255, 144, 128, 145, 146, 175, 176, 191, 128, 140, 141, 255, 166, 176, 178, 191, 192, 255, 186, 128, 137, 138, 170, 171, 179, 180, 181, 182, 191, 160, 161, 162, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 128, 191, 128, 129, 130, 131, 137, 138, 139, 140, 141, 142, 143, 144, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 182, 183, 184, 188, 189, 190, 191, 132, 187, 129, 130, 132, 133, 134, 176, 177, 178, 179, 180, 181, 182, 183, 128, 191, 128, 129, 130, 131, 132, 133, 134, 135, 144, 136, 143, 145, 191, 192, 255, 182, 183, 184, 128, 191, 128, 191, 191, 128, 190, 192, 255, 128, 146, 147, 148, 152, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 129, 191, 192, 255, 158, 159, 128, 157, 160, 191, 192, 255, 128, 191, 164, 169, 171, 172, 173, 174, 175, 180, 181, 182, 183, 184, 185, 187, 188, 189, 190, 191, 128, 163, 165, 186, 144, 145, 146, 147, 148, 150, 151, 152, 155, 157, 158, 160, 170, 171, 172, 175, 128, 159, 161, 169, 173, 191, 128, 191, } var _hcltok_single_lengths []byte = []byte{ 0, 1, 1, 2, 3, 2, 0, 32, 31, 36, 1, 4, 0, 0, 0, 0, 1, 2, 1, 1, 1, 1, 0, 1, 1, 0, 0, 2, 0, 0, 0, 1, 32, 0, 0, 0, 0, 1, 3, 1, 1, 1, 0, 2, 0, 1, 1, 2, 0, 3, 0, 1, 0, 2, 1, 2, 0, 0, 5, 1, 4, 0, 0, 1, 43, 0, 0, 0, 2, 3, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 4, 1, 0, 15, 0, 0, 0, 1, 6, 1, 0, 0, 1, 0, 2, 0, 0, 0, 9, 0, 1, 1, 0, 
0, 0, 3, 0, 1, 0, 28, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 0, 18, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 16, 36, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 1, 1, 1, 1, 0, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 0, 0, 2, 2, 0, 11, 0, 0, 0, 0, 0, 0, 0, 1, 1, 3, 0, 0, 4, 0, 0, 0, 18, 0, 0, 0, 1, 4, 1, 4, 1, 0, 3, 2, 2, 2, 1, 0, 0, 1, 8, 0, 0, 0, 4, 12, 0, 2, 0, 3, 0, 1, 0, 2, 0, 1, 2, 0, 3, 1, 2, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 28, 3, 0, 1, 1, 2, 1, 0, 1, 1, 2, 1, 1, 2, 1, 1, 0, 2, 1, 1, 1, 1, 0, 0, 6, 1, 1, 0, 0, 46, 1, 1, 0, 0, 0, 0, 2, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 13, 2, 0, 0, 0, 9, 0, 1, 28, 0, 1, 3, 0, 2, 0, 0, 0, 1, 0, 1, 1, 2, 0, 18, 2, 0, 0, 16, 35, 0, 0, 0, 1, 0, 28, 0, 0, 0, 0, 1, 0, 2, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 11, 0, 0, 0, 0, 4, 0, 12, 1, 7, 0, 4, 0, 0, 0, 0, 1, 2, 1, 1, 1, 1, 0, 1, 1, 0, 0, 2, 0, 0, 0, 1, 32, 0, 0, 0, 0, 1, 3, 1, 1, 1, 0, 2, 0, 1, 1, 2, 0, 3, 0, 1, 0, 2, 1, 2, 0, 0, 5, 1, 4, 0, 0, 1, 43, 0, 0, 0, 2, 3, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 4, 1, 0, 15, 0, 0, 0, 1, 6, 1, 0, 0, 1, 0, 2, 0, 0, 0, 9, 0, 1, 1, 0, 0, 0, 3, 0, 1, 0, 28, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 0, 18, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 16, 36, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 1, 1, 1, 1, 0, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 0, 0, 2, 2, 0, 11, 0, 0, 0, 0, 0, 0, 0, 1, 1, 3, 0, 0, 4, 0, 0, 0, 18, 0, 0, 0, 1, 4, 1, 4, 1, 0, 3, 2, 2, 2, 1, 0, 0, 1, 8, 0, 0, 0, 4, 12, 0, 2, 0, 3, 0, 1, 0, 2, 0, 1, 2, 0, 0, 3, 0, 1, 1, 1, 2, 2, 4, 1, 6, 2, 4, 2, 4, 1, 4, 0, 6, 1, 3, 1, 2, 0, 2, 11, 1, 1, 1, 0, 1, 1, 0, 2, 0, 3, 3, 2, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 4, 3, 2, 2, 0, 6, 1, 0, 1, 1, 0, 2, 0, 4, 3, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 3, 0, 2, 0, 0, 0, 3, 0, 2, 1, 1, 3, 1, 0, 0, 0, 0, 0, 5, 2, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 35, 4, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 3, 0, 1, 0, 0, 3, 0, 0, 1, 0, 0, 0, 0, 28, 0, 0, 0, 0, 1, 0, 3, 1, 4, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 7, 0, 0, 2, 2, 0, 11, 0, 0, 0, 0, 0, 1, 1, 3, 0, 0, 4, 0, 0, 0, 12, 1, 4, 1, 5, 2, 0, 3, 2, 2, 2, 1, 7, 0, 7, 17, 3, 0, 2, 0, 3, 0, 0, 1, 0, 2, 0, 2, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 2, 1, 0, 0, 0, 2, 2, 4, 0, 0, 0, 0, 1, 2, 1, 1, 1, 1, 0, 1, 1, 0, 0, 2, 0, 0, 0, 1, 32, 0, 0, 0, 0, 1, 3, 1, 1, 1, 0, 2, 0, 1, 1, 2, 0, 3, 0, 1, 0, 2, 1, 2, 0, 0, 5, 1, 4, 0, 0, 1, 43, 0, 0, 0, 2, 3, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 4, 1, 0, 15, 0, 0, 0, 1, 6, 1, 0, 0, 1, 0, 2, 0, 0, 0, 9, 0, 1, 1, 0, 0, 0, 3, 0, 1, 0, 28, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 0, 18, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 16, 36, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 1, 1, 1, 1, 0, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 0, 0, 2, 2, 0, 11, 0, 0, 0, 0, 0, 0, 0, 1, 1, 3, 0, 0, 4, 0, 0, 0, 18, 0, 0, 0, 1, 4, 1, 4, 1, 0, 3, 2, 2, 2, 1, 0, 0, 1, 8, 0, 0, 0, 4, 12, 0, 2, 0, 3, 0, 1, 0, 2, 0, 1, 2, 0, 0, 3, 0, 1, 1, 1, 2, 2, 4, 1, 6, 2, 4, 2, 4, 1, 4, 0, 6, 1, 3, 1, 2, 0, 2, 11, 1, 1, 1, 0, 1, 1, 0, 2, 0, 3, 3, 2, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 4, 
3, 2, 2, 0, 6, 1, 0, 1, 1, 0, 2, 0, 4, 3, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 3, 0, 2, 0, 0, 0, 3, 0, 2, 1, 1, 3, 1, 0, 0, 0, 0, 0, 5, 2, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 35, 4, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 3, 0, 1, 0, 0, 3, 0, 0, 1, 0, 0, 0, 0, 28, 0, 0, 0, 0, 1, 0, 3, 1, 4, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 7, 0, 0, 2, 2, 0, 11, 0, 0, 0, 0, 0, 1, 1, 3, 0, 0, 4, 0, 0, 0, 12, 1, 4, 1, 5, 2, 0, 3, 2, 2, 2, 1, 7, 0, 7, 17, 3, 0, 2, 0, 3, 0, 0, 1, 0, 2, 0, 53, 2, 1, 1, 1, 1, 1, 2, 3, 2, 2, 1, 34, 1, 1, 0, 3, 2, 0, 0, 0, 1, 2, 4, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 30, 47, 13, 9, 3, 0, 1, 28, 2, 0, 18, 16, 0, 6, 4, 2, 2, 0, 1, 1, 1, 2, 1, 2, 0, 0, 0, 4, 2, 2, 3, 3, 2, 1, 1, 0, 0, 0, 4, 2, 2, 3, 3, 2, 1, 1, 0, 0, 0, 33, 34, 0, 3, 2, 0, 0, 0, 1, 2, 4, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 30, 47, 13, 9, 3, 0, 1, 28, 2, 0, 18, 16, 0, } var _hcltok_range_lengths []byte = []byte{ 0, 0, 0, 0, 1, 1, 1, 5, 5, 5, 0, 0, 3, 0, 1, 1, 4, 2, 3, 0, 1, 0, 2, 2, 4, 2, 2, 3, 1, 1, 1, 1, 0, 1, 1, 2, 2, 1, 4, 6, 9, 6, 8, 5, 8, 7, 10, 4, 6, 4, 7, 7, 5, 5, 4, 5, 1, 2, 8, 4, 3, 3, 3, 0, 3, 1, 2, 1, 2, 2, 3, 3, 1, 3, 2, 2, 1, 2, 2, 2, 3, 4, 4, 3, 1, 2, 1, 3, 2, 2, 2, 2, 2, 3, 3, 1, 1, 2, 1, 3, 2, 2, 3, 2, 7, 0, 1, 4, 1, 2, 4, 2, 1, 2, 0, 2, 2, 3, 5, 5, 1, 4, 1, 1, 2, 2, 1, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 1, 1, 1, 4, 2, 2, 3, 1, 4, 4, 6, 1, 3, 1, 1, 2, 1, 1, 1, 5, 3, 1, 1, 1, 2, 3, 3, 1, 2, 2, 1, 4, 1, 2, 5, 2, 1, 1, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 4, 2, 1, 2, 2, 2, 6, 1, 1, 2, 1, 2, 1, 1, 1, 2, 2, 2, 1, 3, 2, 5, 2, 8, 6, 2, 2, 2, 2, 3, 1, 3, 1, 2, 1, 3, 2, 2, 3, 1, 1, 1, 1, 1, 1, 1, 2, 2, 4, 1, 2, 1, 0, 1, 1, 1, 1, 0, 1, 2, 3, 1, 3, 3, 1, 0, 3, 0, 2, 3, 1, 0, 0, 0, 0, 2, 2, 2, 2, 1, 5, 2, 2, 5, 7, 5, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 3, 3, 1, 1, 2, 1, 3, 5, 1, 1, 2, 2, 1, 1, 1, 1, 2, 6, 3, 7, 2, 6, 1, 6, 2, 8, 0, 4, 2, 5, 2, 3, 3, 3, 1, 2, 8, 2, 0, 2, 1, 2, 1, 5, 2, 1, 3, 3, 0, 2, 1, 2, 1, 0, 1, 1, 3, 1, 1, 2, 3, 0, 0, 3, 2, 4, 1, 4, 1, 1, 3, 1, 1, 1, 1, 2, 2, 1, 3, 1, 4, 3, 3, 1, 1, 5, 2, 1, 1, 2, 1, 2, 1, 3, 2, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 2, 2, 1, 1, 1, 3, 2, 1, 0, 2, 1, 1, 1, 1, 0, 3, 0, 1, 1, 4, 2, 3, 0, 1, 0, 2, 2, 4, 2, 2, 3, 1, 1, 1, 1, 0, 1, 1, 2, 2, 1, 4, 6, 9, 6, 8, 5, 8, 7, 10, 4, 6, 4, 7, 7, 5, 5, 4, 5, 1, 2, 8, 4, 3, 3, 3, 0, 3, 1, 2, 1, 2, 2, 3, 3, 1, 3, 2, 2, 1, 2, 2, 2, 3, 4, 4, 3, 1, 2, 1, 3, 2, 2, 2, 2, 2, 3, 3, 1, 1, 2, 1, 3, 2, 2, 3, 2, 7, 0, 1, 4, 1, 2, 4, 2, 1, 2, 0, 2, 2, 3, 5, 5, 1, 4, 1, 1, 2, 2, 1, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 1, 1, 1, 4, 2, 2, 3, 1, 4, 4, 6, 1, 3, 1, 1, 2, 1, 1, 1, 5, 3, 1, 1, 1, 2, 3, 3, 1, 2, 2, 1, 4, 1, 2, 5, 2, 1, 1, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 4, 2, 1, 2, 2, 2, 6, 1, 1, 2, 1, 2, 1, 1, 1, 2, 2, 2, 1, 3, 2, 5, 2, 8, 6, 2, 2, 2, 2, 3, 1, 3, 1, 2, 1, 3, 2, 2, 3, 1, 1, 1, 1, 1, 1, 1, 2, 2, 4, 1, 2, 1, 0, 1, 1, 1, 1, 0, 1, 2, 3, 1, 3, 3, 1, 0, 3, 0, 2, 3, 1, 0, 0, 0, 0, 2, 2, 2, 2, 1, 5, 2, 2, 5, 7, 5, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 2, 2, 3, 3, 4, 7, 5, 7, 5, 3, 3, 7, 3, 13, 1, 3, 5, 3, 5, 3, 6, 5, 2, 2, 8, 4, 1, 2, 3, 2, 10, 2, 2, 0, 2, 3, 3, 1, 2, 3, 3, 1, 2, 3, 3, 4, 4, 2, 1, 2, 2, 3, 2, 2, 5, 3, 2, 3, 2, 1, 3, 3, 6, 2, 2, 5, 2, 5, 1, 1, 2, 4, 1, 11, 1, 3, 8, 4, 2, 1, 0, 4, 3, 3, 3, 2, 9, 1, 1, 4, 3, 2, 2, 2, 3, 4, 2, 3, 2, 4, 3, 2, 2, 3, 3, 4, 3, 3, 4, 2, 5, 4, 8, 7, 1, 2, 1, 3, 1, 2, 5, 1, 2, 2, 2, 2, 1, 3, 2, 2, 3, 3, 1, 9, 1, 5, 1, 3, 2, 2, 3, 2, 3, 3, 3, 1, 3, 3, 2, 2, 4, 5, 3, 3, 4, 3, 3, 3, 2, 2, 2, 4, 2, 2, 1, 3, 3, 3, 3, 3, 3, 2, 2, 3, 2, 3, 3, 2, 3, 2, 3, 1, 
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 2, 3, 5, 3, 3, 1, 2, 3, 2, 2, 1, 2, 3, 4, 3, 0, 3, 0, 2, 3, 1, 0, 0, 0, 0, 2, 3, 2, 4, 6, 4, 1, 1, 2, 1, 2, 1, 3, 2, 3, 2, 5, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 3, 0, 1, 1, 4, 2, 3, 0, 1, 0, 2, 2, 4, 2, 2, 3, 1, 1, 1, 1, 0, 1, 1, 2, 2, 1, 4, 6, 9, 6, 8, 5, 8, 7, 10, 4, 6, 4, 7, 7, 5, 5, 4, 5, 1, 2, 8, 4, 3, 3, 3, 0, 3, 1, 2, 1, 2, 2, 3, 3, 1, 3, 2, 2, 1, 2, 2, 2, 3, 4, 4, 3, 1, 2, 1, 3, 2, 2, 2, 2, 2, 3, 3, 1, 1, 2, 1, 3, 2, 2, 3, 2, 7, 0, 1, 4, 1, 2, 4, 2, 1, 2, 0, 2, 2, 3, 5, 5, 1, 4, 1, 1, 2, 2, 1, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 1, 1, 1, 4, 2, 2, 3, 1, 4, 4, 6, 1, 3, 1, 1, 2, 1, 1, 1, 5, 3, 1, 1, 1, 2, 3, 3, 1, 2, 2, 1, 4, 1, 2, 5, 2, 1, 1, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 4, 2, 1, 2, 2, 2, 6, 1, 1, 2, 1, 2, 1, 1, 1, 2, 2, 2, 1, 3, 2, 5, 2, 8, 6, 2, 2, 2, 2, 3, 1, 3, 1, 2, 1, 3, 2, 2, 3, 1, 1, 1, 1, 1, 1, 1, 2, 2, 4, 1, 2, 1, 0, 1, 1, 1, 1, 0, 1, 2, 3, 1, 3, 3, 1, 0, 3, 0, 2, 3, 1, 0, 0, 0, 0, 2, 2, 2, 2, 1, 5, 2, 2, 5, 7, 5, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 2, 2, 3, 3, 4, 7, 5, 7, 5, 3, 3, 7, 3, 13, 1, 3, 5, 3, 5, 3, 6, 5, 2, 2, 8, 4, 1, 2, 3, 2, 10, 2, 2, 0, 2, 3, 3, 1, 2, 3, 3, 1, 2, 3, 3, 4, 4, 2, 1, 2, 2, 3, 2, 2, 5, 3, 2, 3, 2, 1, 3, 3, 6, 2, 2, 5, 2, 5, 1, 1, 2, 4, 1, 11, 1, 3, 8, 4, 2, 1, 0, 4, 3, 3, 3, 2, 9, 1, 1, 4, 3, 2, 2, 2, 3, 4, 2, 3, 2, 4, 3, 2, 2, 3, 3, 4, 3, 3, 4, 2, 5, 4, 8, 7, 1, 2, 1, 3, 1, 2, 5, 1, 2, 2, 2, 2, 1, 3, 2, 2, 3, 3, 1, 9, 1, 5, 1, 3, 2, 2, 3, 2, 3, 3, 3, 1, 3, 3, 2, 2, 4, 5, 3, 3, 4, 3, 3, 3, 2, 2, 2, 4, 2, 2, 1, 3, 3, 3, 3, 3, 3, 2, 2, 3, 2, 3, 3, 2, 3, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 2, 3, 5, 3, 3, 1, 2, 3, 2, 2, 1, 2, 3, 4, 3, 0, 3, 0, 2, 3, 1, 0, 0, 0, 0, 2, 3, 2, 4, 6, 4, 1, 1, 2, 1, 2, 1, 3, 2, 3, 2, 11, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 5, 0, 0, 1, 1, 1, 0, 1, 1, 5, 4, 2, 0, 1, 0, 2, 2, 5, 2, 3, 5, 3, 2, 3, 5, 1, 1, 1, 3, 1, 1, 2, 2, 3, 1, 2, 3, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1, 1, 1, 5, 6, 0, 0, 0, 0, 0, 0, 1, 1, 1, 5, 6, 0, 0, 0, 0, 0, 0, 1, 1, 1, 8, 5, 1, 1, 1, 0, 1, 1, 5, 4, 2, 0, 1, 0, 2, 2, 5, 2, 3, 5, 3, 2, 3, 5, 1, 1, 1, 3, 1, 1, 2, 2, 3, 1, 2, 3, 1, } var _hcltok_index_offsets []int16 = []int16{ 0, 0, 2, 4, 7, 12, 16, 18, 56, 93, 135, 137, 142, 146, 147, 149, 151, 157, 162, 167, 169, 172, 174, 177, 181, 187, 190, 193, 199, 201, 203, 205, 208, 241, 243, 245, 248, 251, 254, 262, 270, 281, 289, 298, 306, 315, 324, 336, 343, 350, 358, 366, 375, 381, 389, 395, 403, 405, 408, 422, 428, 436, 440, 444, 446, 493, 495, 498, 500, 505, 511, 517, 522, 525, 529, 532, 535, 537, 540, 543, 546, 550, 555, 560, 564, 566, 569, 571, 575, 578, 581, 584, 587, 591, 596, 600, 602, 604, 607, 609, 613, 616, 619, 627, 631, 639, 655, 657, 662, 664, 668, 679, 683, 685, 688, 690, 693, 698, 702, 708, 714, 725, 730, 733, 736, 739, 742, 744, 748, 749, 752, 754, 784, 786, 788, 791, 795, 798, 802, 804, 806, 808, 814, 817, 820, 824, 826, 831, 836, 843, 846, 850, 854, 856, 859, 879, 881, 883, 890, 894, 896, 898, 900, 903, 907, 911, 913, 917, 920, 922, 927, 945, 984, 990, 993, 995, 997, 999, 1002, 1005, 1008, 1011, 1014, 1018, 1021, 1024, 1027, 1029, 1031, 1034, 1041, 1044, 1046, 1049, 1052, 1055, 1063, 1065, 1067, 1070, 1072, 1075, 1077, 1079, 1109, 1112, 1115, 1118, 1121, 1126, 1130, 1137, 1140, 1149, 1158, 1161, 1165, 1168, 1171, 1175, 1177, 1181, 1183, 1186, 1188, 1192, 1196, 1200, 1208, 1210, 1212, 1216, 1220, 1222, 1235, 1237, 1240, 1243, 1248, 1250, 1253, 1255, 1257, 1260, 1265, 1267, 1269, 1274, 1276, 1279, 1283, 1303, 1307, 1311, 1313, 1315, 1323, 1325, 1332, 1337, 1339, 
1343, 1346, 1349, 1352, 1356, 1359, 1362, 1366, 1376, 1382, 1385, 1388, 1398, 1418, 1424, 1427, 1429, 1433, 1435, 1438, 1440, 1444, 1446, 1448, 1452, 1454, 1458, 1463, 1469, 1471, 1473, 1476, 1478, 1482, 1489, 1492, 1494, 1497, 1501, 1531, 1536, 1538, 1541, 1545, 1554, 1559, 1567, 1571, 1579, 1583, 1591, 1595, 1606, 1608, 1614, 1617, 1625, 1629, 1634, 1639, 1644, 1646, 1649, 1664, 1668, 1670, 1673, 1675, 1724, 1727, 1734, 1737, 1739, 1743, 1747, 1750, 1754, 1756, 1759, 1761, 1763, 1765, 1767, 1771, 1773, 1775, 1778, 1782, 1796, 1799, 1803, 1806, 1811, 1822, 1827, 1830, 1860, 1864, 1867, 1872, 1874, 1878, 1881, 1884, 1886, 1891, 1893, 1899, 1904, 1910, 1912, 1932, 1940, 1943, 1945, 1963, 2001, 2003, 2006, 2008, 2013, 2016, 2045, 2047, 2049, 2051, 2053, 2056, 2058, 2062, 2065, 2067, 2070, 2072, 2074, 2077, 2079, 2081, 2083, 2085, 2087, 2090, 2093, 2096, 2109, 2111, 2115, 2118, 2120, 2125, 2128, 2142, 2145, 2154, 2156, 2161, 2165, 2166, 2168, 2170, 2176, 2181, 2186, 2188, 2191, 2193, 2196, 2200, 2206, 2209, 2212, 2218, 2220, 2222, 2224, 2227, 2260, 2262, 2264, 2267, 2270, 2273, 2281, 2289, 2300, 2308, 2317, 2325, 2334, 2343, 2355, 2362, 2369, 2377, 2385, 2394, 2400, 2408, 2414, 2422, 2424, 2427, 2441, 2447, 2455, 2459, 2463, 2465, 2512, 2514, 2517, 2519, 2524, 2530, 2536, 2541, 2544, 2548, 2551, 2554, 2556, 2559, 2562, 2565, 2569, 2574, 2579, 2583, 2585, 2588, 2590, 2594, 2597, 2600, 2603, 2606, 2610, 2615, 2619, 2621, 2623, 2626, 2628, 2632, 2635, 2638, 2646, 2650, 2658, 2674, 2676, 2681, 2683, 2687, 2698, 2702, 2704, 2707, 2709, 2712, 2717, 2721, 2727, 2733, 2744, 2749, 2752, 2755, 2758, 2761, 2763, 2767, 2768, 2771, 2773, 2803, 2805, 2807, 2810, 2814, 2817, 2821, 2823, 2825, 2827, 2833, 2836, 2839, 2843, 2845, 2850, 2855, 2862, 2865, 2869, 2873, 2875, 2878, 2898, 2900, 2902, 2909, 2913, 2915, 2917, 2919, 2922, 2926, 2930, 2932, 2936, 2939, 2941, 2946, 2964, 3003, 3009, 3012, 3014, 3016, 3018, 3021, 3024, 3027, 3030, 3033, 3037, 3040, 3043, 3046, 3048, 3050, 3053, 3060, 3063, 3065, 3068, 3071, 3074, 3082, 3084, 3086, 3089, 3091, 3094, 3096, 3098, 3128, 3131, 3134, 3137, 3140, 3145, 3149, 3156, 3159, 3168, 3177, 3180, 3184, 3187, 3190, 3194, 3196, 3200, 3202, 3205, 3207, 3211, 3215, 3219, 3227, 3229, 3231, 3235, 3239, 3241, 3254, 3256, 3259, 3262, 3267, 3269, 3272, 3274, 3276, 3279, 3284, 3286, 3288, 3293, 3295, 3298, 3302, 3322, 3326, 3330, 3332, 3334, 3342, 3344, 3351, 3356, 3358, 3362, 3365, 3368, 3371, 3375, 3378, 3381, 3385, 3395, 3401, 3404, 3407, 3417, 3437, 3443, 3446, 3448, 3452, 3454, 3457, 3459, 3463, 3465, 3467, 3471, 3473, 3475, 3481, 3484, 3489, 3494, 3500, 3510, 3518, 3530, 3537, 3547, 3553, 3565, 3571, 3589, 3592, 3600, 3606, 3616, 3623, 3630, 3638, 3646, 3649, 3654, 3674, 3680, 3683, 3687, 3691, 3695, 3707, 3710, 3715, 3716, 3722, 3729, 3735, 3738, 3741, 3745, 3749, 3752, 3755, 3760, 3764, 3770, 3776, 3779, 3783, 3786, 3789, 3794, 3797, 3800, 3806, 3810, 3813, 3817, 3820, 3823, 3827, 3831, 3838, 3841, 3844, 3850, 3853, 3860, 3862, 3864, 3867, 3876, 3881, 3895, 3899, 3903, 3918, 3924, 3927, 3930, 3932, 3937, 3943, 3947, 3955, 3961, 3971, 3974, 3977, 3982, 3986, 3989, 3992, 3995, 3999, 4004, 4008, 4012, 4015, 4020, 4025, 4028, 4034, 4038, 4044, 4049, 4053, 4057, 4065, 4068, 4076, 4082, 4092, 4103, 4106, 4109, 4111, 4115, 4117, 4120, 4131, 4135, 4138, 4141, 4144, 4147, 4149, 4153, 4157, 4160, 4164, 4169, 4172, 4182, 4184, 4225, 4231, 4235, 4238, 4241, 4245, 4248, 4252, 4256, 4261, 4263, 4267, 4271, 4274, 4277, 4282, 4291, 4295, 4300, 4305, 4309, 4316, 4320, 4323, 4327, 4330, 
4335, 4338, 4341, 4371, 4375, 4379, 4383, 4387, 4392, 4396, 4402, 4406, 4414, 4417, 4422, 4426, 4429, 4434, 4437, 4441, 4444, 4447, 4450, 4453, 4456, 4460, 4464, 4467, 4477, 4480, 4483, 4488, 4494, 4497, 4512, 4515, 4519, 4525, 4529, 4533, 4536, 4540, 4547, 4550, 4553, 4559, 4562, 4566, 4571, 4587, 4589, 4597, 4599, 4607, 4613, 4615, 4619, 4622, 4625, 4628, 4632, 4643, 4646, 4658, 4682, 4690, 4692, 4696, 4699, 4704, 4707, 4709, 4714, 4717, 4723, 4726, 4734, 4736, 4738, 4740, 4742, 4744, 4746, 4748, 4750, 4752, 4755, 4758, 4760, 4762, 4764, 4766, 4769, 4772, 4777, 4781, 4782, 4784, 4786, 4792, 4797, 4802, 4804, 4807, 4809, 4812, 4816, 4822, 4825, 4828, 4834, 4836, 4838, 4840, 4843, 4876, 4878, 4880, 4883, 4886, 4889, 4897, 4905, 4916, 4924, 4933, 4941, 4950, 4959, 4971, 4978, 4985, 4993, 5001, 5010, 5016, 5024, 5030, 5038, 5040, 5043, 5057, 5063, 5071, 5075, 5079, 5081, 5128, 5130, 5133, 5135, 5140, 5146, 5152, 5157, 5160, 5164, 5167, 5170, 5172, 5175, 5178, 5181, 5185, 5190, 5195, 5199, 5201, 5204, 5206, 5210, 5213, 5216, 5219, 5222, 5226, 5231, 5235, 5237, 5239, 5242, 5244, 5248, 5251, 5254, 5262, 5266, 5274, 5290, 5292, 5297, 5299, 5303, 5314, 5318, 5320, 5323, 5325, 5328, 5333, 5337, 5343, 5349, 5360, 5365, 5368, 5371, 5374, 5377, 5379, 5383, 5384, 5387, 5389, 5419, 5421, 5423, 5426, 5430, 5433, 5437, 5439, 5441, 5443, 5449, 5452, 5455, 5459, 5461, 5466, 5471, 5478, 5481, 5485, 5489, 5491, 5494, 5514, 5516, 5518, 5525, 5529, 5531, 5533, 5535, 5538, 5542, 5546, 5548, 5552, 5555, 5557, 5562, 5580, 5619, 5625, 5628, 5630, 5632, 5634, 5637, 5640, 5643, 5646, 5649, 5653, 5656, 5659, 5662, 5664, 5666, 5669, 5676, 5679, 5681, 5684, 5687, 5690, 5698, 5700, 5702, 5705, 5707, 5710, 5712, 5714, 5744, 5747, 5750, 5753, 5756, 5761, 5765, 5772, 5775, 5784, 5793, 5796, 5800, 5803, 5806, 5810, 5812, 5816, 5818, 5821, 5823, 5827, 5831, 5835, 5843, 5845, 5847, 5851, 5855, 5857, 5870, 5872, 5875, 5878, 5883, 5885, 5888, 5890, 5892, 5895, 5900, 5902, 5904, 5909, 5911, 5914, 5918, 5938, 5942, 5946, 5948, 5950, 5958, 5960, 5967, 5972, 5974, 5978, 5981, 5984, 5987, 5991, 5994, 5997, 6001, 6011, 6017, 6020, 6023, 6033, 6053, 6059, 6062, 6064, 6068, 6070, 6073, 6075, 6079, 6081, 6083, 6087, 6089, 6091, 6097, 6100, 6105, 6110, 6116, 6126, 6134, 6146, 6153, 6163, 6169, 6181, 6187, 6205, 6208, 6216, 6222, 6232, 6239, 6246, 6254, 6262, 6265, 6270, 6290, 6296, 6299, 6303, 6307, 6311, 6323, 6326, 6331, 6332, 6338, 6345, 6351, 6354, 6357, 6361, 6365, 6368, 6371, 6376, 6380, 6386, 6392, 6395, 6399, 6402, 6405, 6410, 6413, 6416, 6422, 6426, 6429, 6433, 6436, 6439, 6443, 6447, 6454, 6457, 6460, 6466, 6469, 6476, 6478, 6480, 6483, 6492, 6497, 6511, 6515, 6519, 6534, 6540, 6543, 6546, 6548, 6553, 6559, 6563, 6571, 6577, 6587, 6590, 6593, 6598, 6602, 6605, 6608, 6611, 6615, 6620, 6624, 6628, 6631, 6636, 6641, 6644, 6650, 6654, 6660, 6665, 6669, 6673, 6681, 6684, 6692, 6698, 6708, 6719, 6722, 6725, 6727, 6731, 6733, 6736, 6747, 6751, 6754, 6757, 6760, 6763, 6765, 6769, 6773, 6776, 6780, 6785, 6788, 6798, 6800, 6841, 6847, 6851, 6854, 6857, 6861, 6864, 6868, 6872, 6877, 6879, 6883, 6887, 6890, 6893, 6898, 6907, 6911, 6916, 6921, 6925, 6932, 6936, 6939, 6943, 6946, 6951, 6954, 6957, 6987, 6991, 6995, 6999, 7003, 7008, 7012, 7018, 7022, 7030, 7033, 7038, 7042, 7045, 7050, 7053, 7057, 7060, 7063, 7066, 7069, 7072, 7076, 7080, 7083, 7093, 7096, 7099, 7104, 7110, 7113, 7128, 7131, 7135, 7141, 7145, 7149, 7152, 7156, 7163, 7166, 7169, 7175, 7178, 7182, 7187, 7203, 7205, 7213, 7215, 7223, 7229, 7231, 7235, 7238, 7241, 7244, 7248, 
7259, 7262, 7274, 7298, 7306, 7308, 7312, 7315, 7320, 7323, 7325, 7330, 7333, 7339, 7342, 7407, 7410, 7412, 7414, 7416, 7418, 7420, 7423, 7428, 7431, 7434, 7436, 7476, 7478, 7480, 7482, 7487, 7491, 7492, 7494, 7496, 7503, 7510, 7517, 7519, 7521, 7523, 7526, 7529, 7535, 7538, 7543, 7550, 7555, 7558, 7562, 7569, 7601, 7650, 7665, 7678, 7683, 7685, 7689, 7720, 7726, 7728, 7749, 7769, 7771, 7783, 7794, 7797, 7800, 7801, 7803, 7805, 7807, 7810, 7812, 7820, 7822, 7824, 7826, 7836, 7845, 7848, 7852, 7856, 7859, 7861, 7863, 7865, 7867, 7869, 7879, 7888, 7891, 7895, 7899, 7902, 7904, 7906, 7908, 7910, 7912, 7954, 7994, 7996, 8001, 8005, 8006, 8008, 8010, 8017, 8024, 8031, 8033, 8035, 8037, 8040, 8043, 8049, 8052, 8057, 8064, 8069, 8072, 8076, 8083, 8115, 8164, 8179, 8192, 8197, 8199, 8203, 8234, 8240, 8242, 8263, 8283, } var _hcltok_indicies []int16 = []int16{ 1, 0, 3, 2, 3, 4, 2, 6, 8, 8, 7, 5, 9, 9, 7, 5, 7, 5, 10, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 39, 40, 41, 42, 43, 11, 11, 14, 14, 38, 0, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 39, 40, 41, 42, 43, 11, 11, 14, 14, 38, 0, 44, 45, 11, 11, 46, 13, 15, 16, 17, 16, 47, 48, 20, 49, 22, 23, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 37, 39, 63, 41, 64, 65, 66, 11, 11, 11, 14, 38, 0, 44, 0, 11, 11, 11, 11, 0, 11, 11, 11, 0, 11, 0, 11, 11, 0, 0, 0, 0, 0, 0, 11, 0, 0, 0, 0, 11, 11, 11, 11, 11, 0, 0, 11, 0, 0, 11, 0, 11, 0, 0, 11, 0, 0, 0, 11, 11, 11, 11, 11, 11, 0, 11, 11, 0, 11, 11, 0, 0, 0, 0, 0, 0, 11, 11, 0, 0, 11, 0, 11, 11, 11, 0, 67, 68, 69, 70, 14, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 0, 11, 0, 11, 0, 11, 11, 0, 11, 11, 0, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 0, 0, 0, 0, 0, 0, 0, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 0, 0, 0, 0, 0, 0, 0, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 0, 0, 0, 0, 0, 0, 0, 0, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 0, 0, 0, 0, 0, 0, 0, 0, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 0, 11, 0, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 0, 11, 11, 11, 0, 11, 0, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 16, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 14, 15, 133, 134, 135, 136, 137, 14, 16, 14, 0, 11, 0, 11, 11, 0, 0, 11, 0, 0, 0, 0, 11, 0, 0, 0, 0, 0, 11, 0, 0, 0, 0, 0, 11, 11, 11, 11, 11, 0, 0, 0, 11, 0, 0, 0, 11, 11, 11, 0, 0, 0, 11, 11, 0, 0, 0, 11, 11, 11, 0, 0, 0, 11, 11, 11, 11, 0, 11, 11, 11, 11, 0, 0, 0, 0, 0, 11, 11, 11, 11, 0, 0, 11, 11, 11, 0, 0, 11, 11, 11, 11, 0, 11, 11, 0, 11, 11, 0, 0, 0, 11, 11, 11, 0, 0, 0, 0, 11, 11, 11, 11, 11, 0, 0, 0, 0, 11, 0, 11, 11, 0, 11, 11, 0, 11, 0, 11, 11, 11, 0, 11, 11, 0, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 0, 138, 139, 140, 141, 142, 143, 144, 145, 146, 14, 147, 148, 149, 150, 151, 0, 11, 0, 0, 0, 0, 0, 11, 11, 0, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 0, 0, 11, 11, 11, 0, 0, 11, 0, 0, 11, 11, 11, 11, 11, 0, 0, 0, 0, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 0, 
152, 109, 153, 154, 155, 14, 156, 157, 16, 14, 0, 11, 11, 11, 11, 0, 0, 0, 11, 0, 0, 11, 11, 11, 0, 0, 0, 11, 11, 0, 119, 0, 16, 14, 14, 158, 0, 14, 0, 11, 16, 159, 160, 16, 161, 162, 16, 57, 163, 164, 165, 166, 167, 16, 168, 169, 170, 16, 171, 172, 173, 15, 174, 175, 176, 15, 177, 16, 14, 0, 0, 11, 11, 0, 0, 0, 11, 11, 11, 11, 0, 11, 11, 0, 0, 0, 0, 11, 11, 0, 0, 11, 11, 0, 0, 0, 0, 0, 0, 11, 11, 11, 0, 0, 0, 11, 0, 0, 0, 11, 11, 0, 11, 11, 11, 11, 0, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 0, 0, 0, 11, 11, 11, 11, 0, 178, 179, 0, 14, 0, 11, 0, 0, 11, 16, 180, 181, 182, 183, 57, 184, 185, 55, 186, 187, 188, 189, 190, 191, 192, 193, 194, 14, 0, 0, 11, 0, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 0, 11, 0, 0, 11, 0, 11, 0, 0, 11, 11, 11, 11, 0, 11, 11, 11, 0, 0, 11, 11, 11, 11, 0, 11, 11, 0, 0, 11, 11, 11, 11, 11, 0, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 201, 206, 207, 208, 209, 38, 0, 210, 211, 16, 212, 213, 214, 215, 216, 217, 218, 219, 220, 16, 14, 221, 222, 223, 224, 16, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 16, 144, 14, 240, 0, 11, 11, 11, 11, 11, 0, 0, 0, 11, 0, 11, 11, 0, 11, 0, 11, 11, 0, 0, 0, 11, 11, 11, 0, 0, 0, 11, 11, 11, 0, 0, 0, 0, 11, 0, 0, 11, 0, 0, 11, 11, 11, 0, 0, 11, 0, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 0, 0, 0, 11, 11, 0, 11, 11, 0, 11, 11, 0, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 0, 11, 0, 11, 0, 11, 11, 0, 11, 0, 11, 11, 0, 11, 0, 11, 0, 241, 212, 242, 243, 244, 245, 246, 247, 248, 249, 250, 98, 251, 16, 252, 253, 254, 16, 255, 129, 256, 257, 258, 259, 260, 261, 262, 263, 16, 0, 0, 0, 11, 11, 11, 0, 11, 11, 0, 11, 11, 0, 0, 0, 0, 0, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 0, 0, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 0, 0, 0, 0, 11, 11, 11, 0, 0, 0, 11, 0, 0, 0, 11, 11, 0, 11, 11, 11, 0, 11, 0, 0, 0, 11, 11, 0, 11, 11, 11, 0, 11, 11, 11, 0, 0, 0, 0, 11, 16, 181, 264, 265, 14, 16, 14, 0, 0, 11, 0, 11, 16, 264, 14, 0, 16, 266, 14, 0, 0, 11, 16, 267, 268, 269, 172, 270, 271, 16, 272, 273, 274, 14, 0, 0, 11, 11, 11, 0, 11, 11, 0, 11, 11, 11, 11, 0, 0, 11, 0, 0, 11, 11, 0, 11, 0, 16, 14, 0, 275, 16, 276, 0, 14, 0, 11, 0, 11, 277, 16, 278, 279, 0, 11, 0, 0, 0, 11, 11, 11, 11, 0, 280, 281, 282, 16, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 14, 0, 11, 11, 11, 0, 0, 0, 0, 11, 11, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 11, 0, 11, 0, 0, 0, 0, 0, 0, 11, 11, 11, 11, 11, 0, 0, 11, 0, 0, 0, 11, 0, 0, 11, 0, 0, 11, 0, 0, 11, 0, 0, 0, 11, 11, 11, 0, 0, 0, 11, 11, 11, 11, 0, 297, 16, 298, 16, 299, 300, 301, 302, 14, 0, 11, 11, 11, 11, 11, 0, 0, 0, 11, 0, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 0, 303, 16, 14, 0, 11, 304, 16, 100, 14, 0, 11, 305, 0, 14, 0, 11, 16, 306, 14, 0, 0, 11, 307, 0, 16, 308, 14, 0, 0, 11, 11, 11, 11, 0, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 0, 0, 11, 0, 11, 11, 11, 0, 11, 0, 11, 11, 11, 0, 0, 0, 0, 0, 0, 0, 11, 11, 11, 0, 11, 0, 0, 0, 11, 11, 11, 11, 0, 309, 310, 69, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 331, 332, 333, 334, 335, 336, 330, 0, 11, 11, 11, 11, 0, 11, 0, 11, 11, 0, 11, 11, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 11, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 0, 11, 11, 11, 11, 11, 0, 11, 11, 0, 11, 11, 11, 11, 
11, 11, 11, 0, 11, 11, 11, 0, 11, 11, 11, 11, 0, 11, 11, 11, 11, 0, 11, 11, 11, 11, 0, 11, 0, 11, 11, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 11, 0, 11, 0, 11, 11, 0, 11, 0, 337, 338, 339, 101, 102, 103, 104, 105, 340, 107, 108, 109, 110, 111, 112, 341, 342, 167, 343, 258, 117, 344, 119, 229, 269, 122, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 131, 355, 16, 14, 15, 16, 134, 135, 136, 137, 14, 14, 0, 11, 11, 0, 11, 11, 11, 11, 11, 11, 0, 0, 0, 11, 0, 11, 11, 11, 11, 0, 11, 11, 11, 0, 11, 11, 0, 11, 11, 11, 0, 0, 11, 11, 11, 0, 0, 11, 11, 0, 11, 0, 11, 0, 11, 11, 11, 0, 0, 11, 11, 0, 11, 11, 0, 11, 11, 11, 0, 356, 140, 142, 143, 144, 145, 146, 14, 357, 148, 358, 150, 359, 0, 11, 11, 0, 0, 0, 0, 11, 0, 0, 11, 11, 11, 11, 11, 0, 360, 109, 361, 154, 155, 14, 156, 157, 16, 14, 0, 11, 11, 11, 11, 0, 0, 0, 11, 16, 159, 160, 16, 362, 363, 219, 308, 163, 164, 165, 364, 167, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 175, 176, 15, 375, 16, 14, 0, 0, 0, 0, 11, 11, 11, 0, 0, 0, 0, 0, 11, 11, 0, 11, 11, 11, 0, 11, 11, 0, 0, 0, 11, 11, 0, 11, 11, 11, 11, 0, 11, 0, 11, 11, 11, 11, 11, 0, 0, 0, 0, 0, 11, 11, 11, 11, 11, 11, 0, 11, 0, 16, 180, 181, 376, 183, 57, 184, 185, 55, 186, 187, 377, 14, 190, 378, 192, 193, 194, 14, 0, 11, 11, 11, 11, 11, 11, 11, 0, 11, 11, 0, 11, 0, 379, 380, 197, 198, 199, 381, 201, 202, 382, 383, 384, 201, 206, 207, 208, 209, 38, 0, 210, 211, 16, 212, 213, 215, 385, 217, 386, 219, 220, 16, 14, 387, 222, 223, 224, 16, 225, 226, 227, 228, 229, 230, 231, 232, 388, 234, 235, 389, 237, 238, 239, 16, 144, 14, 240, 0, 0, 11, 0, 0, 11, 0, 11, 11, 11, 11, 11, 0, 11, 11, 0, 390, 391, 392, 393, 394, 395, 396, 397, 247, 398, 319, 399, 213, 400, 401, 402, 403, 404, 401, 405, 406, 407, 258, 408, 260, 409, 410, 271, 0, 11, 0, 11, 0, 11, 0, 11, 0, 11, 11, 0, 11, 0, 11, 11, 11, 0, 11, 11, 0, 0, 11, 11, 11, 0, 11, 0, 11, 0, 11, 11, 0, 11, 0, 11, 0, 11, 0, 11, 0, 11, 0, 0, 0, 11, 11, 11, 0, 11, 11, 0, 16, 267, 229, 411, 401, 412, 271, 16, 413, 414, 274, 14, 0, 11, 0, 11, 11, 11, 0, 0, 0, 11, 11, 0, 277, 16, 278, 415, 0, 11, 11, 0, 16, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 416, 14, 0, 0, 0, 11, 16, 417, 16, 265, 300, 301, 302, 14, 0, 0, 11, 419, 419, 419, 419, 418, 419, 419, 419, 418, 419, 418, 419, 419, 418, 418, 418, 418, 418, 418, 419, 418, 418, 418, 418, 419, 419, 419, 419, 419, 418, 418, 419, 418, 418, 419, 418, 419, 418, 418, 419, 418, 418, 418, 419, 419, 419, 419, 419, 419, 418, 419, 419, 418, 419, 419, 418, 418, 418, 418, 418, 418, 419, 419, 418, 418, 419, 418, 419, 419, 419, 418, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 418, 419, 418, 419, 418, 419, 419, 418, 419, 419, 418, 418, 418, 419, 418, 418, 418, 418, 418, 418, 418, 419, 418, 418, 418, 418, 418, 418, 418, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 418, 418, 418, 418, 418, 418, 418, 418, 419, 419, 419, 419, 419, 419, 419, 419, 419, 418, 418, 418, 418, 418, 418, 418, 418, 419, 419, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 418, 418, 418, 418, 418, 418, 418, 418, 419, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 418, 418, 418, 418, 418, 418, 418, 418, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 419, 418, 
419, 418, 419, 419, 418, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 418, 419, 419, 419, 418, 419, 418, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 425, 489, 490, 491, 492, 493, 494, 425, 470, 425, 418, 419, 418, 419, 419, 418, 418, 419, 418, 418, 418, 418, 419, 418, 418, 418, 418, 418, 419, 418, 418, 418, 418, 418, 419, 419, 419, 419, 419, 418, 418, 418, 419, 418, 418, 418, 419, 419, 419, 418, 418, 418, 419, 419, 418, 418, 418, 419, 419, 419, 418, 418, 418, 419, 419, 419, 419, 418, 419, 419, 419, 419, 418, 418, 418, 418, 418, 419, 419, 419, 419, 418, 418, 419, 419, 419, 418, 418, 419, 419, 419, 419, 418, 419, 419, 418, 419, 419, 418, 418, 418, 419, 419, 419, 418, 418, 418, 418, 419, 419, 419, 419, 419, 418, 418, 418, 418, 419, 418, 419, 419, 418, 419, 419, 418, 419, 418, 419, 419, 419, 418, 419, 419, 418, 418, 418, 419, 418, 418, 418, 418, 418, 418, 418, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 419, 418, 495, 496, 497, 498, 499, 500, 501, 502, 503, 425, 504, 505, 506, 507, 508, 418, 419, 418, 418, 418, 418, 418, 419, 419, 418, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 418, 418, 419, 419, 419, 418, 418, 419, 418, 418, 419, 419, 419, 419, 419, 418, 418, 418, 418, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 418, 509, 464, 510, 511, 512, 425, 513, 514, 470, 425, 418, 419, 419, 419, 419, 418, 418, 418, 419, 418, 418, 419, 419, 419, 418, 418, 418, 419, 419, 418, 475, 418, 470, 425, 425, 515, 418, 425, 418, 419, 470, 516, 517, 470, 518, 519, 470, 520, 521, 522, 523, 524, 525, 470, 526, 527, 528, 470, 529, 530, 531, 489, 532, 533, 534, 489, 535, 470, 425, 418, 418, 419, 419, 418, 418, 418, 419, 419, 419, 419, 418, 419, 419, 418, 418, 418, 418, 419, 419, 418, 418, 419, 419, 418, 418, 418, 418, 418, 418, 419, 419, 419, 418, 418, 418, 419, 418, 418, 418, 419, 419, 418, 419, 419, 419, 419, 418, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 418, 418, 418, 419, 419, 419, 419, 418, 536, 537, 418, 425, 418, 419, 418, 418, 419, 470, 538, 539, 540, 541, 520, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 425, 418, 418, 419, 418, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 418, 419, 418, 418, 419, 418, 419, 418, 418, 419, 419, 419, 419, 418, 419, 419, 419, 418, 418, 419, 419, 419, 419, 418, 419, 419, 418, 418, 419, 419, 419, 419, 419, 418, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 560, 566, 567, 568, 569, 565, 418, 570, 571, 470, 572, 573, 574, 575, 576, 577, 578, 579, 580, 470, 425, 581, 582, 583, 584, 470, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 470, 501, 425, 600, 418, 419, 419, 419, 419, 419, 418, 418, 418, 419, 418, 419, 419, 418, 419, 418, 419, 419, 418, 418, 418, 419, 419, 419, 418, 418, 418, 419, 419, 419, 418, 418, 418, 418, 419, 418, 418, 419, 418, 418, 419, 419, 419, 418, 418, 419, 418, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 418, 418, 418, 419, 419, 418, 419, 419, 418, 419, 419, 418, 419, 419, 418, 419, 419, 419, 419, 419, 419, 419, 418, 419, 418, 419, 418, 419, 419, 418, 419, 418, 419, 419, 418, 419, 418, 419, 418, 601, 572, 602, 603, 604, 605, 606, 607, 608, 609, 610, 453, 611, 470, 612, 613, 614, 470, 615, 485, 616, 617, 618, 619, 620, 621, 622, 623, 470, 418, 418, 418, 419, 419, 419, 
418, 419, 419, 418, 419, 419, 418, 418, 418, 418, 418, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 418, 418, 418, 419, 419, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 418, 418, 418, 418, 419, 419, 419, 418, 418, 418, 419, 418, 418, 418, 419, 419, 418, 419, 419, 419, 418, 419, 418, 418, 418, 419, 419, 418, 419, 419, 419, 418, 419, 419, 419, 418, 418, 418, 418, 419, 470, 539, 624, 625, 425, 470, 425, 418, 418, 419, 418, 419, 470, 624, 425, 418, 470, 626, 425, 418, 418, 419, 470, 627, 628, 629, 530, 630, 631, 470, 632, 633, 634, 425, 418, 418, 419, 419, 419, 418, 419, 419, 418, 419, 419, 419, 419, 418, 418, 419, 418, 418, 419, 419, 418, 419, 418, 470, 425, 418, 635, 470, 636, 418, 425, 418, 419, 418, 419, 637, 470, 638, 639, 418, 419, 418, 418, 418, 419, 419, 419, 419, 418, 640, 641, 642, 470, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 425, 418, 419, 419, 419, 418, 418, 418, 418, 419, 419, 418, 418, 419, 418, 418, 418, 418, 418, 418, 418, 419, 418, 419, 418, 418, 418, 418, 418, 418, 419, 419, 419, 419, 419, 418, 418, 419, 418, 418, 418, 419, 418, 418, 419, 418, 418, 419, 418, 418, 419, 418, 418, 418, 419, 419, 419, 418, 418, 418, 419, 419, 419, 419, 418, 657, 470, 658, 470, 659, 660, 661, 662, 425, 418, 419, 419, 419, 419, 419, 418, 418, 418, 419, 418, 418, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 418, 419, 419, 419, 419, 419, 418, 663, 470, 425, 418, 419, 664, 470, 455, 425, 418, 419, 665, 418, 425, 418, 419, 470, 666, 425, 418, 418, 419, 667, 418, 470, 668, 425, 418, 418, 419, 670, 669, 419, 419, 419, 419, 670, 669, 419, 670, 669, 670, 670, 419, 670, 669, 419, 670, 419, 670, 669, 419, 670, 419, 670, 419, 669, 670, 670, 670, 670, 670, 670, 670, 670, 669, 419, 419, 670, 670, 419, 670, 419, 670, 669, 670, 670, 670, 670, 670, 419, 670, 419, 670, 419, 670, 669, 670, 670, 419, 670, 419, 670, 669, 670, 670, 670, 670, 670, 419, 670, 419, 670, 669, 419, 419, 670, 419, 670, 669, 670, 670, 670, 419, 670, 419, 670, 419, 670, 419, 670, 669, 670, 419, 670, 419, 670, 669, 419, 670, 670, 670, 670, 419, 670, 419, 670, 419, 670, 419, 670, 419, 670, 419, 670, 669, 419, 670, 669, 670, 670, 670, 419, 670, 419, 670, 669, 670, 419, 670, 419, 670, 669, 419, 670, 670, 670, 670, 419, 670, 419, 670, 669, 419, 670, 419, 670, 419, 670, 669, 670, 670, 419, 670, 419, 670, 669, 419, 670, 419, 670, 419, 670, 419, 669, 670, 670, 670, 419, 670, 419, 670, 669, 419, 670, 669, 670, 670, 419, 670, 669, 670, 670, 670, 419, 670, 670, 670, 670, 670, 670, 419, 419, 670, 419, 670, 419, 670, 419, 670, 669, 670, 419, 670, 419, 670, 669, 419, 670, 669, 670, 419, 670, 669, 670, 419, 670, 669, 419, 419, 670, 669, 419, 670, 419, 670, 419, 670, 419, 670, 419, 670, 419, 669, 670, 670, 419, 670, 670, 670, 670, 419, 419, 670, 670, 670, 670, 670, 419, 670, 670, 670, 670, 670, 669, 419, 670, 670, 419, 670, 419, 669, 670, 670, 419, 670, 669, 419, 419, 670, 419, 669, 670, 670, 669, 419, 670, 419, 669, 670, 669, 419, 670, 419, 670, 419, 669, 670, 670, 669, 419, 670, 419, 670, 419, 670, 669, 670, 419, 670, 419, 670, 669, 419, 670, 669, 419, 419, 670, 669, 670, 419, 669, 670, 669, 419, 670, 419, 670, 419, 669, 670, 669, 419, 419, 670, 669, 670, 419, 670, 419, 670, 669, 419, 670, 419, 669, 670, 669, 419, 419, 670, 419, 669, 670, 669, 419, 419, 670, 669, 670, 419, 670, 669, 670, 419, 670, 669, 670, 419, 670, 419, 670, 419, 669, 670, 669, 419, 419, 670, 669, 
670, 419, 670, 419, 670, 669, 419, 670, 669, 670, 670, 419, 670, 419, 670, 669, 669, 419, 669, 419, 670, 670, 419, 670, 670, 670, 670, 670, 670, 670, 669, 419, 670, 670, 670, 419, 669, 670, 670, 670, 419, 670, 419, 670, 419, 670, 419, 670, 419, 670, 669, 419, 419, 670, 669, 670, 419, 670, 669, 419, 419, 670, 419, 419, 419, 670, 419, 670, 419, 670, 419, 670, 419, 669, 419, 670, 419, 670, 419, 669, 670, 669, 419, 670, 419, 669, 670, 419, 670, 670, 670, 669, 419, 670, 419, 419, 670, 419, 669, 670, 670, 669, 419, 670, 670, 670, 670, 419, 670, 419, 669, 670, 670, 670, 419, 670, 669, 670, 419, 670, 419, 670, 419, 670, 419, 670, 669, 670, 670, 419, 670, 669, 419, 670, 419, 670, 419, 669, 670, 670, 669, 419, 670, 419, 669, 670, 669, 419, 670, 669, 419, 670, 419, 670, 669, 670, 670, 670, 669, 419, 419, 419, 670, 669, 419, 670, 419, 669, 670, 669, 419, 670, 419, 670, 419, 669, 670, 670, 670, 669, 419, 670, 419, 669, 670, 670, 670, 670, 669, 419, 670, 419, 670, 669, 419, 419, 670, 419, 670, 669, 670, 419, 670, 419, 669, 670, 670, 669, 419, 670, 419, 670, 669, 419, 670, 670, 670, 419, 670, 419, 669, 419, 670, 669, 670, 419, 419, 670, 419, 670, 419, 669, 670, 670, 670, 670, 669, 419, 670, 419, 670, 419, 670, 419, 670, 419, 670, 669, 670, 670, 670, 419, 670, 419, 670, 419, 670, 419, 669, 670, 670, 419, 419, 670, 669, 670, 419, 670, 670, 669, 419, 670, 419, 670, 669, 419, 419, 670, 670, 670, 670, 419, 670, 419, 670, 419, 669, 670, 670, 419, 669, 670, 669, 419, 670, 419, 669, 670, 669, 419, 670, 419, 669, 670, 419, 670, 670, 669, 419, 670, 670, 419, 669, 670, 669, 419, 670, 419, 670, 669, 670, 419, 670, 419, 669, 670, 669, 419, 670, 419, 670, 419, 670, 419, 670, 419, 670, 669, 671, 669, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 674, 683, 684, 685, 686, 687, 674, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 702, 674, 703, 671, 683, 671, 704, 671, 669, 670, 670, 670, 670, 419, 669, 670, 670, 669, 419, 670, 669, 419, 419, 670, 669, 419, 670, 419, 669, 670, 669, 419, 419, 670, 419, 669, 670, 670, 669, 419, 670, 670, 670, 669, 419, 670, 419, 670, 670, 669, 419, 419, 670, 419, 669, 670, 669, 419, 670, 669, 419, 419, 670, 419, 670, 669, 419, 670, 419, 419, 670, 419, 670, 419, 669, 670, 670, 669, 419, 670, 670, 419, 670, 669, 419, 670, 419, 670, 669, 419, 670, 419, 669, 419, 670, 670, 670, 419, 670, 669, 670, 419, 670, 669, 419, 670, 669, 670, 419, 670, 669, 419, 670, 669, 419, 670, 419, 670, 669, 419, 670, 669, 419, 670, 669, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 676, 717, 718, 719, 720, 721, 718, 722, 723, 724, 725, 726, 727, 728, 729, 730, 671, 669, 670, 419, 670, 669, 670, 419, 670, 669, 670, 419, 670, 669, 670, 419, 670, 669, 419, 670, 419, 670, 669, 670, 419, 670, 669, 670, 419, 419, 419, 670, 669, 670, 419, 670, 669, 670, 670, 670, 670, 419, 670, 419, 669, 670, 669, 419, 419, 670, 419, 670, 669, 670, 419, 670, 669, 419, 670, 669, 670, 670, 419, 670, 669, 419, 670, 669, 670, 419, 670, 669, 419, 670, 669, 419, 670, 669, 419, 670, 669, 670, 669, 419, 419, 670, 669, 670, 419, 670, 669, 419, 670, 419, 669, 670, 669, 419, 674, 731, 671, 674, 732, 674, 733, 683, 671, 669, 670, 669, 419, 670, 669, 419, 674, 732, 683, 671, 669, 674, 734, 671, 683, 671, 669, 670, 669, 419, 674, 735, 692, 736, 718, 737, 730, 674, 738, 739, 740, 671, 683, 671, 669, 670, 669, 419, 670, 419, 670, 669, 419, 670, 419, 670, 419, 669, 670, 670, 669, 419, 670, 419, 670, 669, 419, 670, 669, 674, 683, 425, 669, 741, 674, 742, 683, 671, 669, 425, 670, 669, 419, 670, 669, 419, 743, 674, 
744, 745, 671, 669, 419, 670, 669, 670, 670, 669, 419, 419, 670, 419, 670, 669, 674, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 671, 683, 671, 669, 670, 419, 670, 670, 670, 670, 670, 670, 670, 419, 670, 419, 670, 670, 670, 670, 670, 670, 669, 419, 670, 670, 419, 670, 419, 669, 670, 419, 670, 670, 670, 419, 670, 670, 419, 670, 670, 419, 670, 670, 419, 670, 670, 669, 419, 674, 757, 674, 733, 758, 759, 760, 671, 683, 671, 669, 670, 669, 419, 670, 670, 670, 419, 670, 670, 670, 419, 670, 419, 670, 669, 419, 419, 419, 419, 670, 670, 419, 419, 419, 419, 419, 670, 670, 670, 670, 670, 670, 670, 419, 670, 419, 670, 419, 669, 670, 670, 670, 419, 670, 419, 670, 669, 683, 425, 761, 674, 683, 425, 670, 669, 419, 762, 674, 763, 683, 425, 670, 669, 419, 670, 419, 764, 683, 671, 669, 425, 670, 669, 419, 674, 765, 671, 683, 671, 669, 670, 669, 419, 766, 766, 766, 768, 769, 770, 766, 767, 767, 771, 768, 771, 769, 771, 767, 772, 773, 772, 775, 774, 776, 774, 777, 774, 779, 778, 781, 782, 780, 781, 783, 780, 785, 784, 786, 784, 787, 784, 789, 788, 791, 792, 790, 791, 793, 790, 795, 795, 795, 795, 794, 795, 795, 795, 794, 795, 794, 795, 795, 794, 794, 794, 794, 794, 794, 795, 794, 794, 794, 794, 795, 795, 795, 795, 795, 794, 794, 795, 794, 794, 795, 794, 795, 794, 794, 795, 794, 794, 794, 795, 795, 795, 795, 795, 795, 794, 795, 795, 794, 795, 795, 794, 794, 794, 794, 794, 794, 795, 795, 794, 794, 795, 794, 795, 795, 795, 794, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 794, 795, 794, 795, 794, 795, 795, 794, 795, 795, 794, 794, 794, 795, 794, 794, 794, 794, 794, 794, 794, 795, 794, 794, 794, 794, 794, 794, 794, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 794, 794, 794, 794, 794, 794, 794, 794, 795, 795, 795, 795, 795, 795, 795, 795, 795, 794, 794, 794, 794, 794, 794, 794, 794, 795, 795, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 794, 794, 794, 794, 794, 794, 794, 794, 795, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 794, 794, 794, 794, 794, 794, 794, 794, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 795, 794, 795, 794, 795, 795, 794, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 794, 795, 795, 795, 794, 795, 794, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 841, 842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 860, 861, 862, 863, 864, 801, 865, 866, 867, 868, 869, 870, 801, 846, 801, 794, 795, 794, 795, 795, 794, 794, 795, 794, 794, 794, 794, 795, 794, 794, 794, 794, 794, 795, 794, 794, 794, 794, 794, 795, 795, 795, 795, 795, 794, 794, 794, 795, 794, 794, 794, 795, 795, 795, 794, 794, 794, 795, 795, 794, 794, 794, 795, 795, 795, 794, 794, 794, 795, 795, 795, 795, 794, 795, 795, 795, 795, 794, 794, 794, 794, 794, 795, 795, 795, 795, 794, 794, 795, 795, 795, 794, 794, 795, 795, 795, 795, 794, 795, 795, 794, 795, 795, 794, 794, 794, 795, 795, 795, 794, 794, 794, 794, 795, 795, 795, 795, 795, 794, 794, 794, 794, 795, 794, 795, 795, 794, 795, 795, 794, 795, 794, 795, 795, 795, 794, 795, 795, 794, 794, 794, 795, 794, 794, 794, 794, 794, 794, 794, 795, 795, 795, 795, 794, 
795, 795, 795, 795, 795, 795, 795, 794, 871, 872, 873, 874, 875, 876, 877, 878, 879, 801, 880, 881, 882, 883, 884, 794, 795, 794, 794, 794, 794, 794, 795, 795, 794, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 794, 794, 795, 795, 795, 794, 794, 795, 794, 794, 795, 795, 795, 795, 795, 794, 794, 794, 794, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 794, 885, 840, 886, 887, 888, 801, 889, 890, 846, 801, 794, 795, 795, 795, 795, 794, 794, 794, 795, 794, 794, 795, 795, 795, 794, 794, 794, 795, 795, 794, 851, 794, 846, 801, 801, 891, 794, 801, 794, 795, 846, 892, 893, 846, 894, 895, 846, 896, 897, 898, 899, 900, 901, 846, 902, 903, 904, 846, 905, 906, 907, 865, 908, 909, 910, 865, 911, 846, 801, 794, 794, 795, 795, 794, 794, 794, 795, 795, 795, 795, 794, 795, 795, 794, 794, 794, 794, 795, 795, 794, 794, 795, 795, 794, 794, 794, 794, 794, 794, 795, 795, 795, 794, 794, 794, 795, 794, 794, 794, 795, 795, 794, 795, 795, 795, 795, 794, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 794, 794, 794, 795, 795, 795, 795, 794, 912, 913, 794, 801, 794, 795, 794, 794, 795, 846, 914, 915, 916, 917, 896, 918, 919, 920, 921, 922, 923, 924, 925, 926, 927, 928, 929, 801, 794, 794, 795, 794, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 794, 795, 794, 794, 795, 794, 795, 794, 794, 795, 795, 795, 795, 794, 795, 795, 795, 794, 794, 795, 795, 795, 795, 794, 795, 795, 794, 794, 795, 795, 795, 795, 795, 794, 930, 931, 932, 933, 934, 935, 936, 937, 938, 939, 940, 936, 942, 943, 944, 945, 941, 794, 946, 947, 846, 948, 949, 950, 951, 952, 953, 954, 955, 956, 846, 801, 957, 958, 959, 960, 846, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970, 971, 972, 973, 974, 975, 846, 877, 801, 976, 794, 795, 795, 795, 795, 795, 794, 794, 794, 795, 794, 795, 795, 794, 795, 794, 795, 795, 794, 794, 794, 795, 795, 795, 794, 794, 794, 795, 795, 795, 794, 794, 794, 794, 795, 794, 794, 795, 794, 794, 795, 795, 795, 794, 794, 795, 794, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 794, 794, 794, 795, 795, 794, 795, 795, 794, 795, 795, 794, 795, 795, 794, 795, 795, 795, 795, 795, 795, 795, 794, 795, 794, 795, 794, 795, 795, 794, 795, 794, 795, 795, 794, 795, 794, 795, 794, 977, 948, 978, 979, 980, 981, 982, 983, 984, 985, 986, 829, 987, 846, 988, 989, 990, 846, 991, 861, 992, 993, 994, 995, 996, 997, 998, 999, 846, 794, 794, 794, 795, 795, 795, 794, 795, 795, 794, 795, 795, 794, 794, 794, 794, 794, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 794, 794, 794, 795, 795, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 794, 794, 794, 794, 795, 795, 795, 794, 794, 794, 795, 794, 794, 794, 795, 795, 794, 795, 795, 795, 794, 795, 794, 794, 794, 795, 795, 794, 795, 795, 795, 794, 795, 795, 795, 794, 794, 794, 794, 795, 846, 915, 1000, 1001, 801, 846, 801, 794, 794, 795, 794, 795, 846, 1000, 801, 794, 846, 1002, 801, 794, 794, 795, 846, 1003, 1004, 1005, 906, 1006, 1007, 846, 1008, 1009, 1010, 801, 794, 794, 795, 795, 795, 794, 795, 795, 794, 795, 795, 795, 795, 794, 794, 795, 794, 794, 795, 795, 794, 795, 794, 846, 801, 794, 1011, 846, 1012, 794, 801, 794, 795, 794, 795, 1013, 846, 1014, 1015, 794, 795, 794, 794, 794, 795, 795, 795, 795, 794, 1016, 1017, 1018, 846, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1032, 801, 794, 795, 795, 795, 794, 794, 794, 794, 795, 795, 794, 794, 795, 794, 794, 794, 794, 794, 794, 794, 795, 794, 795, 794, 794, 794, 794, 794, 794, 795, 795, 795, 795, 
795, 794, 794, 795, 794, 794, 794, 795, 794, 794, 795, 794, 794, 795, 794, 794, 795, 794, 794, 794, 795, 795, 795, 794, 794, 794, 795, 795, 795, 795, 794, 1033, 846, 1034, 846, 1035, 1036, 1037, 1038, 801, 794, 795, 795, 795, 795, 795, 794, 794, 794, 795, 794, 794, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 794, 795, 795, 795, 795, 795, 794, 1039, 846, 801, 794, 795, 1040, 846, 831, 801, 794, 795, 1041, 794, 801, 794, 795, 846, 1042, 801, 794, 794, 795, 1043, 794, 846, 1044, 801, 794, 794, 795, 1046, 1045, 795, 795, 795, 795, 1046, 1045, 795, 1046, 1045, 1046, 1046, 795, 1046, 1045, 795, 1046, 795, 1046, 1045, 795, 1046, 795, 1046, 795, 1045, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1045, 795, 795, 1046, 1046, 795, 1046, 795, 1046, 1045, 1046, 1046, 1046, 1046, 1046, 795, 1046, 795, 1046, 795, 1046, 1045, 1046, 1046, 795, 1046, 795, 1046, 1045, 1046, 1046, 1046, 1046, 1046, 795, 1046, 795, 1046, 1045, 795, 795, 1046, 795, 1046, 1045, 1046, 1046, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 1045, 1046, 795, 1046, 795, 1046, 1045, 795, 1046, 1046, 1046, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 1045, 795, 1046, 1045, 1046, 1046, 1046, 795, 1046, 795, 1046, 1045, 1046, 795, 1046, 795, 1046, 1045, 795, 1046, 1046, 1046, 1046, 795, 1046, 795, 1046, 1045, 795, 1046, 795, 1046, 795, 1046, 1045, 1046, 1046, 795, 1046, 795, 1046, 1045, 795, 1046, 795, 1046, 795, 1046, 795, 1045, 1046, 1046, 1046, 795, 1046, 795, 1046, 1045, 795, 1046, 1045, 1046, 1046, 795, 1046, 1045, 1046, 1046, 1046, 795, 1046, 1046, 1046, 1046, 1046, 1046, 795, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 1045, 1046, 795, 1046, 795, 1046, 1045, 795, 1046, 1045, 1046, 795, 1046, 1045, 1046, 795, 1046, 1045, 795, 795, 1046, 1045, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1045, 1046, 1046, 795, 1046, 1046, 1046, 1046, 795, 795, 1046, 1046, 1046, 1046, 1046, 795, 1046, 1046, 1046, 1046, 1046, 1045, 795, 1046, 1046, 795, 1046, 795, 1045, 1046, 1046, 795, 1046, 1045, 795, 795, 1046, 795, 1045, 1046, 1046, 1045, 795, 1046, 795, 1045, 1046, 1045, 795, 1046, 795, 1046, 795, 1045, 1046, 1046, 1045, 795, 1046, 795, 1046, 795, 1046, 1045, 1046, 795, 1046, 795, 1046, 1045, 795, 1046, 1045, 795, 795, 1046, 1045, 1046, 795, 1045, 1046, 1045, 795, 1046, 795, 1046, 795, 1045, 1046, 1045, 795, 795, 1046, 1045, 1046, 795, 1046, 795, 1046, 1045, 795, 1046, 795, 1045, 1046, 1045, 795, 795, 1046, 795, 1045, 1046, 1045, 795, 795, 1046, 1045, 1046, 795, 1046, 1045, 1046, 795, 1046, 1045, 1046, 795, 1046, 795, 1046, 795, 1045, 1046, 1045, 795, 795, 1046, 1045, 1046, 795, 1046, 795, 1046, 1045, 795, 1046, 1045, 1046, 1046, 795, 1046, 795, 1046, 1045, 1045, 795, 1045, 795, 1046, 1046, 795, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1045, 795, 1046, 1046, 1046, 795, 1045, 1046, 1046, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 1045, 795, 795, 1046, 1045, 1046, 795, 1046, 1045, 795, 795, 1046, 795, 795, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1045, 795, 1046, 795, 1046, 795, 1045, 1046, 1045, 795, 1046, 795, 1045, 1046, 795, 1046, 1046, 1046, 1045, 795, 1046, 795, 795, 1046, 795, 1045, 1046, 1046, 1045, 795, 1046, 1046, 1046, 1046, 795, 1046, 795, 1045, 1046, 1046, 1046, 795, 1046, 1045, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 1045, 1046, 1046, 795, 1046, 1045, 795, 1046, 795, 1046, 795, 1045, 1046, 1046, 1045, 795, 1046, 795, 1045, 1046, 1045, 795, 1046, 
1045, 795, 1046, 795, 1046, 1045, 1046, 1046, 1046, 1045, 795, 795, 795, 1046, 1045, 795, 1046, 795, 1045, 1046, 1045, 795, 1046, 795, 1046, 795, 1045, 1046, 1046, 1046, 1045, 795, 1046, 795, 1045, 1046, 1046, 1046, 1046, 1045, 795, 1046, 795, 1046, 1045, 795, 795, 1046, 795, 1046, 1045, 1046, 795, 1046, 795, 1045, 1046, 1046, 1045, 795, 1046, 795, 1046, 1045, 795, 1046, 1046, 1046, 795, 1046, 795, 1045, 795, 1046, 1045, 1046, 795, 795, 1046, 795, 1046, 795, 1045, 1046, 1046, 1046, 1046, 1045, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 1045, 1046, 1046, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1045, 1046, 1046, 795, 795, 1046, 1045, 1046, 795, 1046, 1046, 1045, 795, 1046, 795, 1046, 1045, 795, 795, 1046, 1046, 1046, 1046, 795, 1046, 795, 1046, 795, 1045, 1046, 1046, 795, 1045, 1046, 1045, 795, 1046, 795, 1045, 1046, 1045, 795, 1046, 795, 1045, 1046, 795, 1046, 1046, 1045, 795, 1046, 1046, 795, 1045, 1046, 1045, 795, 1046, 795, 1046, 1045, 1046, 795, 1046, 795, 1045, 1046, 1045, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 795, 1046, 1045, 1047, 1045, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1050, 1059, 1060, 1061, 1062, 1063, 1050, 1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1050, 1079, 1047, 1059, 1047, 1080, 1047, 1045, 1046, 1046, 1046, 1046, 795, 1045, 1046, 1046, 1045, 795, 1046, 1045, 795, 795, 1046, 1045, 795, 1046, 795, 1045, 1046, 1045, 795, 795, 1046, 795, 1045, 1046, 1046, 1045, 795, 1046, 1046, 1046, 1045, 795, 1046, 795, 1046, 1046, 1045, 795, 795, 1046, 795, 1045, 1046, 1045, 795, 1046, 1045, 795, 795, 1046, 795, 1046, 1045, 795, 1046, 795, 795, 1046, 795, 1046, 795, 1045, 1046, 1046, 1045, 795, 1046, 1046, 795, 1046, 1045, 795, 1046, 795, 1046, 1045, 795, 1046, 795, 1045, 795, 1046, 1046, 1046, 795, 1046, 1045, 1046, 795, 1046, 1045, 795, 1046, 1045, 1046, 795, 1046, 1045, 795, 1046, 1045, 795, 1046, 795, 1046, 1045, 795, 1046, 1045, 795, 1046, 1045, 1081, 1082, 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1092, 1052, 1093, 1094, 1095, 1096, 1097, 1094, 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1047, 1045, 1046, 795, 1046, 1045, 1046, 795, 1046, 1045, 1046, 795, 1046, 1045, 1046, 795, 1046, 1045, 795, 1046, 795, 1046, 1045, 1046, 795, 1046, 1045, 1046, 795, 795, 795, 1046, 1045, 1046, 795, 1046, 1045, 1046, 1046, 1046, 1046, 795, 1046, 795, 1045, 1046, 1045, 795, 795, 1046, 795, 1046, 1045, 1046, 795, 1046, 1045, 795, 1046, 1045, 1046, 1046, 795, 1046, 1045, 795, 1046, 1045, 1046, 795, 1046, 1045, 795, 1046, 1045, 795, 1046, 1045, 795, 1046, 1045, 1046, 1045, 795, 795, 1046, 1045, 1046, 795, 1046, 1045, 795, 1046, 795, 1045, 1046, 1045, 795, 1050, 1107, 1047, 1050, 1108, 1050, 1109, 1059, 1047, 1045, 1046, 1045, 795, 1046, 1045, 795, 1050, 1108, 1059, 1047, 1045, 1050, 1110, 1047, 1059, 1047, 1045, 1046, 1045, 795, 1050, 1111, 1068, 1112, 1094, 1113, 1106, 1050, 1114, 1115, 1116, 1047, 1059, 1047, 1045, 1046, 1045, 795, 1046, 795, 1046, 1045, 795, 1046, 795, 1046, 795, 1045, 1046, 1046, 1045, 795, 1046, 795, 1046, 1045, 795, 1046, 1045, 1050, 1059, 801, 1045, 1117, 1050, 1118, 1059, 1047, 1045, 801, 1046, 1045, 795, 1046, 1045, 795, 1119, 1050, 1120, 1121, 1047, 1045, 795, 1046, 1045, 1046, 1046, 1045, 795, 795, 1046, 795, 1046, 1045, 1050, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1047, 1059, 1047, 1045, 1046, 795, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 795, 1046, 795, 1046, 1046, 1046, 1046, 1046, 1046, 1045, 795, 1046, 1046, 795, 1046, 795, 
1045, 1046, 795, 1046, 1046, 1046, 795, 1046, 1046, 795, 1046, 1046, 795, 1046, 1046, 795, 1046, 1046, 1045, 795, 1050, 1133, 1050, 1109, 1134, 1135, 1136, 1047, 1059, 1047, 1045, 1046, 1045, 795, 1046, 1046, 1046, 795, 1046, 1046, 1046, 795, 1046, 795, 1046, 1045, 795, 795, 795, 795, 1046, 1046, 795, 795, 795, 795, 795, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 795, 1046, 795, 1046, 795, 1045, 1046, 1046, 1046, 795, 1046, 795, 1046, 1045, 1059, 801, 1137, 1050, 1059, 801, 1046, 1045, 795, 1138, 1050, 1139, 1059, 801, 1046, 1045, 795, 1046, 795, 1140, 1059, 1047, 1045, 801, 1046, 1045, 795, 1050, 1141, 1047, 1059, 1047, 1045, 1046, 1045, 795, 1142, 1143, 1144, 1142, 1145, 1146, 1147, 1149, 1150, 1151, 1152, 1153, 1154, 670, 670, 419, 1155, 1156, 1157, 1158, 670, 1161, 1162, 1164, 1165, 1166, 1160, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1175, 1176, 1177, 1178, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1188, 1189, 1190, 1191, 1192, 1193, 670, 1148, 7, 1148, 419, 1148, 419, 1160, 1163, 1187, 1194, 1159, 1142, 1142, 1195, 1143, 1196, 1198, 1197, 4, 1147, 1200, 1197, 1201, 1197, 2, 1147, 1197, 6, 8, 8, 7, 1202, 1203, 1204, 1197, 1205, 1206, 1197, 1207, 1197, 419, 419, 1209, 1210, 489, 470, 1211, 470, 1212, 1213, 1214, 1215, 1216, 1217, 1218, 1219, 1220, 1221, 1222, 544, 1223, 520, 1224, 1225, 1226, 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 419, 419, 419, 425, 565, 1208, 1236, 1197, 1237, 1197, 670, 1238, 419, 419, 419, 670, 1238, 670, 670, 419, 1238, 419, 1238, 419, 1238, 419, 670, 670, 670, 670, 670, 1238, 419, 670, 670, 670, 419, 670, 419, 1238, 419, 670, 670, 670, 670, 419, 1238, 670, 419, 670, 419, 670, 419, 670, 670, 419, 670, 1238, 419, 670, 419, 670, 419, 670, 1238, 670, 419, 1238, 670, 419, 670, 419, 1238, 670, 670, 670, 670, 670, 1238, 419, 419, 670, 419, 670, 1238, 670, 419, 1238, 670, 670, 1238, 419, 419, 670, 419, 670, 419, 670, 1238, 1239, 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247, 1248, 1249, 715, 1250, 1251, 1252, 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260, 1261, 1260, 1262, 1263, 1264, 1265, 1266, 671, 1238, 1267, 1268, 1269, 1270, 1271, 1272, 1273, 1274, 1275, 1276, 1277, 1278, 1279, 1280, 1281, 1282, 1283, 1284, 1285, 725, 1286, 1287, 1288, 692, 1289, 1290, 1291, 1292, 1293, 1294, 671, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 674, 1303, 671, 674, 1304, 1305, 1306, 1307, 683, 1238, 1308, 1309, 1310, 1311, 703, 1312, 1313, 683, 1314, 1315, 1316, 1317, 1318, 671, 1238, 1319, 1278, 1320, 1321, 1322, 683, 1323, 1324, 674, 671, 683, 425, 1238, 1288, 671, 674, 683, 425, 683, 425, 1325, 683, 1238, 425, 674, 1326, 1327, 674, 1328, 1329, 681, 1330, 1331, 1332, 1333, 1334, 1284, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1303, 1347, 674, 683, 425, 1238, 1348, 1349, 683, 671, 1238, 425, 671, 1238, 674, 1350, 731, 1351, 1352, 1353, 1354, 1355, 1356, 1357, 1358, 671, 1359, 1360, 1361, 1362, 1363, 1364, 671, 683, 1238, 1366, 1367, 1368, 1369, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1372, 1378, 1379, 1380, 1381, 1365, 1377, 1365, 1238, 1365, 1238, 1382, 1382, 1383, 1384, 1385, 1386, 1387, 1388, 1389, 1390, 1387, 767, 1391, 1391, 1391, 1392, 1391, 1391, 768, 769, 770, 1391, 767, 1382, 1382, 1393, 1396, 1397, 1395, 1398, 1399, 1398, 1400, 1391, 1402, 1401, 1396, 1403, 1395, 1405, 1404, 1394, 1394, 1394, 768, 769, 770, 1394, 767, 767, 1406, 773, 1406, 1407, 1406, 775, 1408, 1409, 1410, 1411, 1412, 1413, 1414, 1411, 776, 775, 1408, 1415, 1415, 777, 779, 1416, 1415, 776, 1418, 1419, 1417, 1418, 1419, 1420, 1417, 775, 1408, 
1421, 1415, 775, 1408, 1415, 1423, 1422, 1425, 1424, 776, 1426, 777, 1426, 779, 1426, 785, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1430, 786, 785, 1427, 1434, 1434, 787, 789, 1435, 1434, 786, 1437, 1438, 1436, 1437, 1438, 1439, 1436, 785, 1427, 1440, 1434, 785, 1427, 1434, 1442, 1441, 1444, 1443, 786, 1445, 787, 1445, 789, 1445, 795, 1448, 1449, 1451, 1452, 1453, 1447, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1475, 1476, 1477, 1478, 1479, 1480, 795, 795, 1446, 1447, 1450, 1474, 1481, 1446, 1046, 795, 795, 1483, 1484, 865, 846, 1485, 846, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 920, 1497, 896, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 795, 795, 795, 801, 941, 1482, 1046, 1510, 795, 795, 795, 1046, 1510, 1046, 1046, 795, 1510, 795, 1510, 795, 1510, 795, 1046, 1046, 1046, 1046, 1046, 1510, 795, 1046, 1046, 1046, 795, 1046, 795, 1510, 795, 1046, 1046, 1046, 1046, 795, 1510, 1046, 795, 1046, 795, 1046, 795, 1046, 1046, 795, 1046, 1510, 795, 1046, 795, 1046, 795, 1046, 1510, 1046, 795, 1510, 1046, 795, 1046, 795, 1510, 1046, 1046, 1046, 1046, 1046, 1510, 795, 795, 1046, 795, 1046, 1510, 1046, 795, 1510, 1046, 1046, 1510, 795, 795, 1046, 795, 1046, 795, 1046, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1091, 1522, 1523, 1524, 1525, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1532, 1534, 1535, 1536, 1537, 1538, 1047, 1510, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1101, 1558, 1559, 1560, 1068, 1561, 1562, 1563, 1564, 1565, 1566, 1047, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1050, 1575, 1047, 1050, 1576, 1577, 1578, 1579, 1059, 1510, 1580, 1581, 1582, 1583, 1079, 1584, 1585, 1059, 1586, 1587, 1588, 1589, 1590, 1047, 1510, 1591, 1550, 1592, 1593, 1594, 1059, 1595, 1596, 1050, 1047, 1059, 801, 1510, 1560, 1047, 1050, 1059, 801, 1059, 801, 1597, 1059, 1510, 801, 1050, 1598, 1599, 1050, 1600, 1601, 1057, 1602, 1603, 1604, 1605, 1606, 1556, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1575, 1619, 1050, 1059, 801, 1510, 1620, 1621, 1059, 1047, 1510, 801, 1047, 1510, 1050, 1622, 1107, 1623, 1624, 1625, 1626, 1627, 1628, 1629, 1630, 1047, 1631, 1632, 1633, 1634, 1635, 1636, 1047, 1059, 1510, 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1644, 1650, 1651, 1652, 1653, 1637, 1649, 1637, 1510, 1637, 1510, } var _hcltok_trans_targs []int16 = []int16{ 1459, 1459, 2, 3, 1459, 1459, 4, 1467, 5, 6, 8, 9, 286, 12, 13, 14, 15, 16, 287, 288, 19, 289, 21, 22, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 328, 348, 353, 127, 128, 129, 356, 151, 371, 375, 1459, 10, 11, 17, 18, 20, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 64, 105, 120, 131, 154, 170, 283, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 121, 122, 123, 124, 125, 126, 130, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 152, 153, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 171, 203, 227, 230, 231, 233, 242, 243, 246, 250, 268, 275, 277, 279, 281, 172, 173, 174, 175, 176, 177, 178, 
179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 228, 229, 232, 234, 235, 236, 237, 238, 239, 240, 241, 244, 245, 247, 248, 249, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 269, 270, 271, 272, 273, 274, 276, 278, 280, 282, 284, 285, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 349, 350, 351, 352, 354, 355, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 372, 373, 374, 376, 382, 404, 409, 411, 413, 377, 378, 379, 380, 381, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 405, 406, 407, 408, 410, 412, 414, 1459, 1471, 1459, 437, 438, 439, 440, 417, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 419, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 418, 504, 505, 506, 507, 508, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 525, 526, 527, 528, 529, 530, 534, 536, 537, 538, 539, 434, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 556, 557, 559, 560, 561, 562, 563, 564, 432, 565, 566, 567, 568, 569, 570, 571, 572, 573, 575, 607, 631, 634, 635, 637, 646, 647, 650, 654, 672, 532, 679, 681, 683, 685, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 632, 633, 636, 638, 639, 640, 641, 642, 643, 644, 645, 648, 649, 651, 652, 653, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 673, 674, 675, 676, 677, 678, 680, 682, 684, 686, 688, 689, 1459, 1459, 690, 827, 828, 759, 829, 830, 831, 832, 833, 834, 788, 835, 724, 836, 837, 838, 839, 840, 841, 842, 843, 744, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 769, 854, 856, 857, 858, 859, 860, 861, 862, 863, 864, 865, 702, 866, 867, 868, 869, 870, 871, 872, 873, 874, 740, 875, 876, 877, 878, 879, 810, 881, 882, 885, 887, 888, 889, 890, 891, 892, 895, 896, 898, 899, 900, 902, 903, 904, 905, 906, 907, 908, 909, 910, 911, 912, 914, 915, 916, 917, 920, 922, 923, 925, 927, 1509, 1510, 929, 930, 931, 1509, 1509, 932, 1523, 1523, 1524, 935, 1523, 936, 1525, 1526, 1529, 1530, 1534, 1534, 1535, 941, 1534, 942, 1536, 1537, 1540, 1541, 1545, 1546, 1545, 968, 969, 970, 971, 948, 972, 973, 974, 975, 976, 977, 978, 979, 980, 981, 982, 983, 984, 985, 986, 987, 988, 989, 990, 991, 992, 993, 994, 995, 996, 997, 998, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009, 1010, 1011, 1012, 1013, 1014, 1015, 1016, 950, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1032, 1033, 1034, 949, 1035, 1036, 1037, 1038, 1039, 1041, 1042, 1043, 1044, 1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1060, 1061, 1065, 1067, 1068, 1069, 1070, 965, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1080, 
1081, 1082, 1083, 1084, 1085, 1087, 1088, 1090, 1091, 1092, 1093, 1094, 1095, 963, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1106, 1138, 1162, 1165, 1166, 1168, 1177, 1178, 1181, 1185, 1203, 1063, 1210, 1212, 1214, 1216, 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1163, 1164, 1167, 1169, 1170, 1171, 1172, 1173, 1174, 1175, 1176, 1179, 1180, 1182, 1183, 1184, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1204, 1205, 1206, 1207, 1208, 1209, 1211, 1213, 1215, 1217, 1219, 1220, 1545, 1545, 1221, 1358, 1359, 1290, 1360, 1361, 1362, 1363, 1364, 1365, 1319, 1366, 1255, 1367, 1368, 1369, 1370, 1371, 1372, 1373, 1374, 1275, 1375, 1376, 1377, 1378, 1379, 1380, 1381, 1382, 1383, 1384, 1300, 1385, 1387, 1388, 1389, 1390, 1391, 1392, 1393, 1394, 1395, 1396, 1233, 1397, 1398, 1399, 1400, 1401, 1402, 1403, 1404, 1405, 1271, 1406, 1407, 1408, 1409, 1410, 1341, 1412, 1413, 1416, 1418, 1419, 1420, 1421, 1422, 1423, 1426, 1427, 1429, 1430, 1431, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1445, 1446, 1447, 1448, 1451, 1453, 1454, 1456, 1458, 1460, 1459, 1461, 1462, 1459, 1463, 1459, 1464, 1465, 1466, 1468, 1469, 1470, 1459, 1472, 1459, 1473, 1459, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1459, 1459, 1459, 1459, 1459, 1459, 1, 1459, 7, 1459, 1459, 1459, 1459, 1459, 415, 416, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 433, 435, 436, 468, 509, 524, 531, 533, 535, 555, 558, 574, 687, 1459, 1459, 1459, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 741, 742, 743, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 760, 761, 762, 763, 764, 765, 766, 767, 768, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 855, 880, 883, 884, 886, 893, 894, 897, 901, 913, 918, 919, 921, 924, 926, 1511, 1509, 1512, 1517, 1519, 1509, 1520, 1521, 1522, 1509, 928, 1509, 1509, 1513, 1514, 1516, 1509, 1515, 1509, 1509, 1509, 1518, 1509, 1509, 1509, 933, 934, 938, 939, 1523, 1531, 1532, 1533, 1523, 937, 1523, 1523, 934, 1527, 1528, 1523, 1523, 1523, 1523, 1523, 940, 944, 945, 1534, 1542, 1543, 1544, 1534, 943, 1534, 1534, 940, 1538, 1539, 1534, 1534, 1534, 1534, 1534, 1545, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1545, 946, 947, 951, 952, 953, 954, 955, 956, 957, 958, 959, 960, 961, 962, 964, 966, 967, 999, 1040, 1055, 1062, 1064, 1066, 1086, 1089, 1105, 1218, 1545, 1222, 1223, 1224, 1225, 1226, 1227, 1228, 1229, 1230, 1231, 1232, 1234, 1235, 1236, 1237, 1238, 1239, 1240, 1241, 1242, 1243, 1244, 1245, 
1246, 1247, 1248, 1249, 1250, 1251, 1252, 1253, 1254, 1256, 1257, 1258, 1259, 1260, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1270, 1272, 1273, 1274, 1276, 1277, 1278, 1279, 1280, 1281, 1282, 1283, 1284, 1285, 1286, 1287, 1288, 1289, 1291, 1292, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1320, 1321, 1322, 1323, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1354, 1355, 1356, 1357, 1386, 1411, 1414, 1415, 1417, 1424, 1425, 1428, 1432, 1444, 1449, 1450, 1452, 1455, 1457, } var _hcltok_trans_actions []byte = []byte{ 145, 107, 0, 0, 91, 141, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 121, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 143, 193, 149, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 147, 125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 169, 0, 0, 0, 35, 33, 0, 55, 41, 175, 0, 53, 0, 175, 175, 0, 0, 75, 61, 181, 0, 73, 0, 181, 181, 0, 0, 85, 187, 89, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 87, 79, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 93, 0, 0, 119, 0, 111, 0, 7, 7, 7, 0, 0, 113, 0, 115, 0, 123, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 7, 7, 196, 196, 196, 196, 196, 196, 7, 7, 196, 7, 127, 139, 135, 97, 133, 103, 0, 129, 0, 101, 95, 109, 99, 131, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 105, 117, 137, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 0, 0, 172, 17, 0, 7, 7, 23, 0, 25, 27, 0, 0, 0, 151, 0, 15, 19, 9, 0, 21, 11, 29, 0, 0, 0, 0, 43, 0, 178, 178, 49, 0, 157, 154, 1, 175, 175, 45, 37, 47, 39, 51, 0, 0, 0, 63, 0, 184, 184, 69, 0, 163, 160, 1, 181, 181, 65, 57, 67, 59, 71, 77, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 7, 7, 190, 190, 190, 190, 190, 190, 7, 7, 190, 7, 81, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 83, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, } var _hcltok_to_state_actions []byte = []byte{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 166, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 166, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, } var _hcltok_from_state_actions []byte = []byte{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, } var _hcltok_eof_trans []int16 = []int16{ 0, 1, 1, 1, 6, 6, 6, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 419, 419, 421, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 
419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 419, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 670, 767, 772, 772, 772, 773, 773, 775, 775, 775, 779, 0, 0, 785, 785, 785, 789, 0, 0, 795, 795, 797, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 795, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 
1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046, 0, 1196, 1197, 1198, 1200, 1198, 1198, 1198, 1203, 1198, 1198, 1198, 1209, 1198, 1198, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239, 0, 1392, 1394, 1395, 1399, 1399, 1392, 1402, 1395, 1405, 1395, 1407, 1407, 1407, 0, 1416, 1418, 1418, 1416, 1416, 1423, 1425, 1427, 1427, 1427, 0, 1435, 1437, 1437, 1435, 1435, 1442, 1444, 1446, 1446, 1446, 0, 1483, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511, } const hcltok_start int = 1459 const hcltok_first_final int = 1459 const hcltok_error int = 0 const hcltok_en_stringTemplate int = 1509 const hcltok_en_heredocTemplate int = 1523 const hcltok_en_bareTemplate int = 1534 const hcltok_en_identOnly int = 1545 const hcltok_en_main int = 1459 //line scan_tokens.rl:16 func scanTokens(data []byte, filename string, start hcl.Pos, mode scanMode) []Token { stripData := stripUTF8BOM(data) start.Byte += len(data) - len(stripData) data = stripData f := &tokenAccum{ Filename: filename, Bytes: data, Pos: start, StartByte: start.Byte, } //line scan_tokens.rl:305 // Ragel state p := 0 // "Pointer" into data pe := len(data) // End-of-data "pointer" ts := 0 te := 0 act := 0 eof := pe var stack []int var top int var cs int // current state switch mode { case scanNormal: cs = hcltok_en_main case scanTemplate: cs = hcltok_en_bareTemplate case scanIdentOnly: cs = hcltok_en_identOnly default: panic("invalid scanMode") } braces := 0 var retBraces []int // stack of brace levels that cause us to use fret var heredocs []heredocInProgress // stack of heredocs we're currently processing //line scan_tokens.rl:340 // Make Go compiler happy _ = ts _ = te _ = act _ = eof token := func(ty TokenType) { f.emitToken(ty, ts, te) } selfToken := func() { b := data[ts:te] if len(b) != 1 { // should never happen panic("selfToken only works for single-character tokens") } f.emitToken(TokenType(b[0]), ts, te) } //line scan_tokens.go:4289 { top = 0 ts = 0 te = 0 act = 0 } //line scan_tokens.go:4297 { var _klen int var _trans int var _acts int var _nacts uint var _keys int if p == pe { goto _test_eof } if cs == 0 { goto _out } _resume: _acts = int(_hcltok_from_state_actions[cs]) _nacts = uint(_hcltok_actions[_acts]) _acts++ for ; _nacts > 0; _nacts-- { _acts++ switch _hcltok_actions[_acts-1] { case 3: //line NONE:1 ts = p //line scan_tokens.go:4320 } } _keys = int(_hcltok_key_offsets[cs]) 
_trans = int(_hcltok_index_offsets[cs]) _klen = int(_hcltok_single_lengths[cs]) if _klen > 0 { _lower := int(_keys) var _mid int _upper := int(_keys + _klen - 1) for { if _upper < _lower { break } _mid = _lower + ((_upper - _lower) >> 1) switch { case data[p] < _hcltok_trans_keys[_mid]: _upper = _mid - 1 case data[p] > _hcltok_trans_keys[_mid]: _lower = _mid + 1 default: _trans += int(_mid - int(_keys)) goto _match } } _keys += _klen _trans += _klen } _klen = int(_hcltok_range_lengths[cs]) if _klen > 0 { _lower := int(_keys) var _mid int _upper := int(_keys + (_klen << 1) - 2) for { if _upper < _lower { break } _mid = _lower + (((_upper - _lower) >> 1) & ^1) switch { case data[p] < _hcltok_trans_keys[_mid]: _upper = _mid - 2 case data[p] > _hcltok_trans_keys[_mid+1]: _lower = _mid + 2 default: _trans += int((_mid - int(_keys)) >> 1) goto _match } } _trans += _klen } _match: _trans = int(_hcltok_indicies[_trans]) _eof_trans: cs = int(_hcltok_trans_targs[_trans]) if _hcltok_trans_actions[_trans] == 0 { goto _again } _acts = int(_hcltok_trans_actions[_trans]) _nacts = uint(_hcltok_actions[_acts]) _acts++ for ; _nacts > 0; _nacts-- { _acts++ switch _hcltok_actions[_acts-1] { case 0: //line scan_tokens.rl:224 p-- case 4: //line NONE:1 te = p + 1 case 5: //line scan_tokens.rl:248 act = 4 case 6: //line scan_tokens.rl:250 act = 6 case 7: //line scan_tokens.rl:160 te = p + 1 { token(TokenTemplateInterp) braces++ retBraces = append(retBraces, braces) if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false } { stack = append(stack, 0) stack[top] = cs top++ cs = 1459 goto _again } } case 8: //line scan_tokens.rl:170 te = p + 1 { token(TokenTemplateControl) braces++ retBraces = append(retBraces, braces) if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false } { stack = append(stack, 0) stack[top] = cs top++ cs = 1459 goto _again } } case 9: //line scan_tokens.rl:84 te = p + 1 { token(TokenCQuote) top-- cs = stack[top] { stack = stack[:len(stack)-1] } goto _again } case 10: //line scan_tokens.rl:248 te = p + 1 { token(TokenQuotedLit) } case 11: //line scan_tokens.rl:251 te = p + 1 { token(TokenBadUTF8) } case 12: //line scan_tokens.rl:160 te = p p-- { token(TokenTemplateInterp) braces++ retBraces = append(retBraces, braces) if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false } { stack = append(stack, 0) stack[top] = cs top++ cs = 1459 goto _again } } case 13: //line scan_tokens.rl:170 te = p p-- { token(TokenTemplateControl) braces++ retBraces = append(retBraces, braces) if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false } { stack = append(stack, 0) stack[top] = cs top++ cs = 1459 goto _again } } case 14: //line scan_tokens.rl:248 te = p p-- { token(TokenQuotedLit) } case 15: //line scan_tokens.rl:249 te = p p-- { token(TokenQuotedNewline) } case 16: //line scan_tokens.rl:250 te = p p-- { token(TokenInvalid) } case 17: //line scan_tokens.rl:251 te = p p-- { token(TokenBadUTF8) } case 18: //line scan_tokens.rl:248 p = (te) - 1 { token(TokenQuotedLit) } case 19: //line scan_tokens.rl:251 p = (te) - 1 { token(TokenBadUTF8) } case 20: //line NONE:1 switch act { case 4: { p = (te) - 1 token(TokenQuotedLit) } case 6: { p = (te) - 1 token(TokenInvalid) } } case 21: //line scan_tokens.rl:148 act = 11 case 22: //line scan_tokens.rl:259 act = 12 case 23: //line scan_tokens.rl:160 te = p + 1 { token(TokenTemplateInterp) braces++ retBraces = append(retBraces, braces) if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false } { stack = 
append(stack, 0) stack[top] = cs top++ cs = 1459 goto _again } } case 24: //line scan_tokens.rl:170 te = p + 1 { token(TokenTemplateControl) braces++ retBraces = append(retBraces, braces) if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false } { stack = append(stack, 0) stack[top] = cs top++ cs = 1459 goto _again } } case 25: //line scan_tokens.rl:111 te = p + 1 { // This action is called specificially when a heredoc literal // ends with a newline character. // This might actually be our end marker. topdoc := &heredocs[len(heredocs)-1] if topdoc.StartOfLine { maybeMarker := bytes.TrimSpace(data[ts:te]) if bytes.Equal(maybeMarker, topdoc.Marker) { // We actually emit two tokens here: the end-of-heredoc // marker first, and then separately the newline that // follows it. This then avoids issues with the closing // marker consuming a newline that would normally be used // to mark the end of an attribute definition. // We might have either a \n sequence or an \r\n sequence // here, so we must handle both. nls := te - 1 nle := te te-- if data[te-1] == '\r' { // back up one more byte nls-- te-- } token(TokenCHeredoc) ts = nls te = nle token(TokenNewline) heredocs = heredocs[:len(heredocs)-1] top-- cs = stack[top] { stack = stack[:len(stack)-1] } goto _again } } topdoc.StartOfLine = true token(TokenStringLit) } case 26: //line scan_tokens.rl:259 te = p + 1 { token(TokenBadUTF8) } case 27: //line scan_tokens.rl:160 te = p p-- { token(TokenTemplateInterp) braces++ retBraces = append(retBraces, braces) if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false } { stack = append(stack, 0) stack[top] = cs top++ cs = 1459 goto _again } } case 28: //line scan_tokens.rl:170 te = p p-- { token(TokenTemplateControl) braces++ retBraces = append(retBraces, braces) if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false } { stack = append(stack, 0) stack[top] = cs top++ cs = 1459 goto _again } } case 29: //line scan_tokens.rl:148 te = p p-- { // This action is called when a heredoc literal _doesn't_ end // with a newline character, e.g. because we're about to enter // an interpolation sequence. heredocs[len(heredocs)-1].StartOfLine = false token(TokenStringLit) } case 30: //line scan_tokens.rl:259 te = p p-- { token(TokenBadUTF8) } case 31: //line scan_tokens.rl:148 p = (te) - 1 { // This action is called when a heredoc literal _doesn't_ end // with a newline character, e.g. because we're about to enter // an interpolation sequence. heredocs[len(heredocs)-1].StartOfLine = false token(TokenStringLit) } case 32: //line NONE:1 switch act { case 0: { cs = 0 goto _again } case 11: { p = (te) - 1 // This action is called when a heredoc literal _doesn't_ end // with a newline character, e.g. because we're about to enter // an interpolation sequence. 
heredocs[len(heredocs)-1].StartOfLine = false token(TokenStringLit) } case 12: { p = (te) - 1 token(TokenBadUTF8) } } case 33: //line scan_tokens.rl:156 act = 15 case 34: //line scan_tokens.rl:266 act = 16 case 35: //line scan_tokens.rl:160 te = p + 1 { token(TokenTemplateInterp) braces++ retBraces = append(retBraces, braces) if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false } { stack = append(stack, 0) stack[top] = cs top++ cs = 1459 goto _again } } case 36: //line scan_tokens.rl:170 te = p + 1 { token(TokenTemplateControl) braces++ retBraces = append(retBraces, braces) if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false } { stack = append(stack, 0) stack[top] = cs top++ cs = 1459 goto _again } } case 37: //line scan_tokens.rl:156 te = p + 1 { token(TokenStringLit) } case 38: //line scan_tokens.rl:266 te = p + 1 { token(TokenBadUTF8) } case 39: //line scan_tokens.rl:160 te = p p-- { token(TokenTemplateInterp) braces++ retBraces = append(retBraces, braces) if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false } { stack = append(stack, 0) stack[top] = cs top++ cs = 1459 goto _again } } case 40: //line scan_tokens.rl:170 te = p p-- { token(TokenTemplateControl) braces++ retBraces = append(retBraces, braces) if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false } { stack = append(stack, 0) stack[top] = cs top++ cs = 1459 goto _again } } case 41: //line scan_tokens.rl:156 te = p p-- { token(TokenStringLit) } case 42: //line scan_tokens.rl:266 te = p p-- { token(TokenBadUTF8) } case 43: //line scan_tokens.rl:156 p = (te) - 1 { token(TokenStringLit) } case 44: //line NONE:1 switch act { case 0: { cs = 0 goto _again } case 15: { p = (te) - 1 token(TokenStringLit) } case 16: { p = (te) - 1 token(TokenBadUTF8) } } case 45: //line scan_tokens.rl:270 act = 17 case 46: //line scan_tokens.rl:271 act = 18 case 47: //line scan_tokens.rl:271 te = p + 1 { token(TokenBadUTF8) } case 48: //line scan_tokens.rl:272 te = p + 1 { token(TokenInvalid) } case 49: //line scan_tokens.rl:270 te = p p-- { token(TokenIdent) } case 50: //line scan_tokens.rl:271 te = p p-- { token(TokenBadUTF8) } case 51: //line scan_tokens.rl:270 p = (te) - 1 { token(TokenIdent) } case 52: //line scan_tokens.rl:271 p = (te) - 1 { token(TokenBadUTF8) } case 53: //line NONE:1 switch act { case 17: { p = (te) - 1 token(TokenIdent) } case 18: { p = (te) - 1 token(TokenBadUTF8) } } case 54: //line scan_tokens.rl:278 act = 22 case 55: //line scan_tokens.rl:301 act = 39 case 56: //line scan_tokens.rl:280 te = p + 1 { token(TokenComment) } case 57: //line scan_tokens.rl:281 te = p + 1 { token(TokenNewline) } case 58: //line scan_tokens.rl:283 te = p + 1 { token(TokenEqualOp) } case 59: //line scan_tokens.rl:284 te = p + 1 { token(TokenNotEqual) } case 60: //line scan_tokens.rl:285 te = p + 1 { token(TokenGreaterThanEq) } case 61: //line scan_tokens.rl:286 te = p + 1 { token(TokenLessThanEq) } case 62: //line scan_tokens.rl:287 te = p + 1 { token(TokenAnd) } case 63: //line scan_tokens.rl:288 te = p + 1 { token(TokenOr) } case 64: //line scan_tokens.rl:289 te = p + 1 { token(TokenEllipsis) } case 65: //line scan_tokens.rl:290 te = p + 1 { token(TokenFatArrow) } case 66: //line scan_tokens.rl:291 te = p + 1 { selfToken() } case 67: //line scan_tokens.rl:180 te = p + 1 { token(TokenOBrace) braces++ } case 68: //line scan_tokens.rl:185 te = p + 1 { if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces { token(TokenTemplateSeqEnd) braces-- retBraces = retBraces[0 : 
len(retBraces)-1] top-- cs = stack[top] { stack = stack[:len(stack)-1] } goto _again } else { token(TokenCBrace) braces-- } } case 69: //line scan_tokens.rl:197 te = p + 1 { // Only consume from the retBraces stack and return if we are at // a suitable brace nesting level, otherwise things will get // confused. (Not entering this branch indicates a syntax error, // which we will catch in the parser.) if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces { token(TokenTemplateSeqEnd) braces-- retBraces = retBraces[0 : len(retBraces)-1] top-- cs = stack[top] { stack = stack[:len(stack)-1] } goto _again } else { // We intentionally generate a TokenTemplateSeqEnd here, // even though the user apparently wanted a brace, because // we want to allow the parser to catch the incorrect use // of a ~} to balance a generic opening brace, rather than // a template sequence. token(TokenTemplateSeqEnd) braces-- } } case 70: //line scan_tokens.rl:79 te = p + 1 { token(TokenOQuote) { stack = append(stack, 0) stack[top] = cs top++ cs = 1509 goto _again } } case 71: //line scan_tokens.rl:89 te = p + 1 { token(TokenOHeredoc) // the token is currently the whole heredoc introducer, like // < 0; _nacts-- { _acts++ switch _hcltok_actions[_acts-1] { case 1: //line NONE:1 ts = 0 case 2: //line NONE:1 act = 0 //line scan_tokens.go:5073 } } if cs == 0 { goto _out } p++ if p != pe { goto _resume } _test_eof: { } if p == eof { if _hcltok_eof_trans[cs] > 0 { _trans = int(_hcltok_eof_trans[cs] - 1) goto _eof_trans } } _out: { } } //line scan_tokens.rl:363 // If we fall out here without being in a final state then we've // encountered something that the scanner can't match, which we'll // deal with as an invalid. if cs < hcltok_first_final { if mode == scanTemplate && len(stack) == 0 { // If we're scanning a bare template then any straggling // top-level stuff is actually literal string, rather than // invalid. This handles the case where the template ends // with a single "$" or "%", which trips us up because we // want to see another character to decide if it's a sequence // or an escape. f.emitToken(TokenStringLit, ts, len(data)) } else { f.emitToken(TokenInvalid, ts, len(data)) } } // We always emit a synthetic EOF token at the end, since it gives the // parser position information for an "unexpected EOF" diagnostic. f.emitToken(TokenEOF, len(data), len(data)) return f.Tokens } hcl-2.14.1/hclsyntax/scan_tokens.rl000066400000000000000000000325501431334125700172400ustar00rootroot00000000000000 package hclsyntax import ( "bytes" "github.com/hashicorp/hcl/v2" ) // This file is generated from scan_tokens.rl. DO NOT EDIT. %%{ # (except when you are actually in scan_tokens.rl here, so edit away!) machine hcltok; write data; }%% func scanTokens(data []byte, filename string, start hcl.Pos, mode scanMode) []Token { stripData := stripUTF8BOM(data) start.Byte += len(data) - len(stripData) data = stripData f := &tokenAccum{ Filename: filename, Bytes: data, Pos: start, StartByte: start.Byte, } %%{ include UnicodeDerived "unicode_derived.rl"; UTF8Cont = 0x80 .. 0xBF; AnyUTF8 = ( 0x00..0x7F | 0xC0..0xDF . UTF8Cont | 0xE0..0xEF . UTF8Cont . UTF8Cont | 0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont ); BrokenUTF8 = any - AnyUTF8; NumberLitContinue = (digit|'.'|('e'|'E') ('+'|'-')? digit); NumberLit = digit ("" | (NumberLitContinue - '.') | (NumberLitContinue* (NumberLitContinue - '.'))); Ident = (ID_Start | '_') (ID_Continue | '-')*; # Symbols that just represent themselves are handled as a single rule. 
SelfToken = "[" | "]" | "(" | ")" | "." | "," | "*" | "/" | "%" | "+" | "-" | "=" | "<" | ">" | "!" | "?" | ":" | "\n" | "&" | "|" | "~" | "^" | ";" | "`" | "'"; EqualOp = "=="; NotEqual = "!="; GreaterThanEqual = ">="; LessThanEqual = "<="; LogicalAnd = "&&"; LogicalOr = "||"; Ellipsis = "..."; FatArrow = "=>"; Newline = '\r' ? '\n'; EndOfLine = Newline; BeginStringTmpl = '"'; BeginHeredocTmpl = '<<' ('-')? Ident Newline; Comment = ( # The :>> operator in these is a "finish-guarded concatenation", # which terminates the sequence on its left when it completes # the sequence on its right. # In the single-line comment cases this is allowing us to make # the trailing EndOfLine optional while still having the overall # pattern terminate. In the multi-line case it ensures that # the first comment in the file ends at the first */, rather than # gobbling up all of the "any*" until the _final_ */ in the file. ("#" (any - EndOfLine)* :>> EndOfLine?) | ("//" (any - EndOfLine)* :>> EndOfLine?) | ("/*" any* :>> "*/") ); # Note: hclwrite assumes that only ASCII spaces appear between tokens, # and uses this assumption to recreate the spaces between tokens by # looking at byte offset differences. This means it will produce # incorrect results in the presence of tabs, but that's acceptable # because the canonical style (which hclwrite itself can impose # automatically is to never use tabs). Spaces = (' ' | 0x09)+; action beginStringTemplate { token(TokenOQuote); fcall stringTemplate; } action endStringTemplate { token(TokenCQuote); fret; } action beginHeredocTemplate { token(TokenOHeredoc); // the token is currently the whole heredoc introducer, like // < 0 { heredocs[len(heredocs)-1].StartOfLine = false; } fcall main; } action beginTemplateControl { token(TokenTemplateControl); braces++; retBraces = append(retBraces, braces); if len(heredocs) > 0 { heredocs[len(heredocs)-1].StartOfLine = false; } fcall main; } action openBrace { token(TokenOBrace); braces++; } action closeBrace { if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces { token(TokenTemplateSeqEnd); braces--; retBraces = retBraces[0:len(retBraces)-1] fret; } else { token(TokenCBrace); braces--; } } action closeTemplateSeqEatWhitespace { // Only consume from the retBraces stack and return if we are at // a suitable brace nesting level, otherwise things will get // confused. (Not entering this branch indicates a syntax error, // which we will catch in the parser.) if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces { token(TokenTemplateSeqEnd); braces--; retBraces = retBraces[0:len(retBraces)-1] fret; } else { // We intentionally generate a TokenTemplateSeqEnd here, // even though the user apparently wanted a brace, because // we want to allow the parser to catch the incorrect use // of a ~} to balance a generic opening brace, rather than // a template sequence. 
token(TokenTemplateSeqEnd); braces--; } } TemplateInterp = "${" ("~")?; TemplateControl = "%{" ("~")?; EndStringTmpl = '"'; NewlineChars = ("\r"|"\n"); NewlineCharsSeq = NewlineChars+; StringLiteralChars = (AnyUTF8 - NewlineChars); TemplateIgnoredNonBrace = (^'{' %{ fhold; }); TemplateNotInterp = '$' (TemplateIgnoredNonBrace | TemplateInterp); TemplateNotControl = '%' (TemplateIgnoredNonBrace | TemplateControl); QuotedStringLiteralWithEsc = ('\\' StringLiteralChars) | (StringLiteralChars - ("$" | '%' | '"' | "\\")); TemplateStringLiteral = ( (TemplateNotInterp) | (TemplateNotControl) | (QuotedStringLiteralWithEsc)+ ); HeredocStringLiteral = ( (TemplateNotInterp) | (TemplateNotControl) | (StringLiteralChars - ("$" | '%'))* ); BareStringLiteral = ( (TemplateNotInterp) | (TemplateNotControl) | (StringLiteralChars - ("$" | '%'))* ) Newline?; stringTemplate := |* TemplateInterp => beginTemplateInterp; TemplateControl => beginTemplateControl; EndStringTmpl => endStringTemplate; TemplateStringLiteral => { token(TokenQuotedLit); }; NewlineCharsSeq => { token(TokenQuotedNewline); }; AnyUTF8 => { token(TokenInvalid); }; BrokenUTF8 => { token(TokenBadUTF8); }; *|; heredocTemplate := |* TemplateInterp => beginTemplateInterp; TemplateControl => beginTemplateControl; HeredocStringLiteral EndOfLine => heredocLiteralEOL; HeredocStringLiteral => heredocLiteralMidline; BrokenUTF8 => { token(TokenBadUTF8); }; *|; bareTemplate := |* TemplateInterp => beginTemplateInterp; TemplateControl => beginTemplateControl; BareStringLiteral => bareTemplateLiteral; BrokenUTF8 => { token(TokenBadUTF8); }; *|; identOnly := |* Ident => { token(TokenIdent) }; BrokenUTF8 => { token(TokenBadUTF8) }; AnyUTF8 => { token(TokenInvalid) }; *|; main := |* Spaces => {}; NumberLit => { token(TokenNumberLit) }; Ident => { token(TokenIdent) }; Comment => { token(TokenComment) }; Newline => { token(TokenNewline) }; EqualOp => { token(TokenEqualOp); }; NotEqual => { token(TokenNotEqual); }; GreaterThanEqual => { token(TokenGreaterThanEq); }; LessThanEqual => { token(TokenLessThanEq); }; LogicalAnd => { token(TokenAnd); }; LogicalOr => { token(TokenOr); }; Ellipsis => { token(TokenEllipsis); }; FatArrow => { token(TokenFatArrow); }; SelfToken => { selfToken() }; "{" => openBrace; "}" => closeBrace; "~}" => closeTemplateSeqEatWhitespace; BeginStringTmpl => beginStringTemplate; BeginHeredocTmpl => beginHeredocTemplate; BrokenUTF8 => { token(TokenBadUTF8) }; AnyUTF8 => { token(TokenInvalid) }; *|; }%% // Ragel state p := 0 // "Pointer" into data pe := len(data) // End-of-data "pointer" ts := 0 te := 0 act := 0 eof := pe var stack []int var top int var cs int // current state switch mode { case scanNormal: cs = hcltok_en_main case scanTemplate: cs = hcltok_en_bareTemplate case scanIdentOnly: cs = hcltok_en_identOnly default: panic("invalid scanMode") } braces := 0 var retBraces []int // stack of brace levels that cause us to use fret var heredocs []heredocInProgress // stack of heredocs we're currently processing %%{ prepush { stack = append(stack, 0); } postpop { stack = stack[:len(stack)-1]; } }%% // Make Go compiler happy _ = ts _ = te _ = act _ = eof token := func (ty TokenType) { f.emitToken(ty, ts, te) } selfToken := func () { b := data[ts:te] if len(b) != 1 { // should never happen panic("selfToken only works for single-character tokens") } f.emitToken(TokenType(b[0]), ts, te) } %%{ write init nocs; write exec; }%% // If we fall out here without being in a final state then we've // encountered something that the scanner can't match, 
which we'll // deal with as an invalid. if cs < hcltok_first_final { if mode == scanTemplate && len(stack) == 0 { // If we're scanning a bare template then any straggling // top-level stuff is actually literal string, rather than // invalid. This handles the case where the template ends // with a single "$" or "%", which trips us up because we // want to see another character to decide if it's a sequence // or an escape. f.emitToken(TokenStringLit, ts, len(data)) } else { f.emitToken(TokenInvalid, ts, len(data)) } } // We always emit a synthetic EOF token at the end, since it gives the // parser position information for an "unexpected EOF" diagnostic. f.emitToken(TokenEOF, len(data), len(data)) return f.Tokens } hcl-2.14.1/hclsyntax/scan_tokens_test.go000066400000000000000000001745011431334125700202720ustar00rootroot00000000000000package hclsyntax import ( "testing" "github.com/google/go-cmp/cmp" "github.com/hashicorp/hcl/v2" ) func TestScanTokens_normal(t *testing.T) { tests := []struct { input string want []Token }{ // Empty input { ``, []Token{ { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 0, Line: 1, Column: 1}, }, }, }, }, { ` `, []Token{ { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, }, }, { "\n\n", []Token{ { Type: TokenNewline, Bytes: []byte("\n"), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 2, Column: 1}, }, }, { Type: TokenNewline, Bytes: []byte("\n"), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 2, Column: 1}, End: hcl.Pos{Byte: 2, Line: 3, Column: 1}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 2, Line: 3, Column: 1}, End: hcl.Pos{Byte: 2, Line: 3, Column: 1}, }, }, }, }, // Byte-order mark { "\xef\xbb\xbf", // Leading UTF-8 byte-order mark is ignored... 
[]Token{ { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ // ...but its bytes still count when producing ranges Start: hcl.Pos{Byte: 3, Line: 1, Column: 1}, End: hcl.Pos{Byte: 3, Line: 1, Column: 1}, }, }, }, }, { " \xef\xbb\xbf", // Non-leading BOM is invalid []Token{ { Type: TokenInvalid, Bytes: utf8BOM, Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 4, Line: 1, Column: 3}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 4, Line: 1, Column: 3}, End: hcl.Pos{Byte: 4, Line: 1, Column: 3}, }, }, }, }, { "\xfe\xff", // UTF-16 BOM is invalid []Token{ { Type: TokenBadUTF8, Bytes: []byte{0xfe}, Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenBadUTF8, Bytes: []byte{0xff}, Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 2, Line: 1, Column: 3}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 2, Line: 1, Column: 3}, End: hcl.Pos{Byte: 2, Line: 1, Column: 3}, }, }, }, }, // TokenNumberLit { `1`, []Token{ { Type: TokenNumberLit, Bytes: []byte(`1`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, }, }, { `12`, []Token{ { Type: TokenNumberLit, Bytes: []byte(`12`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 2, Line: 1, Column: 3}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 2, Line: 1, Column: 3}, End: hcl.Pos{Byte: 2, Line: 1, Column: 3}, }, }, }, }, { `12.3`, []Token{ { Type: TokenNumberLit, Bytes: []byte(`12.3`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 4, Line: 1, Column: 5}, End: hcl.Pos{Byte: 4, Line: 1, Column: 5}, }, }, }, }, { `1e2`, []Token{ { Type: TokenNumberLit, Bytes: []byte(`1e2`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 3, Line: 1, Column: 4}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 3, Line: 1, Column: 4}, End: hcl.Pos{Byte: 3, Line: 1, Column: 4}, }, }, }, }, { `1e+2`, []Token{ { Type: TokenNumberLit, Bytes: []byte(`1e+2`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 4, Line: 1, Column: 5}, End: hcl.Pos{Byte: 4, Line: 1, Column: 5}, }, }, }, }, // TokenIdent { `hello`, []Token{ { Type: TokenIdent, Bytes: []byte(`hello`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 5, Line: 1, Column: 6}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, }, }, { `_ello`, []Token{ { Type: TokenIdent, Bytes: []byte(`_ello`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 5, Line: 1, Column: 6}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, }, }, { `hel_o`, []Token{ { Type: TokenIdent, Bytes: []byte(`hel_o`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: 
hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 5, Line: 1, Column: 6}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, }, }, { `hel-o`, []Token{ { Type: TokenIdent, Bytes: []byte(`hel-o`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 5, Line: 1, Column: 6}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, }, }, { `h3ll0`, []Token{ { Type: TokenIdent, Bytes: []byte(`h3ll0`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 5, Line: 1, Column: 6}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, }, }, { `héllo`, // combining acute accent []Token{ { Type: TokenIdent, Bytes: []byte(`héllo`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 7, Line: 1, Column: 6}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 6}, End: hcl.Pos{Byte: 7, Line: 1, Column: 6}, }, }, }, }, // Literal-only Templates (string literals, effectively) { `""`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 2, Line: 1, Column: 3}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 2, Line: 1, Column: 3}, End: hcl.Pos{Byte: 2, Line: 1, Column: 3}, }, }, }, }, { `"hello"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`hello`), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 6, Line: 1, Column: 7}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 6, Line: 1, Column: 7}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 8}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, }, }, { `"hello, \"world\"!"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`hello, \"world\"!`), // The escapes are handled by the parser, not the scanner Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 18, Line: 1, Column: 19}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 18, Line: 1, Column: 19}, End: hcl.Pos{Byte: 19, Line: 1, Column: 20}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 19, Line: 1, Column: 20}, End: hcl.Pos{Byte: 19, Line: 1, Column: 20}, }, }, }, }, { `"hello $$"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`hello `), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, // This one scans a little oddly because of how the scanner // handles the escaping of the dollar 
sign, but it's still // good enough for the parser since it'll just concatenate // these two string literals together anyway. { Type: TokenQuotedLit, Bytes: []byte(`$`), Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 8}, End: hcl.Pos{Byte: 8, Line: 1, Column: 9}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`$`), Range: hcl.Range{ Start: hcl.Pos{Byte: 8, Line: 1, Column: 9}, End: hcl.Pos{Byte: 9, Line: 1, Column: 10}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 9, Line: 1, Column: 10}, End: hcl.Pos{Byte: 10, Line: 1, Column: 11}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 10, Line: 1, Column: 11}, End: hcl.Pos{Byte: 10, Line: 1, Column: 11}, }, }, }, }, { `"hello %%"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`hello `), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, // This one scans a little oddly because of how the scanner // handles the escaping of the percent sign, but it's still // good enough for the parser since it'll just concatenate // these two string literals together anyway. { Type: TokenQuotedLit, Bytes: []byte(`%`), Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 8}, End: hcl.Pos{Byte: 8, Line: 1, Column: 9}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`%`), Range: hcl.Range{ Start: hcl.Pos{Byte: 8, Line: 1, Column: 9}, End: hcl.Pos{Byte: 9, Line: 1, Column: 10}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 9, Line: 1, Column: 10}, End: hcl.Pos{Byte: 10, Line: 1, Column: 11}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 10, Line: 1, Column: 11}, End: hcl.Pos{Byte: 10, Line: 1, Column: 11}, }, }, }, }, { `"hello $"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`hello `), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`$`), Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 8}, End: hcl.Pos{Byte: 8, Line: 1, Column: 9}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 8, Line: 1, Column: 9}, End: hcl.Pos{Byte: 9, Line: 1, Column: 10}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 9, Line: 1, Column: 10}, End: hcl.Pos{Byte: 9, Line: 1, Column: 10}, }, }, }, }, { `"hello %"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`hello `), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`%`), Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 8}, End: hcl.Pos{Byte: 8, Line: 1, Column: 9}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 8, Line: 1, Column: 9}, End: hcl.Pos{Byte: 9, Line: 1, Column: 10}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 9, Line: 1, Column: 10}, End: hcl.Pos{Byte: 9, Line: 1, Column: 10}, }, }, }, }, { 
`"hello $${world}"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`hello `), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`$${`), Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 8}, End: hcl.Pos{Byte: 10, Line: 1, Column: 11}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`world}`), Range: hcl.Range{ Start: hcl.Pos{Byte: 10, Line: 1, Column: 11}, End: hcl.Pos{Byte: 16, Line: 1, Column: 17}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 16, Line: 1, Column: 17}, End: hcl.Pos{Byte: 17, Line: 1, Column: 18}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 17, Line: 1, Column: 18}, End: hcl.Pos{Byte: 17, Line: 1, Column: 18}, }, }, }, }, { `"hello %%{world}"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`hello `), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`%%{`), Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 8}, End: hcl.Pos{Byte: 10, Line: 1, Column: 11}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`world}`), Range: hcl.Range{ Start: hcl.Pos{Byte: 10, Line: 1, Column: 11}, End: hcl.Pos{Byte: 16, Line: 1, Column: 17}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 16, Line: 1, Column: 17}, End: hcl.Pos{Byte: 17, Line: 1, Column: 18}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 17, Line: 1, Column: 18}, End: hcl.Pos{Byte: 17, Line: 1, Column: 18}, }, }, }, }, { `"hello %${world}"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`hello `), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, { Type: TokenQuotedLit, Bytes: []byte(`%`), Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 8}, End: hcl.Pos{Byte: 8, Line: 1, Column: 9}, }, }, { Type: TokenTemplateInterp, Bytes: []byte(`${`), Range: hcl.Range{ Start: hcl.Pos{Byte: 8, Line: 1, Column: 9}, End: hcl.Pos{Byte: 10, Line: 1, Column: 11}, }, }, { Type: TokenIdent, Bytes: []byte(`world`), Range: hcl.Range{ Start: hcl.Pos{Byte: 10, Line: 1, Column: 11}, End: hcl.Pos{Byte: 15, Line: 1, Column: 16}, }, }, { Type: TokenTemplateSeqEnd, Bytes: []byte(`}`), Range: hcl.Range{ Start: hcl.Pos{Byte: 15, Line: 1, Column: 16}, End: hcl.Pos{Byte: 16, Line: 1, Column: 17}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 16, Line: 1, Column: 17}, End: hcl.Pos{Byte: 17, Line: 1, Column: 18}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 17, Line: 1, Column: 18}, End: hcl.Pos{Byte: 17, Line: 1, Column: 18}, }, }, }, }, // Templates with interpolations and control sequences { `"${1}"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenTemplateInterp, Bytes: []byte(`${`), 
Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 3, Line: 1, Column: 4}, }, }, { Type: TokenNumberLit, Bytes: []byte(`1`), Range: hcl.Range{ Start: hcl.Pos{Byte: 3, Line: 1, Column: 4}, End: hcl.Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Type: TokenTemplateSeqEnd, Bytes: []byte(`}`), Range: hcl.Range{ Start: hcl.Pos{Byte: 4, Line: 1, Column: 5}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 5, Line: 1, Column: 6}, End: hcl.Pos{Byte: 6, Line: 1, Column: 7}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 6, Line: 1, Column: 7}, End: hcl.Pos{Byte: 6, Line: 1, Column: 7}, }, }, }, }, { `"%{a}"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenTemplateControl, Bytes: []byte(`%{`), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 3, Line: 1, Column: 4}, }, }, { Type: TokenIdent, Bytes: []byte(`a`), Range: hcl.Range{ Start: hcl.Pos{Byte: 3, Line: 1, Column: 4}, End: hcl.Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Type: TokenTemplateSeqEnd, Bytes: []byte(`}`), Range: hcl.Range{ Start: hcl.Pos{Byte: 4, Line: 1, Column: 5}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 5, Line: 1, Column: 6}, End: hcl.Pos{Byte: 6, Line: 1, Column: 7}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 6, Line: 1, Column: 7}, End: hcl.Pos{Byte: 6, Line: 1, Column: 7}, }, }, }, }, { `"${{}}"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenTemplateInterp, Bytes: []byte(`${`), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 3, Line: 1, Column: 4}, }, }, { Type: TokenOBrace, Bytes: []byte(`{`), Range: hcl.Range{ Start: hcl.Pos{Byte: 3, Line: 1, Column: 4}, End: hcl.Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Type: TokenCBrace, Bytes: []byte(`}`), Range: hcl.Range{ Start: hcl.Pos{Byte: 4, Line: 1, Column: 5}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Type: TokenTemplateSeqEnd, Bytes: []byte(`}`), Range: hcl.Range{ Start: hcl.Pos{Byte: 5, Line: 1, Column: 6}, End: hcl.Pos{Byte: 6, Line: 1, Column: 7}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 6, Line: 1, Column: 7}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 8}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, }, }, { `"${""}"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenTemplateInterp, Bytes: []byte(`${`), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 3, Line: 1, Column: 4}, }, }, { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 3, Line: 1, Column: 4}, End: hcl.Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 4, Line: 1, Column: 5}, End: hcl.Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Type: TokenTemplateSeqEnd, Bytes: []byte(`}`), Range: hcl.Range{ Start: hcl.Pos{Byte: 5, Line: 1, Column: 
6}, End: hcl.Pos{Byte: 6, Line: 1, Column: 7}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 6, Line: 1, Column: 7}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 8}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, }, }, { `"${"${a}"}"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenTemplateInterp, Bytes: []byte(`${`), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 3, Line: 1, Column: 4}, }, }, { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 3, Line: 1, Column: 4}, End: hcl.Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Type: TokenTemplateInterp, Bytes: []byte(`${`), Range: hcl.Range{ Start: hcl.Pos{Byte: 4, Line: 1, Column: 5}, End: hcl.Pos{Byte: 6, Line: 1, Column: 7}, }, }, { Type: TokenIdent, Bytes: []byte(`a`), Range: hcl.Range{ Start: hcl.Pos{Byte: 6, Line: 1, Column: 7}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, { Type: TokenTemplateSeqEnd, Bytes: []byte(`}`), Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 8}, End: hcl.Pos{Byte: 8, Line: 1, Column: 9}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 8, Line: 1, Column: 9}, End: hcl.Pos{Byte: 9, Line: 1, Column: 10}, }, }, { Type: TokenTemplateSeqEnd, Bytes: []byte(`}`), Range: hcl.Range{ Start: hcl.Pos{Byte: 9, Line: 1, Column: 10}, End: hcl.Pos{Byte: 10, Line: 1, Column: 11}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 10, Line: 1, Column: 11}, End: hcl.Pos{Byte: 11, Line: 1, Column: 12}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 11, Line: 1, Column: 12}, End: hcl.Pos{Byte: 11, Line: 1, Column: 12}, }, }, }, }, { `"${"${a} foo"}"`, []Token{ { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 0, Line: 1, Column: 1}, End: hcl.Pos{Byte: 1, Line: 1, Column: 2}, }, }, { Type: TokenTemplateInterp, Bytes: []byte(`${`), Range: hcl.Range{ Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 3, Line: 1, Column: 4}, }, }, { Type: TokenOQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 3, Line: 1, Column: 4}, End: hcl.Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Type: TokenTemplateInterp, Bytes: []byte(`${`), Range: hcl.Range{ Start: hcl.Pos{Byte: 4, Line: 1, Column: 5}, End: hcl.Pos{Byte: 6, Line: 1, Column: 7}, }, }, { Type: TokenIdent, Bytes: []byte(`a`), Range: hcl.Range{ Start: hcl.Pos{Byte: 6, Line: 1, Column: 7}, End: hcl.Pos{Byte: 7, Line: 1, Column: 8}, }, }, { Type: TokenTemplateSeqEnd, Bytes: []byte(`}`), Range: hcl.Range{ Start: hcl.Pos{Byte: 7, Line: 1, Column: 8}, End: hcl.Pos{Byte: 8, Line: 1, Column: 9}, }, }, { Type: TokenQuotedLit, Bytes: []byte(` foo`), Range: hcl.Range{ Start: hcl.Pos{Byte: 8, Line: 1, Column: 9}, End: hcl.Pos{Byte: 12, Line: 1, Column: 13}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 12, Line: 1, Column: 13}, End: hcl.Pos{Byte: 13, Line: 1, Column: 14}, }, }, { Type: TokenTemplateSeqEnd, Bytes: []byte(`}`), Range: hcl.Range{ Start: hcl.Pos{Byte: 13, Line: 1, Column: 14}, End: hcl.Pos{Byte: 14, Line: 1, Column: 15}, }, }, { Type: TokenCQuote, Bytes: []byte(`"`), Range: hcl.Range{ Start: hcl.Pos{Byte: 14, Line: 1, Column: 15}, End: hcl.Pos{Byte: 15, Line: 1, 
Column: 16}, }, }, { Type: TokenEOF, Bytes: []byte{}, Range: hcl.Range{ Start: hcl.Pos{Byte: 15, Line: 1, Column: 16}, End: hcl.Pos{Byte: 15, Line: 1, Column: 16}, }, }, }, }, // Heredoc Templates { `< ? } ] ) %{ * ! <= = . / >= => , % ... ``` ### Numeric Literals A numeric literal is a decimal representation of a real number. It has an integer part, a fractional part, and an exponent part. ```ebnf NumericLit = decimal+ ("." decimal+)? (expmark decimal+)?; decimal = '0' .. '9'; expmark = ('e' | 'E') ("+" | "-")?; ``` ## Structural Elements The structural language consists of syntax representing the following constructs: - _Attributes_, which assign a value to a specified name. - _Blocks_, which create a child body annotated by a type and optional labels. - _Body Content_, which consists of a collection of attributes and blocks. These constructs correspond to the similarly-named concepts in the language-agnostic HCL information model. ```ebnf ConfigFile = Body; Body = (Attribute | Block | OneLineBlock)*; Attribute = Identifier "=" Expression Newline; Block = Identifier (StringLit|Identifier)* "{" Newline Body "}" Newline; OneLineBlock = Identifier (StringLit|Identifier)* "{" (Identifier "=" Expression)? "}" Newline; ``` ### Configuration Files A _configuration file_ is a sequence of characters whose top-level is interpreted as a Body. ### Bodies A _body_ is a collection of associated attributes and blocks. The meaning of this association is defined by the calling application. ### Attribute Definitions An _attribute definition_ assigns a value to a particular attribute name within a body. Each distinct attribute name may be defined no more than once within a single body. The attribute value is given as an expression, which is retained literally for later evaluation by the calling application. ### Blocks A _block_ creates a child body that is annotated with a block _type_ and zero or more block _labels_. Blocks create a structural hierarchy which can be interpreted by the calling application. Block labels can either be quoted literal strings or naked identifiers. ## Expressions The expression sub-language is used within attribute definitions to specify values. ```ebnf Expression = ( ExprTerm | Operation | Conditional ); ``` ### Types The value types used within the expression language are those defined by the syntax-agnostic HCL information model. An expression may return any valid type, but only a subset of the available types have first-class syntax. A calling application may make other types available via _variables_ and _functions_. ### Expression Terms Expression _terms_ are the operands for unary and binary expressions, as well as acting as expressions in their own right. ```ebnf ExprTerm = ( LiteralValue | CollectionValue | TemplateExpr | VariableExpr | FunctionCall | ForExpr | ExprTerm Index | ExprTerm GetAttr | ExprTerm Splat | "(" Expression ")" ); ``` The productions for these different term types are given in their corresponding sections. Between the `(` and `)` characters denoting a sub-expression, newline characters are ignored as whitespace. ### Literal Values A _literal value_ immediately represents a particular value of a primitive type. ```ebnf LiteralValue = ( NumericLit | "true" | "false" | "null" ); ``` - Numeric literals represent values of type _number_. - The `true` and `false` keywords represent values of type _bool_. - The `null` keyword represents a null value of the dynamic pseudo-type. 
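As a non-normative illustration, the following minimal Go sketch uses this repository's `hclsyntax` package to parse and evaluate each kind of literal value; the file name passed to the parser is only a placeholder used in diagnostics.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
)

func main() {
	// Each source string below is a literal value expression.
	for _, src := range []string{"1.5e2", "true", "false", "null"} {
		expr, diags := hclsyntax.ParseExpression([]byte(src), "literal.hcl", hcl.Pos{Line: 1, Column: 1})
		if diags.HasErrors() {
			fmt.Println(diags.Error())
			continue
		}
		// Literal values need no variables or functions, so a nil
		// evaluation context is sufficient.
		val, _ := expr.Value(nil)
		fmt.Printf("%-6s -> %#v\n", src, val)
	}
}
```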
String literals are not directly available in the expression sub-language, but are available via the template sub-language, which can in turn be incorporated via _template expressions_. ### Collection Values A _collection value_ combines zero or more other expressions to produce a collection value. ```ebnf CollectionValue = tuple | object; tuple = "[" ( (Expression ("," Expression)* ","?)? ) "]"; object = "{" ( (objectelem ("," objectelem)* ","?)? ) "}"; objectelem = (Identifier | Expression) ("=" | ":") Expression; ``` Only tuple and object values can be directly constructed via native syntax. Tuple and object values can in turn be converted to list, set and map values with other operations, which behaves as defined by the syntax-agnostic HCL information model. When specifying an object element, an identifier is interpreted as a literal attribute name as opposed to a variable reference. To populate an item key from a variable, use parentheses to disambiguate: - `{foo = "baz"}` is interpreted as an attribute literally named `foo`. - `{(foo) = "baz"}` is interpreted as an attribute whose name is taken from the variable named `foo`. Between the open and closing delimiters of these sequences, newline sequences are ignored as whitespace. There is a syntax ambiguity between _for expressions_ and collection values whose first element starts with an identifier named `for`. The _for expression_ interpretation has priority, so to write a key literally named `for` or an expression derived from a variable named `for` you must use parentheses or quotes to disambiguate: - `[for, foo, baz]` is a syntax error. - `[(for), foo, baz]` is a tuple whose first element is the value of variable `for`. - `{for = 1, baz = 2}` is a syntax error. - `{"for" = 1, baz = 2}` is an object with an attribute literally named `for`. - `{baz = 2, for = 1}` is equivalent to the previous example, and resolves the ambiguity by reordering. - `{(for) = 1, baz = 2}` is an object with a key with the same value as the variable `for`. ### Template Expressions A _template expression_ embeds a program written in the template sub-language as an expression. Template expressions come in two forms: - A _quoted_ template expression is delimited by quote characters (`"`) and defines a template as a single-line expression with escape characters. - A _heredoc_ template expression is introduced by a `<<` sequence and defines a template via a multi-line sequence terminated by a user-chosen delimiter. In both cases the template interpolation and directive syntax is available for use within the delimiters, and any text outside of these special sequences is interpreted as a literal string. In _quoted_ template expressions any literal string sequences within the template behave in a special way: literal newline sequences are not permitted and instead _escape sequences_ can be included, starting with the backslash `\`: ``` \n Unicode newline control character \r Unicode carriage return control character \t Unicode tab control character \" Literal quote mark, used to prevent interpretation as end of string \\ Literal backslash, used to prevent interpretation as escape sequence \uNNNN Unicode character from Basic Multilingual Plane (NNNN is four hexadecimal digits) \UNNNNNNNN Unicode character from supplementary planes (NNNNNNNN is eight hexadecimal digits) ``` The _heredoc_ template expression type is introduced by either `<<` or `<<-`, followed by an identifier. 
The template expression ends when the given identifier subsequently appears again on a line of its own. If a heredoc template is introduced with the `<<-` symbol, any literal string at the start of each line is analyzed to find the minimum number of leading spaces, and then that number of prefix spaces is removed from all line-leading literal strings. The final closing marker may also have an arbitrary number of spaces preceding it on its line. ```ebnf TemplateExpr = quotedTemplate | heredocTemplate; quotedTemplate = (as defined in prose above); heredocTemplate = ( ("<<" | "<<-") Identifier Newline (content as defined in prose above) Identifier Newline ); ``` A quoted template expression containing only a single literal string serves as a syntax for defining literal string _expressions_. In certain contexts the template syntax is restricted in this manner: ```ebnf StringLit = '"' (quoted literals as defined in prose above) '"'; ``` The `StringLit` production permits the escape sequences discussed for quoted template expressions as above, but does _not_ permit template interpolation or directive sequences. ### Variables and Variable Expressions A _variable_ is a value that has been assigned a symbolic name. Variables are made available for use in expressions by the calling application, by populating the _global scope_ used for expression evaluation. Variables can also be created by expressions themselves, which always creates a _child scope_ that incorporates the variables from its parent scope but (re-)defines zero or more names with new values. The value of a variable is accessed using a _variable expression_, which is a standalone `Identifier` whose name corresponds to a defined variable: ```ebnf VariableExpr = Identifier; ``` Variables in a particular scope are immutable, but child scopes may _hide_ a variable from an ancestor scope by defining a new variable of the same name. When looking up variables, the most locally-defined variable of the given name is used, and ancestor-scoped variables of the same name cannot be accessed. No direct syntax is provided for declaring or assigning variables, but other expression constructs implicitly create child scopes and define variables as part of their evaluation. ### Functions and Function Calls A _function_ is an operation that has been assigned a symbolic name. Functions are made available for use in expressions by the calling application, by populating the _function table_ used for expression evaluation. The namespace of functions is distinct from the namespace of variables. A function and a variable may share the same name with no implication that they are in any way related. A function can be executed via a _function call_ expression: ```ebnf FunctionCall = Identifier "(" arguments ")"; Arguments = ( () || (Expression ("," Expression)* ("," | "...")?) ); ``` The definition of functions and the semantics of calling them are defined by the language-agnostic HCL information model. The given arguments are mapped onto the function's _parameters_ and the result of a function call expression is the return value of the named function when given those arguments. If the final argument expression is followed by the ellipsis symbol (`...`), the final argument expression must evaluate to either a list or tuple value. The elements of the value are each mapped to a single parameter of the named function, beginning at the first parameter remaining after all other argument expressions have been mapped. 
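As a non-normative sketch of the expansion behavior described above, the following Go example evaluates a call whose final argument is expanded with the ellipsis symbol. The variable name `nums`, the file name, and the use of `stdlib.MaxFunc` (a variadic function from the go-cty module this library depends on) are illustrative choices, not part of the specification.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function"
	"github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
	// "max" is variadic, so the final tuple argument can be expanded
	// with the ellipsis symbol into individual arguments.
	expr, diags := hclsyntax.ParseExpression([]byte(`max(nums...)`), "call.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		fmt.Println(diags.Error())
		return
	}
	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{
			"nums": cty.TupleVal([]cty.Value{
				cty.NumberIntVal(1), cty.NumberIntVal(7), cty.NumberIntVal(3),
			}),
		},
		Functions: map[string]function.Function{
			"max": stdlib.MaxFunc,
		},
	}
	val, diags := expr.Value(ctx)
	if diags.HasErrors() {
		fmt.Println(diags.Error())
		return
	}
	fmt.Println(val.AsBigFloat()) // 7
}
```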
Within the parentheses that delimit the function arguments, newline sequences are ignored as whitespace. ### For Expressions A _for expression_ is a construct for constructing a collection by projecting the items from another collection. ```ebnf ForExpr = forTupleExpr | forObjectExpr; forTupleExpr = "[" forIntro Expression forCond? "]"; forObjectExpr = "{" forIntro Expression "=>" Expression "..."? forCond? "}"; forIntro = "for" Identifier ("," Identifier)? "in" Expression ":"; forCond = "if" Expression; ``` The punctuation used to delimit a for expression decides whether it will produce a tuple value (`[` and `]`) or an object value (`{` and `}`). The "introduction" is equivalent in both cases: the keyword `for` followed by either one or two identifiers separated by a comma which define the temporary variable names used for iteration, followed by the keyword `in` and then an expression that must evaluate to a value that can be iterated. The introduction is then terminated by the colon (`:`) symbol. If only one identifier is provided, it is the name of a variable that will be temporarily assigned the value of each element during iteration. If both are provided, the first is the key and the second is the value. Tuple, object, list, map, and set types are iterable. The type of collection used defines how the key and value variables are populated: - For tuple and list types, the _key_ is the zero-based index into the sequence for each element, and the _value_ is the element value. The elements are visited in index order. - For object and map types, the _key_ is the string attribute name or element key, and the _value_ is the attribute or element value. The elements are visited in the order defined by a lexicographic sort of the attribute names or keys. - For set types, the _key_ and _value_ are both the element value. The elements are visited in an undefined but consistent order. The expression after the colon and (in the case of object `for`) the expression after the `=>` are both evaluated once for each element of the source collection, in a local scope that defines the key and value variable names specified. The results of evaluating these expressions for each input element are used to populate an element in the new collection. In the case of tuple `for`, the single expression becomes an element, appending values to the tuple in visit order. In the case of object `for`, the pair of expressions is used as an attribute name and value respectively, creating an element in the resulting object. In the case of object `for`, it is an error if two input elements produce the same result from the attribute name expression, since duplicate attributes are not possible. If the ellipsis symbol (`...`) appears immediately after the value expression, this activates the grouping mode in which each value in the resulting object is a _tuple_ of all of the values that were produced against each distinct key. - `[for v in ["a", "b"]: v]` returns `["a", "b"]`. - `[for i, v in ["a", "b"]: i]` returns `[0, 1]`. - `{for i, v in ["a", "b"]: v => i}` returns `{a = 0, b = 1}`. - `{for i, v in ["a", "a", "b"]: v => i}` produces an error, because attribute `a` is defined twice. - `{for i, v in ["a", "a", "b"]: v => i...}` returns `{a = [0, 1], b = [2]}`. If the `if` keyword is used after the element expression(s), it applies an additional predicate that can be used to conditionally filter elements from the source collection from consideration.
The expression following `if` is evaluated once for each source element, in the same scope used for the element expression(s). It must evaluate to a boolean value; if `true`, the element will be evaluated as normal, while if `false` the element will be skipped. - `[for i, v in ["a", "b", "c"]: v if i < 2]` returns `["a", "b"]`. If the collection value, element expression(s) or condition expression return unknown values that are otherwise type-valid, the result is a value of the dynamic pseudo-type. ### Index Operator The _index_ operator returns the value of a single element of a collection value. It is a postfix operator and can be applied to any value that has a tuple, object, map, or list type. ```ebnf Index = "[" Expression "]"; ``` The expression delimited by the brackets is the _key_ by which an element will be looked up. If the index operator is applied to a value of tuple or list type, the key expression must be a non-negative integer number representing the zero-based element index to access. If applied to a value of object or map type, the key expression must be a string representing the attribute name or element key. If the given key value is not of the appropriate type, a conversion is attempted using the conversion rules from the HCL syntax-agnostic information model. An error is produced if the given key expression does not correspond to an element in the collection, either because it is of an unconvertible type, because it is outside the range of elements for a tuple or list, or because the given attribute or key does not exist. If either the collection or the key is an unknown value of an otherwise-suitable type, the return value is an unknown value whose type matches what type would be returned given known values, or a value of the dynamic pseudo-type if type information alone cannot determine a suitable return type. Within the brackets that delimit the index key, newline sequences are ignored as whitespace. The HCL native syntax also includes a _legacy_ index operator that exists only for compatibility with the precursor language HIL: ```ebnf LegacyIndex = '.' digit+ ``` This legacy index operator must be supported by the parser for compatibility but should not be used in new configurations. While this allows an attribute-access-like syntax for indexing, it must still be interpreted as an index operation rather than attribute access. The legacy syntax does not support chaining of index operations, like `foo.0.0.bar`, because the interpretation of `0.0` as a number literal token takes priority and thus renders the resulting sequence invalid. ### Attribute Access Operator The _attribute access_ operator returns the value of a single attribute in an object value. It is a postfix operator and can be applied to any value that has an object type. ```ebnf GetAttr = "." Identifier; ``` The given identifier is interpreted as the name of the attribute to access. An error is produced if the object to which the operator is applied does not have an attribute with the given name. If the object is an unknown value of a type that has the attribute named, the result is an unknown value of the attribute's type. ### Splat Operators The _splat operators_ allow convenient access to attributes or elements of elements in a tuple, list, or set value. There are two kinds of "splat" operator: - The _attribute-only_ splat operator supports only attribute lookups into the elements from a list, but supports an arbitrary number of them.
- The _full_ splat operator additionally supports indexing into the elements from a list, and allows any combination of attribute access and index operations. ```ebnf Splat = attrSplat | fullSplat; attrSplat = "." "*" GetAttr*; fullSplat = "[" "*" "]" (GetAttr | Index)*; ``` The splat operators can be thought of as shorthands for common operations that could otherwise be performed using _for expressions_: - `tuple.*.foo.bar[0]` is approximately equivalent to `[for v in tuple: v.foo.bar][0]`. - `tuple[*].foo.bar[0]` is approximately equivalent to `[for v in tuple: v.foo.bar[0]]`. Note the difference in how the trailing index operator is interpreted in each case. This different interpretation is the key difference between the _attribute-only_ and _full_ splat operators. Splat operators have one additional behavior compared to the equivalent _for expressions_ shown above: if a splat operator is applied to a value that is _not_ of tuple, list, or set type, the value is coerced automatically into a single-value list of the value type: - `any_object.*.id` is equivalent to `[any_object.id]`, assuming that `any_object` is a single object. - `any_number.*` is equivalent to `[any_number]`, assuming that `any_number` is a single number. If applied to a null value that is not tuple, list, or set, the result is always an empty tuple, which allows conveniently converting a possibly-null scalar value into a tuple of zero or one elements. It is illegal to apply a splat operator to a null value of tuple, list, or set type. ### Operations Operations apply a particular operator to either one or two expression terms. ```ebnf Operation = unaryOp | binaryOp; unaryOp = ("-" | "!") ExprTerm; binaryOp = ExprTerm binaryOperator ExprTerm; binaryOperator = compareOperator | arithmeticOperator | logicOperator; compareOperator = "==" | "!=" | "<" | ">" | "<=" | ">="; arithmeticOperator = "+" | "-" | "*" | "/" | "%"; logicOperator = "&&" | "||" | "!"; ``` The unary operators have the highest precedence. The binary operators are grouped into the following precedence levels: ``` Level Operators 6 * / % 5 + - 4 > >= < <= 3 == != 2 && 1 || ``` Higher values of "level" bind tighter. Operators within the same precedence level have left-to-right associativity. For example, `x / y * z` is equivalent to `(x / y) * z`. ### Comparison Operators Comparison operators always produce boolean values, as a result of testing the relationship between two values. The two equality operators apply to values of any type: ``` a == b equal a != b not equal ``` Two values are equal if they are of identical types and their values are equal as defined in the HCL syntax-agnostic information model. The equality operators are commutative and opposite, such that `(a == b) == !(a != b)` and `(a == b) == (b == a)` for all values `a` and `b`. The four numeric comparison operators apply only to numbers: ``` a < b less than a <= b less than or equal to a > b greater than a >= b greater than or equal to ``` If either operand of a comparison operator is a correctly-typed unknown value or a value of the dynamic pseudo-type, the result is an unknown boolean. ### Arithmetic Operators Arithmetic operators apply only to number values and always produce number values as results. ``` a + b sum (addition) a - b difference (subtraction) a * b product (multiplication) a / b quotient (division) a % b remainder (modulo) -a negation ``` Arithmetic operations are considered to be performed in an arbitrary-precision number space.
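For example, because arithmetic is defined over arbitrary-precision numbers rather than machine integers, division does not truncate. The following is a minimal, non-normative Go sketch using this repository's API; the file name is only a diagnostic placeholder.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
)

func main() {
	// Division is real division over arbitrary-precision numbers, not
	// integer division, so 5 / 2 yields 2.5 rather than 2.
	expr, diags := hclsyntax.ParseExpression([]byte(`5 / 2`), "arith.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		fmt.Println(diags.Error())
		return
	}
	val, _ := expr.Value(nil)
	fmt.Println(val.AsBigFloat()) // 2.5
}
```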
If either operand of an arithmetic operator is an unknown number or a value of the dynamic pseudo-type, the result is an unknown number. ### Logic Operators Logic operators apply only to boolean values and always produce boolean values as results. ``` a && b logical AND a || b logical OR !a logical NOT ``` If either operand of a logic operator is an unknown bool value or a value of the dynamic pseudo-type, the result is an unknown bool value. ### Conditional Operator The conditional operator allows selecting from one of two expressions based on the outcome of a boolean expression. ```ebnf Conditional = Expression "?" Expression ":" Expression; ``` The first expression is the _predicate_, which is evaluated and must produce a boolean result. If the predicate value is `true`, the result of the second expression is the result of the conditional. If the predicate value is `false`, the result of the third expression is the result of the conditional. The second and third expressions must be of the same type or must be able to unify into a common type using the type unification rules defined in the HCL syntax-agnostic information model. This unified type is the result type of the conditional, with both expressions converted as necessary to the unified type. If the predicate is an unknown boolean value or a value of the dynamic pseudo-type then the result is an unknown value of the unified type of the other two expressions. If either the second or third expressions produce errors when evaluated, these errors are passed through only if the erroneous expression is selected. This allows for expressions such as `length(some_list) > 0 ? some_list[0] : default` (given some suitable `length` function) without producing an error when the predicate is `false`. ## Templates The template sub-language is used within template expressions to concisely combine strings and other values to produce other strings. It can also be used in isolation as a standalone template language. ```ebnf Template = ( TemplateLiteral | TemplateInterpolation | TemplateDirective )* TemplateDirective = TemplateIf | TemplateFor; ``` A template behaves like an expression that always returns a string value. The different elements of the template are evaluated and combined into a single string to return. If any of the elements produce an unknown string or a value of the dynamic pseudo-type, the result is an unknown string. An important use-case for standalone templates is to enable the use of expressions in alternative HCL syntaxes where a native expression grammar is not available. For example, the HCL JSON profile treats the values of JSON strings as standalone templates when attributes are evaluated in expression mode. ### Template Literals A template literal is a literal sequence of characters to include in the resulting string. When the template sub-language is used standalone, a template literal can contain any unicode character, with the exception of the sequences that introduce interpolations and directives, and for the sequences that escape those introductions. The interpolation and directive introductions are escaped by doubling their leading characters. The `${` sequence is escaped as `$${` and the `%{` sequence is escaped as `%%{`. When the template sub-language is embedded in the expression language via _template expressions_, additional constraints and transforms are applied to template literals as described in the definition of template expressions. 
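As a non-normative illustration of the escaping rules above, the following Go sketch parses a standalone template in which the first `${` sequence is escaped and the second is interpolated; the variable name `name` and the file name are just example choices.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// The "${" introducing an interpolation is escaped by doubling the
	// dollar sign, so the first sequence below is literal text while the
	// second is evaluated.
	src := `literal: $${name}, interpolated: ${name}`
	tmpl, diags := hclsyntax.ParseTemplate([]byte(src), "tmpl.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		fmt.Println(diags.Error())
		return
	}
	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{"name": cty.StringVal("world")},
	}
	val, _ := tmpl.Value(ctx)
	fmt.Println(val.AsString()) // literal: ${name}, interpolated: world
}
```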
The value of a template literal can be modified by _strip markers_ in any interpolations or directives that are adjacent to it. A strip marker is a tilde (`~`) placed immediately after the opening `{` or before the closing `}` of a template sequence: - `hello ${~ "world" }` produces `"helloworld"`. - `%{ if true ~} hello %{~ endif }` produces `"hello"`. When a strip marker is present, any spaces adjacent to it in the corresponding string literal (if any) are removed before producing the final value. Space characters are interpreted as per Unicode's definition. Stripping is done at syntax level rather than value level. Values returned by interpolations or directives are not subject to stripping: - `${"hello" ~}${" world"}` produces `"hello world"`, and not `"helloworld"`, because the space is not in a template literal directly adjacent to the strip marker. ### Template Interpolations An _interpolation sequence_ evaluates an expression (written in the expression sub-language), converts the result to a string value, and replaces itself with the resulting string. ```ebnf TemplateInterpolation = ("${" | "${~") Expression ("}" | "~}"); ``` If the expression result cannot be converted to a string, an error is produced. ### Template If Directive The template `if` directive is the template equivalent of the _conditional expression_, allowing selection of one of two sub-templates based on the value of a predicate expression. ```ebnf TemplateIf = ( ("%{" | "%{~") "if" Expression ("}" | "~}") Template ( ("%{" | "%{~") "else" ("}" | "~}") Template )? ("%{" | "%{~") "endif" ("}" | "~}") ); ``` The evaluation of the `if` directive is equivalent to the conditional expression, with the following exceptions: - The two sub-templates always produce strings, and thus the result value is also always a string. - The `else` clause may be omitted, in which case the conditional's third expression result is implied to be the empty string. ### Template For Directive The template `for` directive is the template equivalent of the _for expression_, producing zero or more copies of its sub-template based on the elements of a collection. ```ebnf TemplateFor = ( ("%{" | "%{~") "for" Identifier ("," Identifier) "in" Expression ("}" | "~}") Template ("%{" | "%{~") "endfor" ("}" | "~}") ); ``` The evaluation of the `for` directive is equivalent to the _for expression_ when producing a tuple, with the following exceptions: - The sub-template always produces a string. - There is no equivalent of the "if" clause on the for expression. - The elements of the resulting tuple are all converted to strings and concatenated to produce a flat string result. ### Template Interpolation Unwrapping As a special case, a template that consists only of a single interpolation, with no surrounding literals, directives or other interpolations, is "unwrapped". In this case, the result of the interpolation expression is returned verbatim, without conversion to string. This special case exists primarily to enable the native template language to be used inside strings in alternative HCL syntaxes that lack a first-class template or expression syntax. Unwrapping allows arbitrary expressions to be used to populate attributes when strings in such languages are interpreted as templates. - `${true}` produces the boolean value `true` - `${"${true}"}` produces the boolean value `true`, because both the inner and outer interpolations are subject to unwrapping.
- `hello ${true}` produces the string `"hello true"` - `${""}${true}` produces the string `"true"` because there are two interpolation sequences, even though one produces an empty result. - `%{ for v in [true] }${v}%{ endfor }` produces the string `true` because the presence of the `for` directive circumvents the unwrapping even though the final result is a single value. In some contexts this unwrapping behavior may be circumvented by the calling application, by converting the final template result to string. This is necessary, for example, if a standalone template is being used to produce the direct contents of a file, since the result in that case must always be a string. ## Static Analysis The HCL static analysis operations are implemented for some expression types in the native syntax, as described in the following sections. A goal for static analysis of the native syntax is for the interpretation to be as consistent as possible with the dynamic evaluation interpretation of the given expression, though some deviations are intentionally made in order to maximize the potential for analysis. ### Static List The tuple construction syntax can be interpreted as a static list. All of the expression elements given are returned as the static list elements, with no further interpretation. ### Static Map The object construction syntax can be interpreted as a static map. All of the key/value pairs given are returned as the static pairs, with no further interpretation. The usual requirement that an attribute name be interpretable as a string does not apply to this static analysis, allowing callers to provide map-like constructs with different key types by building on the map syntax. ### Static Call The function call syntax can be interpreted as a static call. The called function name is returned verbatim and the given argument expressions are returned as the static arguments, with no further interpretation. ### Static Traversal A variable expression and any attached attribute access operations and constant index operations can be interpreted as a static traversal. The keywords `true`, `false` and `null` can also be interpreted as static traversals, behaving as if they were references to variables of those names, to allow callers to redefine the meaning of those keywords in certain contexts. hcl-2.14.1/hclsyntax/structure.go000066400000000000000000000235631431334125700167650ustar00rootroot00000000000000package hclsyntax import ( "fmt" "strings" "github.com/hashicorp/hcl/v2" ) // AsHCLBlock returns the block data expressed as a *hcl.Block. func (b *Block) AsHCLBlock() *hcl.Block { if b == nil { return nil } return &hcl.Block{ Type: b.Type, Labels: b.Labels, Body: b.Body, DefRange: b.DefRange(), TypeRange: b.TypeRange, LabelRanges: b.LabelRanges, } } // Body is the implementation of hcl.Body for the HCL native syntax. type Body struct { Attributes Attributes Blocks Blocks // These are used with PartialContent to produce a "remaining items" // body to return. They are nil on all bodies fresh out of the parser. 
hiddenAttrs map[string]struct{} hiddenBlocks map[string]struct{} SrcRange hcl.Range EndRange hcl.Range // Final token of the body (zero-length range) } // Assert that *Body implements hcl.Body var assertBodyImplBody hcl.Body = &Body{} func (b *Body) walkChildNodes(w internalWalkFunc) { w(b.Attributes) w(b.Blocks) } func (b *Body) Range() hcl.Range { return b.SrcRange } func (b *Body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) { content, remainHCL, diags := b.PartialContent(schema) // No we'll see if anything actually remains, to produce errors about // extraneous items. remain := remainHCL.(*Body) for name, attr := range b.Attributes { if _, hidden := remain.hiddenAttrs[name]; !hidden { var suggestions []string for _, attrS := range schema.Attributes { if _, defined := content.Attributes[attrS.Name]; defined { continue } suggestions = append(suggestions, attrS.Name) } suggestion := nameSuggestion(name, suggestions) if suggestion != "" { suggestion = fmt.Sprintf(" Did you mean %q?", suggestion) } else { // Is there a block of the same name? for _, blockS := range schema.Blocks { if blockS.Type == name { suggestion = fmt.Sprintf(" Did you mean to define a block of type %q?", name) break } } } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unsupported argument", Detail: fmt.Sprintf("An argument named %q is not expected here.%s", name, suggestion), Subject: &attr.NameRange, }) } } for _, block := range b.Blocks { blockTy := block.Type if _, hidden := remain.hiddenBlocks[blockTy]; !hidden { var suggestions []string for _, blockS := range schema.Blocks { suggestions = append(suggestions, blockS.Type) } suggestion := nameSuggestion(blockTy, suggestions) if suggestion != "" { suggestion = fmt.Sprintf(" Did you mean %q?", suggestion) } else { // Is there an attribute of the same name? for _, attrS := range schema.Attributes { if attrS.Name == blockTy { suggestion = fmt.Sprintf(" Did you mean to define argument %q? 
If so, use the equals sign to assign it a value.", blockTy) break } } } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unsupported block type", Detail: fmt.Sprintf("Blocks of type %q are not expected here.%s", blockTy, suggestion), Subject: &block.TypeRange, }) } } return content, diags } func (b *Body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) { attrs := make(hcl.Attributes) var blocks hcl.Blocks var diags hcl.Diagnostics hiddenAttrs := make(map[string]struct{}) hiddenBlocks := make(map[string]struct{}) if b.hiddenAttrs != nil { for k, v := range b.hiddenAttrs { hiddenAttrs[k] = v } } if b.hiddenBlocks != nil { for k, v := range b.hiddenBlocks { hiddenBlocks[k] = v } } for _, attrS := range schema.Attributes { name := attrS.Name attr, exists := b.Attributes[name] _, hidden := hiddenAttrs[name] if hidden || !exists { if attrS.Required { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing required argument", Detail: fmt.Sprintf("The argument %q is required, but no definition was found.", attrS.Name), Subject: b.MissingItemRange().Ptr(), }) } continue } hiddenAttrs[name] = struct{}{} attrs[name] = attr.AsHCLAttribute() } blocksWanted := make(map[string]hcl.BlockHeaderSchema) for _, blockS := range schema.Blocks { blocksWanted[blockS.Type] = blockS } for _, block := range b.Blocks { if _, hidden := hiddenBlocks[block.Type]; hidden { continue } blockS, wanted := blocksWanted[block.Type] if !wanted { continue } if len(block.Labels) > len(blockS.LabelNames) { name := block.Type if len(blockS.LabelNames) == 0 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Extraneous label for %s", name), Detail: fmt.Sprintf( "No labels are expected for %s blocks.", name, ), Subject: block.LabelRanges[0].Ptr(), Context: hcl.RangeBetween(block.TypeRange, block.OpenBraceRange).Ptr(), }) } else { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Extraneous label for %s", name), Detail: fmt.Sprintf( "Only %d labels (%s) are expected for %s blocks.", len(blockS.LabelNames), strings.Join(blockS.LabelNames, ", "), name, ), Subject: block.LabelRanges[len(blockS.LabelNames)].Ptr(), Context: hcl.RangeBetween(block.TypeRange, block.OpenBraceRange).Ptr(), }) } continue } if len(block.Labels) < len(blockS.LabelNames) { name := block.Type diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Missing %s for %s", blockS.LabelNames[len(block.Labels)], name), Detail: fmt.Sprintf( "All %s blocks must have %d labels (%s).", name, len(blockS.LabelNames), strings.Join(blockS.LabelNames, ", "), ), Subject: &block.OpenBraceRange, Context: hcl.RangeBetween(block.TypeRange, block.OpenBraceRange).Ptr(), }) continue } blocks = append(blocks, block.AsHCLBlock()) } // We hide blocks only after we've processed all of them, since otherwise // we can't process more than one of the same type. 
for _, blockS := range schema.Blocks { hiddenBlocks[blockS.Type] = struct{}{} } remain := &Body{ Attributes: b.Attributes, Blocks: b.Blocks, hiddenAttrs: hiddenAttrs, hiddenBlocks: hiddenBlocks, SrcRange: b.SrcRange, EndRange: b.EndRange, } return &hcl.BodyContent{ Attributes: attrs, Blocks: blocks, MissingItemRange: b.MissingItemRange(), }, remain, diags } func (b *Body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) { attrs := make(hcl.Attributes) var diags hcl.Diagnostics if len(b.Blocks) > 0 { example := b.Blocks[0] diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: fmt.Sprintf("Unexpected %q block", example.Type), Detail: "Blocks are not allowed here.", Subject: &example.TypeRange, }) // we will continue processing anyway, and return the attributes // we are able to find so that certain analyses can still be done // in the face of errors. } if b.Attributes == nil { return attrs, diags } for name, attr := range b.Attributes { if _, hidden := b.hiddenAttrs[name]; hidden { continue } attrs[name] = attr.AsHCLAttribute() } return attrs, diags } func (b *Body) MissingItemRange() hcl.Range { return hcl.Range{ Filename: b.SrcRange.Filename, Start: b.SrcRange.Start, End: b.SrcRange.Start, } } // Attributes is the collection of attribute definitions within a body. type Attributes map[string]*Attribute func (a Attributes) walkChildNodes(w internalWalkFunc) { for _, attr := range a { w(attr) } } // Range returns the range of some arbitrary point within the set of // attributes, or an invalid range if there are no attributes. // // This is provided only to complete the Node interface, but has no practical // use. func (a Attributes) Range() hcl.Range { // An attributes doesn't really have a useful range to report, since // it's just a grouping construct. So we'll arbitrarily take the // range of one of the attributes, or produce an invalid range if we have // none. In practice, there's little reason to ask for the range of // an Attributes. for _, attr := range a { return attr.Range() } return hcl.Range{ Filename: "", } } // Attribute represents a single attribute definition within a body. type Attribute struct { Name string Expr Expression SrcRange hcl.Range NameRange hcl.Range EqualsRange hcl.Range } func (a *Attribute) walkChildNodes(w internalWalkFunc) { w(a.Expr) } func (a *Attribute) Range() hcl.Range { return a.SrcRange } // AsHCLAttribute returns the block data expressed as a *hcl.Attribute. func (a *Attribute) AsHCLAttribute() *hcl.Attribute { if a == nil { return nil } return &hcl.Attribute{ Name: a.Name, Expr: a.Expr, Range: a.SrcRange, NameRange: a.NameRange, } } // Blocks is the list of nested blocks within a body. type Blocks []*Block func (bs Blocks) walkChildNodes(w internalWalkFunc) { for _, block := range bs { w(block) } } // Range returns the range of some arbitrary point within the list of // blocks, or an invalid range if there are no blocks. // // This is provided only to complete the Node interface, but has no practical // use. 
func (bs Blocks) Range() hcl.Range { if len(bs) > 0 { return bs[0].Range() } return hcl.Range{ Filename: "", } } // Block represents a nested block structure type Block struct { Type string Labels []string Body *Body TypeRange hcl.Range LabelRanges []hcl.Range OpenBraceRange hcl.Range CloseBraceRange hcl.Range } func (b *Block) walkChildNodes(w internalWalkFunc) { w(b.Body) } func (b *Block) Range() hcl.Range { return hcl.RangeBetween(b.TypeRange, b.CloseBraceRange) } func (b *Block) DefRange() hcl.Range { lastHeaderRange := b.TypeRange if len(b.LabelRanges) > 0 { lastHeaderRange = b.LabelRanges[len(b.LabelRanges)-1] } return hcl.RangeBetween(b.TypeRange, lastHeaderRange) } hcl-2.14.1/hclsyntax/structure_at_pos.go000066400000000000000000000067531431334125700203340ustar00rootroot00000000000000package hclsyntax import ( "github.com/hashicorp/hcl/v2" ) // ----------------------------------------------------------------------------- // The methods in this file are all optional extension methods that serve to // implement the methods of the same name on *hcl.File when its root body // is provided by this package. // ----------------------------------------------------------------------------- // BlocksAtPos implements the method of the same name for an *hcl.File that // is backed by a *Body. func (b *Body) BlocksAtPos(pos hcl.Pos) []*hcl.Block { list, _ := b.blocksAtPos(pos, true) return list } // InnermostBlockAtPos implements the method of the same name for an *hcl.File // that is backed by a *Body. func (b *Body) InnermostBlockAtPos(pos hcl.Pos) *hcl.Block { _, innermost := b.blocksAtPos(pos, false) return innermost.AsHCLBlock() } // OutermostBlockAtPos implements the method of the same name for an *hcl.File // that is backed by a *Body. func (b *Body) OutermostBlockAtPos(pos hcl.Pos) *hcl.Block { return b.outermostBlockAtPos(pos).AsHCLBlock() } // blocksAtPos is the internal engine of both BlocksAtPos and // InnermostBlockAtPos, which both need to do the same logic but return a // differently-shaped result. // // list is nil if makeList is false, avoiding an allocation. Innermost is // always set, and if the returned list is non-nil it will always match the // final element from that list. func (b *Body) blocksAtPos(pos hcl.Pos, makeList bool) (list []*hcl.Block, innermost *Block) { current := b Blocks: for current != nil { for _, block := range current.Blocks { wholeRange := hcl.RangeBetween(block.TypeRange, block.CloseBraceRange) if wholeRange.ContainsPos(pos) { innermost = block if makeList { list = append(list, innermost.AsHCLBlock()) } current = block.Body continue Blocks } } // If we fall out here then none of the current body's nested blocks // contain the position we are looking for, and so we're done. break } return } // outermostBlockAtPos is the internal version of OutermostBlockAtPos that // returns a hclsyntax.Block rather than an hcl.Block, allowing for further // analysis if necessary. func (b *Body) outermostBlockAtPos(pos hcl.Pos) *Block { // This is similar to blocksAtPos, but simpler because we know it only // ever needs to search the first level of nested blocks. for _, block := range b.Blocks { wholeRange := hcl.RangeBetween(block.TypeRange, block.CloseBraceRange) if wholeRange.ContainsPos(pos) { return block } } return nil } // AttributeAtPos implements the method of the same name for an *hcl.File // that is backed by a *Body. 
func (b *Body) AttributeAtPos(pos hcl.Pos) *hcl.Attribute { return b.attributeAtPos(pos).AsHCLAttribute() } // attributeAtPos is the internal version of AttributeAtPos that returns a // hclsyntax.Block rather than an hcl.Block, allowing for further analysis if // necessary. func (b *Body) attributeAtPos(pos hcl.Pos) *Attribute { searchBody := b _, block := b.blocksAtPos(pos, false) if block != nil { searchBody = block.Body } for _, attr := range searchBody.Attributes { if attr.SrcRange.ContainsPos(pos) { return attr } } return nil } // OutermostExprAtPos implements the method of the same name for an *hcl.File // that is backed by a *Body. func (b *Body) OutermostExprAtPos(pos hcl.Pos) hcl.Expression { attr := b.attributeAtPos(pos) if attr == nil { return nil } if !attr.Expr.Range().ContainsPos(pos) { return nil } return attr.Expr } hcl-2.14.1/hclsyntax/structure_at_pos_test.go000066400000000000000000000146521431334125700213700ustar00rootroot00000000000000package hclsyntax import ( "reflect" "testing" "github.com/hashicorp/hcl/v2" ) func TestBlocksAtPos(t *testing.T) { tests := map[string]struct { Src string Pos hcl.Pos WantTypes []string }{ "empty": { ``, hcl.Pos{Byte: 0}, nil, }, "spaces": { ` `, hcl.Pos{Byte: 1}, nil, }, "single in header": { `foo {}`, hcl.Pos{Byte: 1}, []string{"foo"}, }, "single in body": { `foo { }`, hcl.Pos{Byte: 7}, []string{"foo"}, }, "single in body with unselected nested": { ` foo { bar { } } `, hcl.Pos{Byte: 10}, []string{"foo"}, }, "single in body with unselected sibling": { ` foo { } bar { } `, hcl.Pos{Byte: 10}, []string{"foo"}, }, "selected nested two levels": { ` foo { bar { } } `, hcl.Pos{Byte: 20}, []string{"foo", "bar"}, }, "selected nested three levels": { ` foo { bar { baz { } } } `, hcl.Pos{Byte: 31}, []string{"foo", "bar", "baz"}, }, "selected nested three levels with unselected sibling after": { ` foo { bar { baz { } } not_wanted {} } `, hcl.Pos{Byte: 31}, []string{"foo", "bar", "baz"}, }, "selected nested three levels with unselected sibling before": { ` foo { not_wanted {} bar { baz { } } } `, hcl.Pos{Byte: 49}, []string{"foo", "bar", "baz"}, }, "unterminated": { `foo { `, hcl.Pos{Byte: 7}, []string{"foo"}, }, "unterminated nested": { ` foo { bar { } `, hcl.Pos{Byte: 16}, []string{"foo", "bar"}, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { f, diags := ParseConfig([]byte(test.Src), "", hcl.Pos{Line: 1, Column: 1}) for _, diag := range diags { // We intentionally ignore diagnostics here because we should be // able to work with the incomplete configuration that results // when the parser does its recovery behavior. However, we do // log them in case it's helpful to someone debugging a failing // test. 
t.Logf(diag.Error()) } blocks := f.BlocksAtPos(test.Pos) outermost := f.OutermostBlockAtPos(test.Pos) innermost := f.InnermostBlockAtPos(test.Pos) gotTypes := make([]string, len(blocks)) for i, block := range blocks { gotTypes[i] = block.Type } if len(test.WantTypes) == 0 { if len(gotTypes) != 0 { t.Errorf("wrong block types\ngot: %#v\nwant: (none)", gotTypes) } if outermost != nil { t.Errorf("wrong outermost type\ngot: %#v\nwant: (none)", outermost.Type) } if innermost != nil { t.Errorf("wrong innermost type\ngot: %#v\nwant: (none)", innermost.Type) } return } if !reflect.DeepEqual(gotTypes, test.WantTypes) { if len(gotTypes) != 0 { t.Errorf("wrong block types\ngot: %#v\nwant: %#v", gotTypes, test.WantTypes) } } if got, want := outermost.Type, test.WantTypes[0]; got != want { t.Errorf("wrong outermost type\ngot: %#v\nwant: %#v", got, want) } if got, want := innermost.Type, test.WantTypes[len(test.WantTypes)-1]; got != want { t.Errorf("wrong innermost type\ngot: %#v\nwant: %#v", got, want) } }) } } func TestAttributeAtPos(t *testing.T) { tests := map[string]struct { Src string Pos hcl.Pos WantName string }{ "empty": { ``, hcl.Pos{Byte: 0}, "", }, "top-level": { `foo = 1`, hcl.Pos{Byte: 0}, "foo", }, "top-level with ignored sibling after": { ` foo = 1 bar = 2 `, hcl.Pos{Byte: 6}, "foo", }, "top-level ignored sibling before": { ` foo = 1 bar = 2 `, hcl.Pos{Byte: 17}, "bar", }, "nested": { ` foo { bar = 2 } `, hcl.Pos{Byte: 17}, "bar", }, "nested in unterminated block": { ` foo { bar = 2 `, hcl.Pos{Byte: 17}, "bar", }, } for name, test := range tests { t.Run(name, func(t *testing.T) { f, diags := ParseConfig([]byte(test.Src), "", hcl.Pos{Line: 1, Column: 1}) for _, diag := range diags { // We intentionally ignore diagnostics here because we should be // able to work with the incomplete configuration that results // when the parser does its recovery behavior. However, we do // log them in case it's helpful to someone debugging a failing // test. t.Logf(diag.Error()) } got := f.AttributeAtPos(test.Pos) if test.WantName == "" { if got != nil { t.Errorf("wrong attribute name\ngot: %#v\nwant: (none)", got.Name) } return } if got == nil { t.Fatalf("wrong attribute name\ngot: (none)\nwant: %#v", test.WantName) } if got.Name != test.WantName { t.Errorf("wrong attribute name\ngot: %#v\nwant: %#v", got.Name, test.WantName) } }) } } func TestOutermostExprAtPos(t *testing.T) { tests := map[string]struct { Src string Pos hcl.Pos WantSrc string }{ "empty": { ``, hcl.Pos{Byte: 0}, ``, }, "simple bool": { `a = true`, hcl.Pos{Byte: 6}, `true`, }, "simple reference": { `a = blah`, hcl.Pos{Byte: 6}, `blah`, }, "attribute reference": { `a = blah.foo`, hcl.Pos{Byte: 6}, `blah.foo`, }, "parens": { `a = (1 + 1)`, hcl.Pos{Byte: 6}, `(1 + 1)`, }, "tuple cons": { `a = [1, 2, 3]`, hcl.Pos{Byte: 5}, `[1, 2, 3]`, }, "function call": { `a = foom("a")`, hcl.Pos{Byte: 10}, `foom("a")`, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { inputSrc := []byte(test.Src) f, diags := ParseConfig(inputSrc, "", hcl.Pos{Line: 1, Column: 1}) for _, diag := range diags { // We intentionally ignore diagnostics here because we should be // able to work with the incomplete configuration that results // when the parser does its recovery behavior. However, we do // log them in case it's helpful to someone debugging a failing // test. 
t.Logf(diag.Error()) } gotExpr := f.OutermostExprAtPos(test.Pos) var gotSrc string if gotExpr != nil { rng := gotExpr.Range() gotSrc = string(rng.SliceBytes(inputSrc)) } if test.WantSrc == "" { if gotExpr != nil { t.Errorf("wrong expression source\ngot: %s\nwant: (none)", gotSrc) } return } if gotExpr == nil { t.Fatalf("wrong expression source\ngot: (none)\nwant: %s", test.WantSrc) } if gotSrc != test.WantSrc { t.Errorf("wrong expression source\ngot: %#v\nwant: %#v", gotSrc, test.WantSrc) } }) } } hcl-2.14.1/hclsyntax/structure_test.go000066400000000000000000000206621431334125700200210ustar00rootroot00000000000000package hclsyntax import ( "fmt" "reflect" "testing" "github.com/hashicorp/hcl/v2" "github.com/kylelemons/godebug/pretty" "github.com/zclconf/go-cty/cty" ) func TestBodyContent(t *testing.T) { tests := []struct { body *Body schema *hcl.BodySchema partial bool want *hcl.BodyContent diagCount int }{ { &Body{}, &hcl.BodySchema{}, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, }, 0, }, // Attributes { &Body{ Attributes: Attributes{ "foo": &Attribute{ Name: "foo", }, }, }, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "foo", }, }, }, false, &hcl.BodyContent{ Attributes: hcl.Attributes{ "foo": &hcl.Attribute{ Name: "foo", }, }, }, 0, }, { &Body{ Attributes: Attributes{ "foo": &Attribute{ Name: "foo", }, }, }, &hcl.BodySchema{}, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, }, 1, // attribute "foo" is not expected }, { &Body{ Attributes: Attributes{ "foo": &Attribute{ Name: "foo", }, }, }, &hcl.BodySchema{}, true, &hcl.BodyContent{ Attributes: hcl.Attributes{}, }, 0, // in partial mode, so extra "foo" is acceptable }, { &Body{ Attributes: Attributes{}, }, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "foo", }, }, }, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, }, 0, // "foo" not required, so no error }, { &Body{ Attributes: Attributes{}, }, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "foo", Required: true, }, }, }, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, }, 1, // "foo" is required }, { &Body{ Attributes: Attributes{ "foo": &Attribute{ Name: "foo", }, }, }, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "foo", }, }, }, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, }, 1, // attribute "foo" not expected (it's defined as a block) }, // Blocks { &Body{ Blocks: Blocks{ &Block{ Type: "foo", }, }, }, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "foo", }, }, }, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, Blocks: hcl.Blocks{ { Type: "foo", Body: (*Body)(nil), }, }, }, 0, }, { &Body{ Blocks: Blocks{ &Block{ Type: "foo", }, &Block{ Type: "foo", }, }, }, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "foo", }, }, }, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, Blocks: hcl.Blocks{ { Type: "foo", Body: (*Body)(nil), }, { Type: "foo", Body: (*Body)(nil), }, }, }, 0, }, { &Body{ Blocks: Blocks{ &Block{ Type: "foo", }, &Block{ Type: "bar", }, }, }, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "foo", }, }, }, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, Blocks: hcl.Blocks{ { Type: "foo", Body: (*Body)(nil), }, }, }, 1, // blocks of type "bar" not expected }, { &Body{ Blocks: Blocks{ &Block{ Type: "foo", }, &Block{ Type: "bar", }, }, }, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "foo", }, }, }, true, &hcl.BodyContent{ Attributes: hcl.Attributes{}, Blocks: hcl.Blocks{ { Type: "foo", Body: (*Body)(nil), }, }, }, 0, // extra 
"bar" allowed because we're in partial mode }, { &Body{ Blocks: Blocks{ &Block{ Type: "foo", Labels: []string{"bar"}, }, }, }, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "foo", LabelNames: []string{"name"}, }, }, }, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, Blocks: hcl.Blocks{ { Type: "foo", Labels: []string{"bar"}, Body: (*Body)(nil), }, }, }, 0, }, { &Body{ Blocks: Blocks{ &Block{ Type: "foo", }, }, }, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "foo", LabelNames: []string{"name"}, }, }, }, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, }, 1, // missing label "name" }, { &Body{ Blocks: Blocks{ &Block{ Type: "foo", Labels: []string{"bar"}, LabelRanges: []hcl.Range{{}}, }, }, }, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "foo", }, }, }, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, }, 1, // no labels expected }, { &Body{ Blocks: Blocks{ &Block{ Type: "foo", Labels: []string{"bar", "baz"}, LabelRanges: []hcl.Range{{}, {}}, }, }, }, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "foo", LabelNames: []string{"name"}, }, }, }, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, }, 1, // too many labels }, { &Body{ Attributes: Attributes{ "foo": &Attribute{ Name: "foo", }, }, }, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "foo", }, }, }, false, &hcl.BodyContent{ Attributes: hcl.Attributes{}, }, 1, // should've been a block, not an attribute }, } prettyConfig := &pretty.Config{ Diffable: true, IncludeUnexported: true, PrintStringers: true, } for i, test := range tests { t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) { var got *hcl.BodyContent var diags hcl.Diagnostics if test.partial { got, _, diags = test.body.PartialContent(test.schema) } else { got, diags = test.body.Content(test.schema) } if len(diags) != test.diagCount { t.Errorf("wrong number of diagnostics %d; want %d", len(diags), test.diagCount) for _, diag := range diags { t.Logf(" - %s", diag.Error()) } } if !reflect.DeepEqual(got, test.want) { t.Errorf( "wrong result\ndiff: %s", prettyConfig.Compare(test.want, got), ) } }) } } func TestBodyJustAttributes(t *testing.T) { tests := []struct { body *Body want hcl.Attributes diagCount int }{ { &Body{}, hcl.Attributes{}, 0, }, { &Body{ Attributes: Attributes{}, }, hcl.Attributes{}, 0, }, { &Body{ Attributes: Attributes{ "foo": &Attribute{ Name: "foo", Expr: &LiteralValueExpr{ Val: cty.StringVal("bar"), }, }, }, }, hcl.Attributes{ "foo": &hcl.Attribute{ Name: "foo", Expr: &LiteralValueExpr{ Val: cty.StringVal("bar"), }, }, }, 0, }, { &Body{ Attributes: Attributes{ "foo": &Attribute{ Name: "foo", Expr: &LiteralValueExpr{ Val: cty.StringVal("bar"), }, }, }, Blocks: Blocks{ { Type: "foo", }, }, }, hcl.Attributes{ "foo": &hcl.Attribute{ Name: "foo", Expr: &LiteralValueExpr{ Val: cty.StringVal("bar"), }, }, }, 1, // blocks are not allowed here }, { &Body{ Attributes: Attributes{ "foo": &Attribute{ Name: "foo", Expr: &LiteralValueExpr{ Val: cty.StringVal("bar"), }, }, }, hiddenAttrs: map[string]struct{}{ "foo": struct{}{}, }, }, hcl.Attributes{}, 0, }, } prettyConfig := &pretty.Config{ Diffable: true, IncludeUnexported: true, PrintStringers: true, } for i, test := range tests { t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) { got, diags := test.body.JustAttributes() if len(diags) != test.diagCount { t.Errorf("wrong number of diagnostics %d; want %d", len(diags), test.diagCount) for _, diag := range diags { t.Logf(" - %s", diag.Error()) } } if !reflect.DeepEqual(got, test.want) { t.Errorf( 
"wrong result\nbody: %s\ndiff: %s", prettyConfig.Sprint(test.body), prettyConfig.Compare(test.want, got), ) } }) } } hcl-2.14.1/hclsyntax/token.go000066400000000000000000000226501431334125700160410ustar00rootroot00000000000000package hclsyntax import ( "bytes" "fmt" "github.com/apparentlymart/go-textseg/v13/textseg" "github.com/hashicorp/hcl/v2" ) // Token represents a sequence of bytes from some HCL code that has been // tagged with a type and its range within the source file. type Token struct { Type TokenType Bytes []byte Range hcl.Range } // Tokens is a slice of Token. type Tokens []Token // TokenType is an enumeration used for the Type field on Token. type TokenType rune const ( // Single-character tokens are represented by their own character, for // convenience in producing these within the scanner. However, the values // are otherwise arbitrary and just intended to be mnemonic for humans // who might see them in debug output. TokenOBrace TokenType = '{' TokenCBrace TokenType = '}' TokenOBrack TokenType = '[' TokenCBrack TokenType = ']' TokenOParen TokenType = '(' TokenCParen TokenType = ')' TokenOQuote TokenType = '«' TokenCQuote TokenType = '»' TokenOHeredoc TokenType = 'H' TokenCHeredoc TokenType = 'h' TokenStar TokenType = '*' TokenSlash TokenType = '/' TokenPlus TokenType = '+' TokenMinus TokenType = '-' TokenPercent TokenType = '%' TokenEqual TokenType = '=' TokenEqualOp TokenType = '≔' TokenNotEqual TokenType = '≠' TokenLessThan TokenType = '<' TokenLessThanEq TokenType = '≤' TokenGreaterThan TokenType = '>' TokenGreaterThanEq TokenType = '≥' TokenAnd TokenType = '∧' TokenOr TokenType = '∨' TokenBang TokenType = '!' TokenDot TokenType = '.' TokenComma TokenType = ',' TokenEllipsis TokenType = '…' TokenFatArrow TokenType = '⇒' TokenQuestion TokenType = '?' TokenColon TokenType = ':' TokenTemplateInterp TokenType = '∫' TokenTemplateControl TokenType = 'λ' TokenTemplateSeqEnd TokenType = '∎' TokenQuotedLit TokenType = 'Q' // might contain backslash escapes TokenStringLit TokenType = 'S' // cannot contain backslash escapes TokenNumberLit TokenType = 'N' TokenIdent TokenType = 'I' TokenComment TokenType = 'C' TokenNewline TokenType = '\n' TokenEOF TokenType = '␄' // The rest are not used in the language but recognized by the scanner so // we can generate good diagnostics in the parser when users try to write // things that might work in other languages they are familiar with, or // simply make incorrect assumptions about the HCL language. TokenBitwiseAnd TokenType = '&' TokenBitwiseOr TokenType = '|' TokenBitwiseNot TokenType = '~' TokenBitwiseXor TokenType = '^' TokenStarStar TokenType = '➚' TokenApostrophe TokenType = '\'' TokenBacktick TokenType = '`' TokenSemicolon TokenType = ';' TokenTabs TokenType = '␉' TokenInvalid TokenType = '�' TokenBadUTF8 TokenType = '💩' TokenQuotedNewline TokenType = '␤' // TokenNil is a placeholder for when a token is required but none is // available, e.g. when reporting errors. The scanner will never produce // this as part of a token stream. TokenNil TokenType = '\x00' ) func (t TokenType) GoString() string { return fmt.Sprintf("hclsyntax.%s", t.String()) } type scanMode int const ( scanNormal scanMode = iota scanTemplate scanIdentOnly ) type tokenAccum struct { Filename string Bytes []byte Pos hcl.Pos Tokens []Token StartByte int } func (f *tokenAccum) emitToken(ty TokenType, startOfs, endOfs int) { // Walk through our buffer to figure out how much we need to adjust // the start pos to get our end pos. 
start := f.Pos start.Column += startOfs + f.StartByte - f.Pos.Byte // Safe because only ASCII spaces can be in the offset start.Byte = startOfs + f.StartByte end := start end.Byte = endOfs + f.StartByte b := f.Bytes[startOfs:endOfs] for len(b) > 0 { advance, seq, _ := textseg.ScanGraphemeClusters(b, true) if (len(seq) == 1 && seq[0] == '\n') || (len(seq) == 2 && seq[0] == '\r' && seq[1] == '\n') { end.Line++ end.Column = 1 } else { end.Column++ } b = b[advance:] } f.Pos = end f.Tokens = append(f.Tokens, Token{ Type: ty, Bytes: f.Bytes[startOfs:endOfs], Range: hcl.Range{ Filename: f.Filename, Start: start, End: end, }, }) } type heredocInProgress struct { Marker []byte StartOfLine bool } func tokenOpensFlushHeredoc(tok Token) bool { if tok.Type != TokenOHeredoc { return false } return bytes.HasPrefix(tok.Bytes, []byte{'<', '<', '-'}) } // checkInvalidTokens does a simple pass across the given tokens and generates // diagnostics for tokens that should _never_ appear in HCL source. This // is intended to avoid the need for the parser to have special support // for them all over. // // Returns a diagnostics with no errors if everything seems acceptable. // Otherwise, returns zero or more error diagnostics, though tries to limit // repetition of the same information. func checkInvalidTokens(tokens Tokens) hcl.Diagnostics { var diags hcl.Diagnostics toldBitwise := 0 toldExponent := 0 toldBacktick := 0 toldApostrophe := 0 toldSemicolon := 0 toldTabs := 0 toldBadUTF8 := 0 for _, tok := range tokens { tokRange := func() *hcl.Range { r := tok.Range return &r } switch tok.Type { case TokenBitwiseAnd, TokenBitwiseOr, TokenBitwiseXor, TokenBitwiseNot: if toldBitwise < 4 { var suggestion string switch tok.Type { case TokenBitwiseAnd: suggestion = " Did you mean boolean AND (\"&&\")?" case TokenBitwiseOr: suggestion = " Did you mean boolean OR (\"||\")?" case TokenBitwiseNot: suggestion = " Did you mean boolean NOT (\"!\")?" } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unsupported operator", Detail: fmt.Sprintf("Bitwise operators are not supported.%s", suggestion), Subject: tokRange(), }) toldBitwise++ } case TokenStarStar: if toldExponent < 1 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unsupported operator", Detail: "\"**\" is not a supported operator. Exponentiation is not supported as an operator.", Subject: tokRange(), }) toldExponent++ } case TokenBacktick: // Only report for alternating (even) backticks, so we won't report both start and ends of the same // backtick-quoted string. if (toldBacktick % 2) == 0 { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid character", Detail: "The \"`\" character is not valid. To create a multi-line string, use the \"heredoc\" syntax, like \"< # # This script uses the unicode spec to generate a Ragel state machine # that recognizes unicode alphanumeric characters. It generates 5 # character classes: uupper, ulower, ualpha, udigit, and ualnum. # Currently supported encodings are UTF-8 [default] and UCS-4. # # Usage: unicode2ragel.rb [options] # -e, --encoding [ucs4 | utf8] Data encoding # -h, --help Show this message # # This script was originally written as part of the Ferret search # engine library. 
# # Author: Rakan El-Khalil require 'optparse' require 'open-uri' ENCODINGS = [ :utf8, :ucs4 ] ALPHTYPES = { :utf8 => "byte", :ucs4 => "rune" } DEFAULT_CHART_URL = "http://www.unicode.org/Public/5.1.0/ucd/DerivedCoreProperties.txt" DEFAULT_MACHINE_NAME= "WChar" ### # Display vars & default option TOTAL_WIDTH = 80 RANGE_WIDTH = 23 @encoding = :utf8 @chart_url = DEFAULT_CHART_URL machine_name = DEFAULT_MACHINE_NAME properties = [] @output = $stdout ### # Option parsing cli_opts = OptionParser.new do |opts| opts.on("-e", "--encoding [ucs4 | utf8]", "Data encoding") do |o| @encoding = o.downcase.to_sym end opts.on("-h", "--help", "Show this message") do puts opts exit end opts.on("-u", "--url URL", "URL to process") do |o| @chart_url = o end opts.on("-m", "--machine MACHINE_NAME", "Machine name") do |o| machine_name = o end opts.on("-p", "--properties x,y,z", Array, "Properties to add to machine") do |o| properties = o end opts.on("-o", "--output FILE", "output file") do |o| @output = File.new(o, "w+") end end cli_opts.parse(ARGV) unless ENCODINGS.member? @encoding puts "Invalid encoding: #{@encoding}" puts cli_opts exit end ## # Downloads the document at url and yields every alpha line's hex # range and description. def each_alpha( url, property ) open( url ) do |file| file.each_line do |line| next if line =~ /^#/; next if line !~ /; #{property} #/; range, description = line.split(/;/) range.strip! description.gsub!(/.*#/, '').strip! if range =~ /\.\./ start, stop = range.split '..' else start = stop = range end yield start.hex .. stop.hex, description end end end ### # Formats to hex at minimum width def to_hex( n ) r = "%0X" % n r = "0#{r}" unless (r.length % 2).zero? r end ### # UCS4 is just a straight hex conversion of the unicode codepoint. def to_ucs4( range ) rangestr = "0x" + to_hex(range.begin) rangestr << "..0x" + to_hex(range.end) if range.begin != range.end [ rangestr ] end ## # 0x00 - 0x7f -> 0zzzzzzz[7] # 0x80 - 0x7ff -> 110yyyyy[5] 10zzzzzz[6] # 0x800 - 0xffff -> 1110xxxx[4] 10yyyyyy[6] 10zzzzzz[6] # 0x010000 - 0x10ffff -> 11110www[3] 10xxxxxx[6] 10yyyyyy[6] 10zzzzzz[6] UTF8_BOUNDARIES = [0x7f, 0x7ff, 0xffff, 0x10ffff] def to_utf8_enc( n ) r = 0 if n <= 0x7f r = n elsif n <= 0x7ff y = 0xc0 | (n >> 6) z = 0x80 | (n & 0x3f) r = y << 8 | z elsif n <= 0xffff x = 0xe0 | (n >> 12) y = 0x80 | (n >> 6) & 0x3f z = 0x80 | n & 0x3f r = x << 16 | y << 8 | z elsif n <= 0x10ffff w = 0xf0 | (n >> 18) x = 0x80 | (n >> 12) & 0x3f y = 0x80 | (n >> 6) & 0x3f z = 0x80 | n & 0x3f r = w << 24 | x << 16 | y << 8 | z end to_hex(r) end def from_utf8_enc( n ) n = n.hex r = 0 if n <= 0x7f r = n elsif n <= 0xdfff y = (n >> 8) & 0x1f z = n & 0x3f r = y << 6 | z elsif n <= 0xefffff x = (n >> 16) & 0x0f y = (n >> 8) & 0x3f z = n & 0x3f r = x << 10 | y << 6 | z elsif n <= 0xf7ffffff w = (n >> 24) & 0x07 x = (n >> 16) & 0x3f y = (n >> 8) & 0x3f z = n & 0x3f r = w << 18 | x << 12 | y << 6 | z end r end ### # Given a range, splits it up into ranges that can be continuously # encoded into utf8. Eg: 0x00 .. 0xff => [0x00..0x7f, 0x80..0xff] # This is not strictly needed since the current [5.1] unicode standard # doesn't have ranges that straddle utf8 boundaries. This is included # for completeness as there is no telling if that will ever change. def utf8_ranges( range ) ranges = [] UTF8_BOUNDARIES.each do |max| if range.begin <= max if range.end <= max ranges << range return ranges end ranges << (range.begin .. max) range = (max + 1) .. 
range.end end end ranges end def build_range( start, stop ) size = start.size/2 left = size - 1 return [""] if size < 1 a = start[0..1] b = stop[0..1] ### # Shared prefix if a == b return build_range(start[2..-1], stop[2..-1]).map do |elt| "0x#{a} " + elt end end ### # Unshared prefix, end of run return ["0x#{a}..0x#{b} "] if left.zero? ### # Unshared prefix, not end of run # Range can be 0x123456..0x56789A # Which is equivalent to: # 0x123456 .. 0x12FFFF # 0x130000 .. 0x55FFFF # 0x560000 .. 0x56789A ret = [] ret << build_range(start, a + "FF" * left) ### # Only generate middle range if need be. if a.hex+1 != b.hex max = to_hex(b.hex - 1) max = "FF" if b == "FF" ret << "0x#{to_hex(a.hex+1)}..0x#{max} " + "0x00..0xFF " * left end ### # Don't generate last range if it is covered by first range ret << build_range(b + "00" * left, stop) unless b == "FF" ret.flatten! end def to_utf8( range ) utf8_ranges( range ).map do |r| begin_enc = to_utf8_enc(r.begin) end_enc = to_utf8_enc(r.end) build_range begin_enc, end_enc end.flatten! end ## # Perform a 3-way comparison of the number of codepoints advertised by # the unicode spec for the given range, the originally parsed range, # and the resulting utf8 encoded range. def count_codepoints( code ) code.split(' ').inject(1) do |acc, elt| if elt =~ /0x(.+)\.\.0x(.+)/ if @encoding == :utf8 acc * (from_utf8_enc($2) - from_utf8_enc($1) + 1) else acc * ($2.hex - $1.hex + 1) end else acc end end end def is_valid?( range, desc, codes ) spec_count = 1 spec_count = $1.to_i if desc =~ /\[(\d+)\]/ range_count = range.end - range.begin + 1 sum = codes.inject(0) { |acc, elt| acc + count_codepoints(elt) } sum == spec_count and sum == range_count end ## # Generate the state maching to stdout def generate_machine( name, property ) pipe = " " @output.puts " #{name} = " each_alpha( @chart_url, property ) do |range, desc| codes = (@encoding == :ucs4) ? to_ucs4(range) : to_utf8(range) #raise "Invalid encoding of range #{range}: #{codes.inspect}" unless # is_valid? range, desc, codes range_width = codes.map { |a| a.size }.max range_width = RANGE_WIDTH if range_width < RANGE_WIDTH desc_width = TOTAL_WIDTH - RANGE_WIDTH - 11 desc_width -= (range_width - RANGE_WIDTH) if range_width > RANGE_WIDTH if desc.size > desc_width desc = desc[0..desc_width - 4] + "..." end codes.each_with_index do |r, idx| desc = "" unless idx.zero? 
code = "%-#{range_width}s" % r @output.puts " #{pipe} #{code} ##{desc}" pipe = "|" end end @output.puts " ;" @output.puts "" end @output.puts < 0: line.lead[0].SpacesBefore = 2 * len(indents) indents = append(indents, netBrackets) case netBrackets < 0: closed := -netBrackets for closed > 0 && len(indents) > 0 { switch { case closed > indents[len(indents)-1]: closed -= indents[len(indents)-1] indents = indents[:len(indents)-1] case closed < indents[len(indents)-1]: indents[len(indents)-1] -= closed closed = 0 default: indents = indents[:len(indents)-1] closed = 0 } } line.lead[0].SpacesBefore = 2 * len(indents) default: line.lead[0].SpacesBefore = 2 * len(indents) } } } func formatSpaces(lines []formatLine) { for _, line := range lines { for i, token := range line.lead { var before, after *Token if i > 0 { before = line.lead[i-1] } else { before = nilToken } if i < (len(line.lead) - 1) { after = line.lead[i+1] } else { continue } if spaceAfterToken(token, before, after) { after.SpacesBefore = 1 } else { after.SpacesBefore = 0 } } for i, token := range line.assign { if i == 0 { // first token in "assign" always has one space before to // separate the equals sign from what it's assigning. token.SpacesBefore = 1 } var before, after *Token if i > 0 { before = line.assign[i-1] } else { before = nilToken } if i < (len(line.assign) - 1) { after = line.assign[i+1] } else { continue } if spaceAfterToken(token, before, after) { after.SpacesBefore = 1 } else { after.SpacesBefore = 0 } } } } func formatCells(lines []formatLine) { chainStart := -1 maxColumns := 0 // We'll deal with the "assign" cell first, since moving that will // also impact the "comment" cell. closeAssignChain := func(i int) { for _, chainLine := range lines[chainStart:i] { columns := chainLine.lead.Columns() spaces := (maxColumns - columns) + 1 chainLine.assign[0].SpacesBefore = spaces } chainStart = -1 maxColumns = 0 } for i, line := range lines { if line.assign == nil { if chainStart != -1 { closeAssignChain(i) } } else { if chainStart == -1 { chainStart = i } columns := line.lead.Columns() if columns > maxColumns { maxColumns = columns } } } if chainStart != -1 { closeAssignChain(len(lines)) } // Now we'll deal with the comments closeCommentChain := func(i int) { for _, chainLine := range lines[chainStart:i] { columns := chainLine.lead.Columns() + chainLine.assign.Columns() spaces := (maxColumns - columns) + 1 chainLine.comment[0].SpacesBefore = spaces } chainStart = -1 maxColumns = 0 } for i, line := range lines { if line.comment == nil { if chainStart != -1 { closeCommentChain(i) } } else { if chainStart == -1 { chainStart = i } columns := line.lead.Columns() + line.assign.Columns() if columns > maxColumns { maxColumns = columns } } } if chainStart != -1 { closeCommentChain(len(lines)) } } // spaceAfterToken decides whether a particular subject token should have a // space after it when surrounded by the given before and after tokens. // "before" can be TokenNil, if the subject token is at the start of a sequence. 
func spaceAfterToken(subject, before, after *Token) bool { switch { case after.Type == hclsyntax.TokenNewline || after.Type == hclsyntax.TokenNil: // Never add spaces before a newline return false case subject.Type == hclsyntax.TokenIdent && after.Type == hclsyntax.TokenOParen: // Don't split a function name from open paren in a call return false case subject.Type == hclsyntax.TokenDot || after.Type == hclsyntax.TokenDot: // Don't use spaces around attribute access dots return false case after.Type == hclsyntax.TokenComma || after.Type == hclsyntax.TokenEllipsis: // No space right before a comma or ... in an argument list return false case subject.Type == hclsyntax.TokenComma: // Always a space after a comma return true case subject.Type == hclsyntax.TokenQuotedLit || subject.Type == hclsyntax.TokenStringLit || subject.Type == hclsyntax.TokenOQuote || subject.Type == hclsyntax.TokenOHeredoc || after.Type == hclsyntax.TokenQuotedLit || after.Type == hclsyntax.TokenStringLit || after.Type == hclsyntax.TokenCQuote || after.Type == hclsyntax.TokenCHeredoc: // No extra spaces within templates return false case inKeyword.TokenMatches(subject.asHCLSyntax()) && before.Type == hclsyntax.TokenIdent: // This is a special case for inside for expressions where a user // might want to use a literal tuple constructor: // [for x in [foo]: x] // ... in that case, we would normally produce in[foo] thinking that // in is a reference, but we'll recognize it as a keyword here instead // to make the result less confusing. return true case after.Type == hclsyntax.TokenOBrack && (subject.Type == hclsyntax.TokenIdent || subject.Type == hclsyntax.TokenNumberLit || tokenBracketChange(subject) < 0): return false case subject.Type == hclsyntax.TokenBang: // No space after a bang return false case subject.Type == hclsyntax.TokenMinus: // Since a minus can either be subtraction or negation, and the latter // should _not_ have a space after it, we need to use some heuristics // to decide which case this is. // We guess that we have a negation if the token before doesn't look // like it could be the end of an expression. switch before.Type { case hclsyntax.TokenNil: // Minus at the start of input must be a negation return false case hclsyntax.TokenOParen, hclsyntax.TokenOBrace, hclsyntax.TokenOBrack, hclsyntax.TokenEqual, hclsyntax.TokenColon, hclsyntax.TokenComma, hclsyntax.TokenQuestion: // Minus immediately after an opening bracket or separator must be a negation. return false case hclsyntax.TokenPlus, hclsyntax.TokenStar, hclsyntax.TokenSlash, hclsyntax.TokenPercent, hclsyntax.TokenMinus: // Minus immediately after another arithmetic operator must be negation. return false case hclsyntax.TokenEqualOp, hclsyntax.TokenNotEqual, hclsyntax.TokenGreaterThan, hclsyntax.TokenGreaterThanEq, hclsyntax.TokenLessThan, hclsyntax.TokenLessThanEq: // Minus immediately after another comparison operator must be negation. return false case hclsyntax.TokenAnd, hclsyntax.TokenOr, hclsyntax.TokenBang: // Minus immediately after logical operator doesn't make sense but probably intended as negation. return false default: return true } case subject.Type == hclsyntax.TokenOBrace || after.Type == hclsyntax.TokenCBrace: // Unlike other bracket types, braces have spaces on both sides of them, // both in single-line nested blocks foo { bar = baz } and in object // constructor expressions foo = { bar = baz }. 
if subject.Type == hclsyntax.TokenOBrace && after.Type == hclsyntax.TokenCBrace { // An open brace followed by a close brace is an exception, however. // e.g. foo {} rather than foo { } return false } return true // In the unlikely event that an interpolation expression is just // a single object constructor, we'll put a space between the ${ and // the following { to make this more obvious, and then the same // thing for the two braces at the end. case (subject.Type == hclsyntax.TokenTemplateInterp || subject.Type == hclsyntax.TokenTemplateControl) && after.Type == hclsyntax.TokenOBrace: return true case subject.Type == hclsyntax.TokenCBrace && after.Type == hclsyntax.TokenTemplateSeqEnd: return true // Don't add spaces between interpolated items case subject.Type == hclsyntax.TokenTemplateSeqEnd && (after.Type == hclsyntax.TokenTemplateInterp || after.Type == hclsyntax.TokenTemplateControl): return false case tokenBracketChange(subject) > 0: // No spaces after open brackets return false case tokenBracketChange(after) < 0: // No spaces before close brackets return false default: // Most tokens are space-separated return true } } func linesForFormat(tokens Tokens) []formatLine { if len(tokens) == 0 { return make([]formatLine, 0) } // first we'll count our lines, so we can allocate the array for them in // a single block. (We want to minimize memory pressure in this codepath, // so it can be run somewhat-frequently by editor integrations.) lineCount := 1 // if there are zero newlines then there is one line for _, tok := range tokens { if tokenIsNewline(tok) { lineCount++ } } // To start, we'll just put everything in the "lead" cell on each line, // and then do another pass over the lines afterwards to adjust. lines := make([]formatLine, lineCount) li := 0 lineStart := 0 for i, tok := range tokens { if tok.Type == hclsyntax.TokenEOF { // The EOF token doesn't belong to any line, and terminates the // token sequence. lines[li].lead = tokens[lineStart:i] break } if tokenIsNewline(tok) { lines[li].lead = tokens[lineStart : i+1] lineStart = i + 1 li++ } } // If a set of tokens doesn't end in TokenEOF (e.g. because it's a // fragment of tokens from the middle of a file) then we might fall // out here with a line still pending. if lineStart < len(tokens) { lines[li].lead = tokens[lineStart:] if lines[li].lead[len(lines[li].lead)-1].Type == hclsyntax.TokenEOF { lines[li].lead = lines[li].lead[:len(lines[li].lead)-1] } } // Now we'll pick off any trailing comments and attribute assignments // to shuffle off into the "comment" and "assign" cells. for i := range lines { line := &lines[i] if len(line.lead) == 0 { // if the line is empty then there's nothing for us to do // (this should happen only for the final line, because all other // lines would have a newline token of some kind) continue } if len(line.lead) > 1 && line.lead[len(line.lead)-1].Type == hclsyntax.TokenComment { line.comment = line.lead[len(line.lead)-1:] line.lead = line.lead[:len(line.lead)-1] } for i, tok := range line.lead { if i > 0 && tok.Type == hclsyntax.TokenEqual { // We only move the tokens into "assign" if the RHS seems to // be a whole expression, which we determine by counting // brackets. If there's a net positive number of brackets // then that suggests we're introducing a multi-line expression. 
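// (Added example, not in the original source: for a line such as
// `foo = [` the bracket count from the equals sign onward is +1, so the
// tokens stay in the "lead" cell and the multi-line expression is not
// aligned as a single-line assignment.)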
netBrackets := 0 for _, token := range line.lead[i:] { netBrackets += tokenBracketChange(token) } if netBrackets == 0 { line.assign = line.lead[i:] line.lead = line.lead[:i] } break } } } return lines } func tokenIsNewline(tok *Token) bool { if tok.Type == hclsyntax.TokenNewline { return true } else if tok.Type == hclsyntax.TokenComment { // Single line tokens (# and //) consume their terminating newline, // so we need to treat them as newline tokens as well. if len(tok.Bytes) > 0 && tok.Bytes[len(tok.Bytes)-1] == '\n' { return true } } return false } func tokenBracketChange(tok *Token) int { switch tok.Type { case hclsyntax.TokenOBrace, hclsyntax.TokenOBrack, hclsyntax.TokenOParen, hclsyntax.TokenTemplateControl, hclsyntax.TokenTemplateInterp: return 1 case hclsyntax.TokenCBrace, hclsyntax.TokenCBrack, hclsyntax.TokenCParen, hclsyntax.TokenTemplateSeqEnd: return -1 default: return 0 } } // formatLine represents a single line of source code for formatting purposes, // splitting its tokens into up to three "cells": // // lead: always present, representing everything up to one of the others // assign: if line contains an attribute assignment, represents the tokens // starting at (and including) the equals symbol // comment: if line contains any non-comment tokens and ends with a // single-line comment token, represents the comment. // // When formatting, the leading spaces of the first tokens in each of these // cells is adjusted to align vertically their occurences on consecutive // rows. type formatLine struct { lead Tokens assign Tokens comment Tokens } hcl-2.14.1/hclwrite/format_test.go000066400000000000000000000200171431334125700170470ustar00rootroot00000000000000package hclwrite import ( "fmt" "testing" "reflect" "github.com/davecgh/go-spew/spew" "github.com/hashicorp/hcl/v2/hclsyntax" ) func TestFormat(t *testing.T) { tests := []struct { input string want string }{ { ``, ``, }, { `a=1`, `a = 1`, }, { `a=b.c`, `a = b.c`, }, { `a=b[c]`, `a = b[c]`, }, { `a=b()[c]`, `a = b()[c]`, }, { `a=["hello"][0]`, `a = ["hello"][0]`, }, { `( a+2 )`, `(a + 2)`, }, { `( a*2 )`, `(a * 2)`, }, { `( a+-2 )`, `(a + -2)`, }, { `( a*-2 )`, `(a * -2)`, }, { `(-2+1)`, `(-2 + 1)`, }, { `foo(1, -2,a*b, b,c)`, `foo(1, -2, a * b, b, c)`, }, { `foo(a,b...)`, `foo(a, b...)`, }, { `! true`, `!true`, }, { `a="hello ${ name }"`, `a = "hello ${name}"`, }, { `a="hello ${~ name ~}"`, `a = "hello ${~name~}"`, }, { `a="${b}${c}${ d } ${e}"`, `a = "${b}${c}${d} ${e}"`, }, { `"%{if true}${var.foo}%{endif}"`, `"%{if true}${var.foo}%{endif}"`, }, { `b{}`, `b {}`, }, { ` "${ hello }" `, ` "${ hello }" `, }, { ` foo( 1, - 2, a*b, b, c, ) `, ` foo( 1, -2, a * b, b, c, ) `, }, { `a?b:c`, `a ? 
b : c`, }, { `[ [ ] ]`, `[[]]`, }, { `[for x in y : x]`, `[for x in y : x]`, }, { `[for x in [y] : x]`, `[for x in [y] : x]`, }, { ` [ [ a ] ] `, ` [ [ a ] ] `, }, { ` [[ a ]] `, ` [[ a ]] `, }, { ` [[ [ a ] ]] `, ` [[ [ a ] ]] `, }, { // degenerate case with asymmetrical brackets ` [[ [ a ]] ] `, ` [[ [ a ]] ] `, }, { ` b { a = 1 } `, ` b { a = 1 } `, }, { ` b {a = 1} `, ` b { a = 1 } `, }, { ` a = 1 bungle = 2 `, ` a = 1 bungle = 2 `, }, { ` a = 1 bungle = 2 `, ` a = 1 bungle = 2 `, }, { ` a = 1 # foo bungle = 2 `, ` a = 1 # foo bungle = 2 `, }, { ` a = 1 # foo bungle = "bonce" # baz `, ` a = 1 # foo bungle = "bonce" # baz `, }, { ` # here we go a = 1 # foo bungle = "bonce" # baz `, ` # here we go a = 1 # foo bungle = "bonce" # baz `, }, { ` foo {} # here we go a = 1 # foo bungle = "bonce" # baz `, ` foo {} # here we go a = 1 # foo bungle = "bonce" # baz `, }, { ` a = 1 # foo bungle = "bonce" # baz zebra = "striped" # baz `, ` a = 1 # foo bungle = "bonce" # baz zebra = "striped" # baz `, }, { ` a = 1 # foo bungle = ( "bonce" ) # baz zebra = "striped" # baz `, ` a = 1 # foo bungle = ( "bonce" ) # baz zebra = "striped" # baz `, }, { ` a="apple"# foo bungle=(# woo parens "bonce" )# baz zebra="striped"# baz `, ` a = "apple" # foo bungle = ( # woo parens "bonce" ) # baz zebra = "striped" # baz `, }, { ` 𝒜 = 1 # foo bungle = "🇬🇧" # baz zebra = "striped" # baz `, ` 𝒜 = 1 # foo bungle = "🇬🇧" # baz zebra = "striped" # baz `, }, { ` foo { # ... } `, ` foo { # ... } `, }, { ` foo = { # ... } `, ` foo = { # ... } `, }, { ` foo = [ # ... ] `, ` foo = [ # ... ] `, }, { ` foo = [{ # ... }] `, ` foo = [{ # ... }] `, }, { ` foo { bar { # ... } } `, ` foo { bar { # ... } } `, }, { ` foo { bar = { # ... } } `, ` foo { bar = { # ... } } `, }, { ` foo { bar = [ # ... ] } `, ` foo { bar = [ # ... ] } `, }, { ` foo { bar = </`: ``` $ ls hclwrite/fuzz/testdata/fuzz/FuzzParseConfig 582528ddfad69eb57775199a43e0f9fd5c94bba343ce7bb6724d4ebafe311ed4 ``` A good first step to fixing a detected crasher is to copy the failing input into one of the unit tests in the `hclwrite` package and see it crash there too. After that, it's easy to re-run the test as you try to fix it. 
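The sketch below shows what such a copied-in test might look like. It is a minimal illustration, not an existing test in this repository: the test name `TestParseConfigCrasher` and the placeholder input are assumptions, and the crasher bytes would be pasted in by hand from the corpus file.

```go
package hclwrite

import (
	"io/ioutil"
	"testing"

	"github.com/hashicorp/hcl/v2"
)

// TestParseConfigCrasher is a hypothetical regression test: paste the bytes
// from the failing corpus entry into src, then re-run just this test with
// `go test -run TestParseConfigCrasher ./hclwrite` while working on a fix.
func TestParseConfigCrasher(t *testing.T) {
	src := []byte("block {\n") // placeholder: replace with the crasher input
	file, diags := ParseConfig(src, "crasher.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		// Parse errors are acceptable here; the fuzz target only requires
		// that parsing and re-serializing never panic.
		for _, diag := range diags {
			t.Logf("- %s", diag.Error())
		}
		return
	}
	if _, err := file.WriteTo(ioutil.Discard); err != nil {
		t.Fatalf("error writing file back out: %s", err)
	}
}
```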
hcl-2.14.1/hclwrite/fuzz/fuzz_test.go000066400000000000000000000010721431334125700175530ustar00rootroot00000000000000package fuzzhclwrite import ( "io/ioutil" "testing" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclwrite" ) func FuzzParseConfig(f *testing.F) { f.Fuzz(func(t *testing.T, data []byte) { file, diags := hclwrite.ParseConfig(data, "", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Logf("Error when parsing JSON %v", data) for _, diag := range diags { t.Logf("- %s", diag.Error()) } return } _, err := file.WriteTo(ioutil.Discard) if err != nil { t.Fatalf("error writing to file: %s", err) } }) } hcl-2.14.1/hclwrite/fuzz/testdata/000077500000000000000000000000001431334125700170005ustar00rootroot00000000000000hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/000077500000000000000000000000001431334125700177765ustar00rootroot00000000000000hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/000077500000000000000000000000001431334125700230555ustar00rootroot00000000000000hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/attr-expr.hcl000066400000000000000000000000651431334125700254740ustar00rootroot00000000000000go test fuzz v1 []byte("foo = upper(bar + baz[1])\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/attr-literal.hcl000066400000000000000000000000511431334125700261450ustar00rootroot00000000000000go test fuzz v1 []byte("foo = \"bar\"\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/attr.hcl000066400000000000000000000000471431334125700245200ustar00rootroot00000000000000go test fuzz v1 []byte("a = foo.bar\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/block-attrs.hcl000066400000000000000000000000641431334125700257720ustar00rootroot00000000000000go test fuzz v1 []byte("block {\n foo = true\n}\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/block-comment.hcl000066400000000000000000000000751431334125700263010ustar00rootroot00000000000000go test fuzz v1 []byte("/* multi\n line\n comment\n*/\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/block-empty.hcl000066400000000000000000000000461431334125700257730ustar00rootroot00000000000000go test fuzz v1 []byte("block {\n}\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/block-nested.hcl000066400000000000000000000001151431334125700261140ustar00rootroot00000000000000go test fuzz v1 []byte("block {\n another_block {\n foo = bar\n }\n}\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/complex.hcl000066400000000000000000000000751431334125700252160ustar00rootroot00000000000000go test fuzz v1 []byte("a = foo.bar[1].baz[\"foo\"].pizza\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/empty.hcl000066400000000000000000000000321431334125700246760ustar00rootroot00000000000000go test fuzz v1 []byte("")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/escape-dollar.hcl000066400000000000000000000000621431334125700262560ustar00rootroot00000000000000go test fuzz v1 []byte("a = \"hi $${var.foo}\"\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/escape-newline.hcl000066400000000000000000000000551431334125700264440ustar00rootroot00000000000000go test fuzz v1 []byte("a = \"bar\\nbaz\"\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/function-call-tmpl.hcl000066400000000000000000000000661431334125700272570ustar00rootroot00000000000000go test fuzz v1 []byte("a = \"b ${title(var.name)}\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/function-call.hcl000066400000000000000000000000571431334125700263050ustar00rootroot00000000000000go test fuzz v1 
[]byte("a = title(var.name)\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/hash-comment.hcl000066400000000000000000000000551431334125700261300ustar00rootroot00000000000000go test fuzz v1 []byte("# another comment\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/index.hcl000066400000000000000000000000461431334125700246540ustar00rootroot00000000000000go test fuzz v1 []byte("a = foo[1]\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/int-tmpl.hcl000066400000000000000000000000551431334125700253110ustar00rootroot00000000000000go test fuzz v1 []byte("a = \"foo ${42}\"\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/int.hcl000066400000000000000000000000421431334125700243330ustar00rootroot00000000000000go test fuzz v1 []byte("a = 42\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/just-interp.hcl000066400000000000000000000000561431334125700260320ustar00rootroot00000000000000go test fuzz v1 []byte("a = \"${var.bar}\"\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/literal.hcl000066400000000000000000000000431431334125700251760ustar00rootroot00000000000000go test fuzz v1 []byte("a = foo\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/lots-of-comments.hcl000066400000000000000000000003111431334125700267460ustar00rootroot00000000000000go test fuzz v1 []byte("// comment\nblock {\n // another comment\n another_block { # comment\n // comment\n foo = bar\n }\n\n /* commented out block\n blah {\n bar = foo\n }\n */\n}\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/slash-comment.hcl000066400000000000000000000000461431334125700263170ustar00rootroot00000000000000go test fuzz v1 []byte("// comment\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/splat-attr.hcl000066400000000000000000000000551431334125700256400ustar00rootroot00000000000000go test fuzz v1 []byte("a = foo.bar.*.baz\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/splat-dot-full.hcl000066400000000000000000000000511431334125700264100ustar00rootroot00000000000000go test fuzz v1 []byte("a = foo.bar.*\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/splat-full.hcl000066400000000000000000000000561431334125700256310ustar00rootroot00000000000000go test fuzz v1 []byte("a = foo.bar[*].baz\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/traversal-dot-index-terminal.hcl000066400000000000000000000000511431334125700312460ustar00rootroot00000000000000go test fuzz v1 []byte("a = foo.bar.0\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/traversal-dot-index.hcl000066400000000000000000000000551431334125700274410ustar00rootroot00000000000000go test fuzz v1 []byte("a = foo.bar.4.baz\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/traversal-index.hcl000066400000000000000000000000561431334125700266560ustar00rootroot00000000000000go test fuzz v1 []byte("a = foo.bar[4].baz\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/utf8.hcl000066400000000000000000000000741431334125700244340ustar00rootroot00000000000000go test fuzz v1 []byte("foo = \"föo ${föo(\"föo\")}\"\n")hcl-2.14.1/hclwrite/fuzz/testdata/fuzz/FuzzParseConfig/var.hcl000066400000000000000000000000471431334125700243360ustar00rootroot00000000000000go test fuzz v1 []byte("a = var.bar\n")hcl-2.14.1/hclwrite/generate.go000066400000000000000000000252541431334125700163220ustar00rootroot00000000000000package hclwrite import ( "fmt" "unicode" "unicode/utf8" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/zclconf/go-cty/cty" ) // TokensForValue 
returns a sequence of tokens that represents the given // constant value. // // This function only supports types that are used by HCL. In particular, it // does not support capsule types and will panic if given one. // // It is not possible to express an unknown value in source code, so this // function will panic if the given value is unknown or contains any unknown // values. A caller can call the value's IsWhollyKnown method to verify that // no unknown values are present before calling TokensForValue. func TokensForValue(val cty.Value) Tokens { toks := appendTokensForValue(val, nil) format(toks) // fiddle with the SpacesBefore field to get canonical spacing return toks } // TokensForTraversal returns a sequence of tokens that represents the given // traversal. // // If the traversal is absolute then the result is a self-contained, valid // reference expression. If the traversal is relative then the returned tokens // could be appended to some other expression tokens to traverse into the // represented expression. func TokensForTraversal(traversal hcl.Traversal) Tokens { toks := appendTokensForTraversal(traversal, nil) format(toks) // fiddle with the SpacesBefore field to get canonical spacing return toks } // TokensForIdentifier returns a sequence of tokens representing just the // given identifier. // // In practice this function can only ever generate exactly one token, because // an identifier is always a leaf token in the syntax tree. // // This is similar to calling TokensForTraversal with a single-step absolute // traversal, but avoids the need to construct a separate traversal object // for this simple common case. If you need to generate a multi-step traversal, // use TokensForTraversal instead. func TokensForIdentifier(name string) Tokens { return Tokens{ newIdentToken(name), } } // TokensForTuple returns a sequence of tokens that represents a tuple // constructor, with element expressions populated from the given list // of tokens. // // TokensForTuple includes the given elements verbatim into the element // positions in the resulting tuple expression, without any validation to // ensure that they represent valid expressions. Use TokensForValue or // TokensForTraversal to generate valid leaf expression values, or use // TokensForTuple, TokensForObject, and TokensForFunctionCall to // generate other nested compound expressions. func TokensForTuple(elems []Tokens) Tokens { var toks Tokens toks = append(toks, &Token{ Type: hclsyntax.TokenOBrack, Bytes: []byte{'['}, }) for index, elem := range elems { if index > 0 { toks = append(toks, &Token{ Type: hclsyntax.TokenComma, Bytes: []byte{','}, }) } toks = append(toks, elem...) } toks = append(toks, &Token{ Type: hclsyntax.TokenCBrack, Bytes: []byte{']'}, }) format(toks) // fiddle with the SpacesBefore field to get canonical spacing return toks } // TokensForObject returns a sequence of tokens that represents an object // constructor, with attribute name/value pairs populated from the given // list of attribute token objects. // // TokensForObject includes the given tokens verbatim into the name and // value positions in the resulting object expression, without any validation // to ensure that they represent valid expressions. Use TokensForValue or // TokensForTraversal to generate valid leaf expression values, or use // TokensForTuple, TokensForObject, and TokensForFunctionCall to // generate other nested compound expressions. 
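//
// As an added illustration (not part of the original documentation), a
// caller could produce tokens for the object { name = "example" } roughly
// like this, combining the helpers described above:
//
//	TokensForObject([]ObjectAttrTokens{{
//		Name:  TokensForIdentifier("name"),
//		Value: TokensForValue(cty.StringVal("example")),
//	}})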
// // Note that HCL requires placing a traversal expression in parentheses if // you intend to use it as an attribute name expression, because otherwise // the parser will interpret it as a literal attribute name. TokensForObject // does not handle that situation automatically, so a caller must add the // necessary `TokenOParen` and TokenCParen` manually if needed. func TokensForObject(attrs []ObjectAttrTokens) Tokens { var toks Tokens toks = append(toks, &Token{ Type: hclsyntax.TokenOBrace, Bytes: []byte{'{'}, }) if len(attrs) > 0 { toks = append(toks, &Token{ Type: hclsyntax.TokenNewline, Bytes: []byte{'\n'}, }) } for _, attr := range attrs { toks = append(toks, attr.Name...) toks = append(toks, &Token{ Type: hclsyntax.TokenEqual, Bytes: []byte{'='}, }) toks = append(toks, attr.Value...) toks = append(toks, &Token{ Type: hclsyntax.TokenNewline, Bytes: []byte{'\n'}, }) } toks = append(toks, &Token{ Type: hclsyntax.TokenCBrace, Bytes: []byte{'}'}, }) format(toks) // fiddle with the SpacesBefore field to get canonical spacing return toks } // TokensForFunctionCall returns a sequence of tokens that represents call // to the function with the given name, using the argument tokens to // populate the argument expressions. // // TokensForFunctionCall includes the given argument tokens verbatim into the // positions in the resulting call expression, without any validation // to ensure that they represent valid expressions. Use TokensForValue or // TokensForTraversal to generate valid leaf expression values, or use // TokensForTuple, TokensForObject, and TokensForFunctionCall to // generate other nested compound expressions. // // This function doesn't include an explicit way to generate the expansion // symbol "..." on the final argument. Currently, generating that requires // manually appending a TokenEllipsis with the bytes "..." to the tokens for // the final argument. func TokensForFunctionCall(funcName string, args ...Tokens) Tokens { var toks Tokens toks = append(toks, TokensForIdentifier(funcName)...) toks = append(toks, &Token{ Type: hclsyntax.TokenOParen, Bytes: []byte{'('}, }) for index, arg := range args { if index > 0 { toks = append(toks, &Token{ Type: hclsyntax.TokenComma, Bytes: []byte{','}, }) } toks = append(toks, arg...) } toks = append(toks, &Token{ Type: hclsyntax.TokenCParen, Bytes: []byte{')'}, }) format(toks) // fiddle with the SpacesBefore field to get canonical spacing return toks } func appendTokensForValue(val cty.Value, toks Tokens) Tokens { switch { case !val.IsKnown(): panic("cannot produce tokens for unknown value") case val.IsNull(): toks = append(toks, &Token{ Type: hclsyntax.TokenIdent, Bytes: []byte(`null`), }) case val.Type() == cty.Bool: var src []byte if val.True() { src = []byte(`true`) } else { src = []byte(`false`) } toks = append(toks, &Token{ Type: hclsyntax.TokenIdent, Bytes: src, }) case val.Type() == cty.Number: bf := val.AsBigFloat() srcStr := bf.Text('f', -1) toks = append(toks, &Token{ Type: hclsyntax.TokenNumberLit, Bytes: []byte(srcStr), }) case val.Type() == cty.String: // TODO: If it's a multi-line string ending in a newline, format // it as a HEREDOC instead. 
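// (Added note, not in the original source: escapeQuotedStringLit below
// escapes newlines, carriage returns, tabs, double quotes, backslashes,
// non-printable characters, and the template introducers "${" and "%{"
// so that the value round-trips as a quoted HCL string literal.)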
src := escapeQuotedStringLit(val.AsString()) toks = append(toks, &Token{ Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}, }) if len(src) > 0 { toks = append(toks, &Token{ Type: hclsyntax.TokenQuotedLit, Bytes: src, }) } toks = append(toks, &Token{ Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}, }) case val.Type().IsListType() || val.Type().IsSetType() || val.Type().IsTupleType(): toks = append(toks, &Token{ Type: hclsyntax.TokenOBrack, Bytes: []byte{'['}, }) i := 0 for it := val.ElementIterator(); it.Next(); { if i > 0 { toks = append(toks, &Token{ Type: hclsyntax.TokenComma, Bytes: []byte{','}, }) } _, eVal := it.Element() toks = appendTokensForValue(eVal, toks) i++ } toks = append(toks, &Token{ Type: hclsyntax.TokenCBrack, Bytes: []byte{']'}, }) case val.Type().IsMapType() || val.Type().IsObjectType(): toks = append(toks, &Token{ Type: hclsyntax.TokenOBrace, Bytes: []byte{'{'}, }) if val.LengthInt() > 0 { toks = append(toks, &Token{ Type: hclsyntax.TokenNewline, Bytes: []byte{'\n'}, }) } i := 0 for it := val.ElementIterator(); it.Next(); { eKey, eVal := it.Element() if hclsyntax.ValidIdentifier(eKey.AsString()) { toks = append(toks, &Token{ Type: hclsyntax.TokenIdent, Bytes: []byte(eKey.AsString()), }) } else { toks = appendTokensForValue(eKey, toks) } toks = append(toks, &Token{ Type: hclsyntax.TokenEqual, Bytes: []byte{'='}, }) toks = appendTokensForValue(eVal, toks) toks = append(toks, &Token{ Type: hclsyntax.TokenNewline, Bytes: []byte{'\n'}, }) i++ } toks = append(toks, &Token{ Type: hclsyntax.TokenCBrace, Bytes: []byte{'}'}, }) default: panic(fmt.Sprintf("cannot produce tokens for %#v", val)) } return toks } func appendTokensForTraversal(traversal hcl.Traversal, toks Tokens) Tokens { for _, step := range traversal { toks = appendTokensForTraversalStep(step, toks) } return toks } func appendTokensForTraversalStep(step hcl.Traverser, toks Tokens) Tokens { switch ts := step.(type) { case hcl.TraverseRoot: toks = append(toks, &Token{ Type: hclsyntax.TokenIdent, Bytes: []byte(ts.Name), }) case hcl.TraverseAttr: toks = append( toks, &Token{ Type: hclsyntax.TokenDot, Bytes: []byte{'.'}, }, &Token{ Type: hclsyntax.TokenIdent, Bytes: []byte(ts.Name), }, ) case hcl.TraverseIndex: toks = append(toks, &Token{ Type: hclsyntax.TokenOBrack, Bytes: []byte{'['}, }) toks = appendTokensForValue(ts.Key, toks) toks = append(toks, &Token{ Type: hclsyntax.TokenCBrack, Bytes: []byte{']'}, }) default: panic(fmt.Sprintf("unsupported traversal step type %T", step)) } return toks } func escapeQuotedStringLit(s string) []byte { if len(s) == 0 { return nil } buf := make([]byte, 0, len(s)) for i, r := range s { switch r { case '\n': buf = append(buf, '\\', 'n') case '\r': buf = append(buf, '\\', 'r') case '\t': buf = append(buf, '\\', 't') case '"': buf = append(buf, '\\', '"') case '\\': buf = append(buf, '\\', '\\') case '$', '%': buf = appendRune(buf, r) remain := s[i+1:] if len(remain) > 0 && remain[0] == '{' { // Double up our template introducer symbol to escape it. buf = appendRune(buf, r) } default: if !unicode.IsPrint(r) { var fmted string if r < 65536 { fmted = fmt.Sprintf("\\u%04x", r) } else { fmted = fmt.Sprintf("\\U%08x", r) } buf = append(buf, fmted...) 
} else { buf = appendRune(buf, r) } } } return buf } func appendRune(b []byte, r rune) []byte { l := utf8.RuneLen(r) for i := 0; i < l; i++ { b = append(b, 0) // make room at the end of our buffer } ch := b[len(b)-l:] utf8.EncodeRune(ch, r) return b } hcl-2.14.1/hclwrite/generate_test.go000066400000000000000000000472411431334125700173610ustar00rootroot00000000000000package hclwrite import ( "bytes" "math/big" "sort" "testing" "github.com/google/go-cmp/cmp" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/zclconf/go-cty/cty" ) func TestTokensForValue(t *testing.T) { tests := []struct { Val cty.Value Want Tokens }{ { cty.NullVal(cty.DynamicPseudoType), Tokens{ { Type: hclsyntax.TokenIdent, Bytes: []byte(`null`), }, }, }, { cty.True, Tokens{ { Type: hclsyntax.TokenIdent, Bytes: []byte(`true`), }, }, }, { cty.False, Tokens{ { Type: hclsyntax.TokenIdent, Bytes: []byte(`false`), }, }, }, { cty.NumberIntVal(0), Tokens{ { Type: hclsyntax.TokenNumberLit, Bytes: []byte(`0`), }, }, }, { cty.NumberFloatVal(0.5), Tokens{ { Type: hclsyntax.TokenNumberLit, Bytes: []byte(`0.5`), }, }, }, { cty.NumberVal(big.NewFloat(0).SetPrec(512).Mul(big.NewFloat(40000000), big.NewFloat(2000000))), Tokens{ { Type: hclsyntax.TokenNumberLit, Bytes: []byte(`80000000000000`), }, }, }, { cty.StringVal(""), Tokens{ { Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), }, { Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`), }, }, }, { cty.StringVal("foo"), Tokens{ { Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), }, { Type: hclsyntax.TokenQuotedLit, Bytes: []byte(`foo`), }, { Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`), }, }, }, { cty.StringVal(`"foo"`), Tokens{ { Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), }, { Type: hclsyntax.TokenQuotedLit, Bytes: []byte(`\"foo\"`), }, { Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`), }, }, }, { cty.StringVal("hello\nworld\n"), Tokens{ { Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), }, { Type: hclsyntax.TokenQuotedLit, Bytes: []byte(`hello\nworld\n`), }, { Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`), }, }, }, { cty.StringVal("hello\r\nworld\r\n"), Tokens{ { Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), }, { Type: hclsyntax.TokenQuotedLit, Bytes: []byte(`hello\r\nworld\r\n`), }, { Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`), }, }, }, { cty.StringVal(`what\what`), Tokens{ { Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), }, { Type: hclsyntax.TokenQuotedLit, Bytes: []byte(`what\\what`), }, { Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`), }, }, }, { cty.StringVal("𝄞"), Tokens{ { Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), }, { Type: hclsyntax.TokenQuotedLit, Bytes: []byte("𝄞"), }, { Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`), }, }, }, { cty.StringVal("👩🏾"), Tokens{ { Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), }, { Type: hclsyntax.TokenQuotedLit, Bytes: []byte(`👩🏾`), }, { Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`), }, }, }, { cty.EmptyTupleVal, Tokens{ { Type: hclsyntax.TokenOBrack, Bytes: []byte(`[`), }, { Type: hclsyntax.TokenCBrack, Bytes: []byte(`]`), }, }, }, { cty.TupleVal([]cty.Value{cty.EmptyTupleVal}), Tokens{ { Type: hclsyntax.TokenOBrack, Bytes: []byte(`[`), }, { Type: hclsyntax.TokenOBrack, Bytes: []byte(`[`), }, { Type: hclsyntax.TokenCBrack, Bytes: []byte(`]`), }, { Type: hclsyntax.TokenCBrack, Bytes: []byte(`]`), }, }, }, { cty.ListValEmpty(cty.String), Tokens{ { Type: hclsyntax.TokenOBrack, Bytes: []byte(`[`), }, { Type: hclsyntax.TokenCBrack, Bytes: []byte(`]`), }, }, }, { cty.SetValEmpty(cty.Bool), 
Tokens{ { Type: hclsyntax.TokenOBrack, Bytes: []byte(`[`), }, { Type: hclsyntax.TokenCBrack, Bytes: []byte(`]`), }, }, }, { cty.TupleVal([]cty.Value{cty.True}), Tokens{ { Type: hclsyntax.TokenOBrack, Bytes: []byte(`[`), }, { Type: hclsyntax.TokenIdent, Bytes: []byte(`true`), }, { Type: hclsyntax.TokenCBrack, Bytes: []byte(`]`), }, }, }, { cty.TupleVal([]cty.Value{cty.True, cty.NumberIntVal(0)}), Tokens{ { Type: hclsyntax.TokenOBrack, Bytes: []byte(`[`), }, { Type: hclsyntax.TokenIdent, Bytes: []byte(`true`), }, { Type: hclsyntax.TokenComma, Bytes: []byte(`,`), }, { Type: hclsyntax.TokenNumberLit, Bytes: []byte(`0`), SpacesBefore: 1, }, { Type: hclsyntax.TokenCBrack, Bytes: []byte(`]`), }, }, }, { cty.EmptyObjectVal, Tokens{ { Type: hclsyntax.TokenOBrace, Bytes: []byte(`{`), }, { Type: hclsyntax.TokenCBrace, Bytes: []byte(`}`), }, }, }, { cty.MapValEmpty(cty.Bool), Tokens{ { Type: hclsyntax.TokenOBrace, Bytes: []byte(`{`), }, { Type: hclsyntax.TokenCBrace, Bytes: []byte(`}`), }, }, }, { cty.ObjectVal(map[string]cty.Value{ "foo": cty.True, }), Tokens{ { Type: hclsyntax.TokenOBrace, Bytes: []byte(`{`), }, { Type: hclsyntax.TokenNewline, Bytes: []byte("\n"), }, { Type: hclsyntax.TokenIdent, Bytes: []byte(`foo`), SpacesBefore: 2, }, { Type: hclsyntax.TokenEqual, Bytes: []byte(`=`), SpacesBefore: 1, }, { Type: hclsyntax.TokenIdent, Bytes: []byte(`true`), SpacesBefore: 1, }, { Type: hclsyntax.TokenNewline, Bytes: []byte("\n"), }, { Type: hclsyntax.TokenCBrace, Bytes: []byte(`}`), }, }, }, { cty.ObjectVal(map[string]cty.Value{ "foo": cty.True, "bar": cty.NumberIntVal(0), }), Tokens{ { Type: hclsyntax.TokenOBrace, Bytes: []byte(`{`), }, { Type: hclsyntax.TokenNewline, Bytes: []byte("\n"), }, { Type: hclsyntax.TokenIdent, Bytes: []byte(`bar`), SpacesBefore: 2, }, { Type: hclsyntax.TokenEqual, Bytes: []byte(`=`), SpacesBefore: 1, }, { Type: hclsyntax.TokenNumberLit, Bytes: []byte(`0`), SpacesBefore: 1, }, { Type: hclsyntax.TokenNewline, Bytes: []byte("\n"), }, { Type: hclsyntax.TokenIdent, Bytes: []byte(`foo`), SpacesBefore: 2, }, { Type: hclsyntax.TokenEqual, Bytes: []byte(`=`), SpacesBefore: 1, }, { Type: hclsyntax.TokenIdent, Bytes: []byte(`true`), SpacesBefore: 1, }, { Type: hclsyntax.TokenNewline, Bytes: []byte("\n"), }, { Type: hclsyntax.TokenCBrace, Bytes: []byte(`}`), }, }, }, { cty.ObjectVal(map[string]cty.Value{ "foo bar": cty.True, }), Tokens{ { Type: hclsyntax.TokenOBrace, Bytes: []byte(`{`), }, { Type: hclsyntax.TokenNewline, Bytes: []byte("\n"), }, { Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), SpacesBefore: 2, }, { Type: hclsyntax.TokenQuotedLit, Bytes: []byte(`foo bar`), }, { Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`), }, { Type: hclsyntax.TokenEqual, Bytes: []byte(`=`), SpacesBefore: 1, }, { Type: hclsyntax.TokenIdent, Bytes: []byte(`true`), SpacesBefore: 1, }, { Type: hclsyntax.TokenNewline, Bytes: []byte("\n"), }, { Type: hclsyntax.TokenCBrace, Bytes: []byte(`}`), }, }, }, } for _, test := range tests { t.Run(test.Val.GoString(), func(t *testing.T) { got := TokensForValue(test.Val) if !cmp.Equal(got, test.Want) { diff := cmp.Diff(got, test.Want, cmp.Comparer(func(a, b []byte) bool { return bytes.Equal(a, b) })) var gotBuf, wantBuf bytes.Buffer got.WriteTo(&gotBuf) test.Want.WriteTo(&wantBuf) t.Errorf( "wrong result\nvalue: %#v\ngot: %s\nwant: %s\ndiff: %s", test.Val, gotBuf.String(), wantBuf.String(), diff, ) } }) } } func TestTokensForTraversal(t *testing.T) { tests := []struct { Val hcl.Traversal Want Tokens }{ { hcl.Traversal{ hcl.TraverseRoot{Name: "root"}, 
hcl.TraverseAttr{Name: "attr"}, hcl.TraverseIndex{Key: cty.StringVal("index")}, }, Tokens{ {Type: hclsyntax.TokenIdent, Bytes: []byte("root")}, {Type: hclsyntax.TokenDot, Bytes: []byte(".")}, {Type: hclsyntax.TokenIdent, Bytes: []byte("attr")}, {Type: hclsyntax.TokenOBrack, Bytes: []byte{'['}}, {Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`)}, {Type: hclsyntax.TokenQuotedLit, Bytes: []byte("index")}, {Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`)}, {Type: hclsyntax.TokenCBrack, Bytes: []byte{']'}}, }, }, } for _, test := range tests { got := TokensForTraversal(test.Val) if !cmp.Equal(got, test.Want) { diff := cmp.Diff(got, test.Want, cmp.Comparer(func(a, b []byte) bool { return bytes.Equal(a, b) })) var gotBuf, wantBuf bytes.Buffer got.WriteTo(&gotBuf) test.Want.WriteTo(&wantBuf) t.Errorf( "wrong result\nvalue: %#v\ngot: %s\nwant: %s\ndiff: %s", test.Val, gotBuf.String(), wantBuf.String(), diff, ) } } } func TestTokensForTuple(t *testing.T) { tests := map[string]struct { Val []Tokens Want Tokens }{ "no elements": { nil, Tokens{ {Type: hclsyntax.TokenOBrack, Bytes: []byte{'['}}, {Type: hclsyntax.TokenCBrack, Bytes: []byte{']'}}, }, }, "one element": { []Tokens{ TokensForValue(cty.StringVal("foo")), }, Tokens{ {Type: hclsyntax.TokenOBrack, Bytes: []byte{'['}}, {Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`)}, {Type: hclsyntax.TokenQuotedLit, Bytes: []byte("foo")}, {Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`)}, {Type: hclsyntax.TokenCBrack, Bytes: []byte{']'}}, }, }, "two elements": { []Tokens{ TokensForTraversal(hcl.Traversal{ hcl.TraverseRoot{Name: "root"}, hcl.TraverseAttr{Name: "attr"}, }), TokensForValue(cty.StringVal("foo")), }, Tokens{ {Type: hclsyntax.TokenOBrack, Bytes: []byte{'['}}, {Type: hclsyntax.TokenIdent, Bytes: []byte("root")}, {Type: hclsyntax.TokenDot, Bytes: []byte(".")}, {Type: hclsyntax.TokenIdent, Bytes: []byte("attr")}, {Type: hclsyntax.TokenComma, Bytes: []byte{','}}, {Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), SpacesBefore: 1}, {Type: hclsyntax.TokenQuotedLit, Bytes: []byte("foo")}, {Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`)}, {Type: hclsyntax.TokenCBrack, Bytes: []byte{']'}}, }, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { got := TokensForTuple(test.Val) if !cmp.Equal(got, test.Want) { diff := cmp.Diff(got, test.Want, cmp.Comparer(func(a, b []byte) bool { return bytes.Equal(a, b) })) var gotBuf, wantBuf bytes.Buffer got.WriteTo(&gotBuf) test.Want.WriteTo(&wantBuf) t.Errorf( "wrong result\nvalue: %#v\ngot: %s\nwant: %s\ndiff: %s", test.Val, gotBuf.String(), wantBuf.String(), diff, ) } }) } } func TestTokensForObject(t *testing.T) { tests := map[string]struct { Val []ObjectAttrTokens Want Tokens }{ "no attributes": { nil, Tokens{ {Type: hclsyntax.TokenOBrace, Bytes: []byte{'{'}}, {Type: hclsyntax.TokenCBrace, Bytes: []byte{'}'}}, }, }, "one attribute": { []ObjectAttrTokens{ { Name: TokensForTraversal(hcl.Traversal{ hcl.TraverseRoot{Name: "bar"}, }), Value: TokensForValue(cty.StringVal("baz")), }, }, Tokens{ {Type: hclsyntax.TokenOBrace, Bytes: []byte{'{'}}, {Type: hclsyntax.TokenNewline, Bytes: []byte{'\n'}}, {Type: hclsyntax.TokenIdent, Bytes: []byte("bar"), SpacesBefore: 2}, {Type: hclsyntax.TokenEqual, Bytes: []byte{'='}, SpacesBefore: 1}, {Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), SpacesBefore: 1}, {Type: hclsyntax.TokenQuotedLit, Bytes: []byte("baz")}, {Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`)}, {Type: hclsyntax.TokenNewline, Bytes: []byte{'\n'}}, {Type: hclsyntax.TokenCBrace, Bytes: []byte{'}'}}, }, }, 
"two attributes": { []ObjectAttrTokens{ { Name: TokensForTraversal(hcl.Traversal{ hcl.TraverseRoot{Name: "foo"}, }), Value: TokensForTraversal(hcl.Traversal{ hcl.TraverseRoot{Name: "root"}, hcl.TraverseAttr{Name: "attr"}, }), }, { Name: TokensForTraversal(hcl.Traversal{ hcl.TraverseRoot{Name: "bar"}, }), Value: TokensForValue(cty.StringVal("baz")), }, }, Tokens{ {Type: hclsyntax.TokenOBrace, Bytes: []byte{'{'}}, {Type: hclsyntax.TokenNewline, Bytes: []byte{'\n'}}, {Type: hclsyntax.TokenIdent, Bytes: []byte("foo"), SpacesBefore: 2}, {Type: hclsyntax.TokenEqual, Bytes: []byte{'='}, SpacesBefore: 1}, {Type: hclsyntax.TokenIdent, Bytes: []byte("root"), SpacesBefore: 1}, {Type: hclsyntax.TokenDot, Bytes: []byte(".")}, {Type: hclsyntax.TokenIdent, Bytes: []byte("attr")}, {Type: hclsyntax.TokenNewline, Bytes: []byte{'\n'}}, {Type: hclsyntax.TokenIdent, Bytes: []byte("bar"), SpacesBefore: 2}, {Type: hclsyntax.TokenEqual, Bytes: []byte{'='}, SpacesBefore: 1}, {Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), SpacesBefore: 1}, {Type: hclsyntax.TokenQuotedLit, Bytes: []byte("baz")}, {Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`)}, {Type: hclsyntax.TokenNewline, Bytes: []byte{'\n'}}, {Type: hclsyntax.TokenCBrace, Bytes: []byte{'}'}}, }, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { got := TokensForObject(test.Val) if !cmp.Equal(got, test.Want) { diff := cmp.Diff(got, test.Want, cmp.Comparer(func(a, b []byte) bool { return bytes.Equal(a, b) })) var gotBuf, wantBuf bytes.Buffer got.WriteTo(&gotBuf) test.Want.WriteTo(&wantBuf) t.Errorf( "wrong result\nvalue: %#v\ngot: %s\nwant: %s\ndiff: %s", test.Val, gotBuf.String(), wantBuf.String(), diff, ) } }) } } func TestTokensForFunctionCall(t *testing.T) { tests := map[string]struct { FuncName string Val []Tokens Want Tokens }{ "no arguments": { "uuid", nil, Tokens{ {Type: hclsyntax.TokenIdent, Bytes: []byte("uuid")}, {Type: hclsyntax.TokenOParen, Bytes: []byte{'('}}, {Type: hclsyntax.TokenCParen, Bytes: []byte(")")}, }, }, "one argument": { "strlen", []Tokens{ TokensForValue(cty.StringVal("hello")), }, Tokens{ {Type: hclsyntax.TokenIdent, Bytes: []byte("strlen")}, {Type: hclsyntax.TokenOParen, Bytes: []byte{'('}}, {Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`)}, {Type: hclsyntax.TokenQuotedLit, Bytes: []byte("hello")}, {Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`)}, {Type: hclsyntax.TokenCParen, Bytes: []byte(")")}, }, }, "two arguments": { "list", []Tokens{ TokensForIdentifier("string"), TokensForIdentifier("int"), }, Tokens{ {Type: hclsyntax.TokenIdent, Bytes: []byte("list")}, {Type: hclsyntax.TokenOParen, Bytes: []byte{'('}}, {Type: hclsyntax.TokenIdent, Bytes: []byte("string")}, {Type: hclsyntax.TokenComma, Bytes: []byte(",")}, {Type: hclsyntax.TokenIdent, Bytes: []byte("int"), SpacesBefore: 1}, {Type: hclsyntax.TokenCParen, Bytes: []byte(")")}, }, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { got := TokensForFunctionCall(test.FuncName, test.Val...) 
if !cmp.Equal(got, test.Want) { diff := cmp.Diff(got, test.Want, cmp.Comparer(func(a, b []byte) bool { return bytes.Equal(a, b) })) var gotBuf, wantBuf bytes.Buffer got.WriteTo(&gotBuf) test.Want.WriteTo(&wantBuf) t.Errorf( "wrong result\nvalue: %#v\ngot: %s\nwant: %s\ndiff: %s", test.Val, gotBuf.String(), wantBuf.String(), diff, ) } }) } } func TestTokenGenerateConsistency(t *testing.T) { bytesComparer := cmp.Comparer(func(a, b []byte) bool { return bytes.Equal(a, b) }) // This test verifies that different ways of generating equivalent token // sequences all generate identical tokens, to help us keep them all in // sync under future maintanence. t.Run("tuple constructor", func(t *testing.T) { tests := map[string]struct { elems []cty.Value }{ "no elements": { nil, }, "one element": { []cty.Value{ cty.StringVal("hello"), }, }, "two elements": { []cty.Value{ cty.StringVal("hello"), cty.StringVal("world"), }, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { var listVal cty.Value if len(test.elems) > 0 { listVal = cty.ListVal(test.elems) } else { listVal = cty.ListValEmpty(cty.DynamicPseudoType) } fromListValue := TokensForValue(listVal) fromTupleValue := TokensForValue(cty.TupleVal(test.elems)) elemTokens := make([]Tokens, len(test.elems)) for i, v := range test.elems { elemTokens[i] = TokensForValue(v) } fromTupleTokens := TokensForTuple(elemTokens) if diff := cmp.Diff(fromListValue, fromTupleTokens, bytesComparer); diff != "" { t.Errorf("inconsistency between TokensForValue(list) and TokensForTuple\n%s", diff) } if diff := cmp.Diff(fromTupleValue, fromTupleTokens, bytesComparer); diff != "" { t.Errorf("inconsistency between TokensForValue(tuple) and TokensForTuple\n%s", diff) } }) } }) t.Run("object constructor", func(t *testing.T) { tests := map[string]struct { attrs map[string]cty.Value }{ "no elements": { nil, }, "one element": { map[string]cty.Value{ "greeting": cty.StringVal("hello"), }, }, "two elements": { map[string]cty.Value{ "greeting1": cty.StringVal("hello"), "greeting2": cty.StringVal("world"), }, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { var mapVal cty.Value if len(test.attrs) > 0 { mapVal = cty.MapVal(test.attrs) } else { mapVal = cty.MapValEmpty(cty.DynamicPseudoType) } fromMapValue := TokensForValue(mapVal) fromObjectValue := TokensForValue(cty.ObjectVal(test.attrs)) attrTokens := make([]ObjectAttrTokens, 0, len(test.attrs)) // TokensForValue always writes the keys/attributes in cty's // standard iteration order, but TokensForObject gives the // caller direct control of the ordering. The result is // therefore consistent only if the given attributes are // pre-sorted into the same iteration order, which is a lexical // sort by attribute name. 
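// For example (an illustrative note, not in the original test): with the
// "two elements" case above, cty iterates the object attributes as
// "greeting1" then "greeting2", so the ObjectAttrTokens built below must be
// assembled from the lexically-sorted key list to match TokensForValue's
// output ordering.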
keys := make([]string, 0, len(test.attrs)) for k := range test.attrs { keys = append(keys, k) } sort.Strings(keys) for _, k := range keys { v := test.attrs[k] attrTokens = append(attrTokens, ObjectAttrTokens{ Name: TokensForIdentifier(k), Value: TokensForValue(v), }) } fromObjectTokens := TokensForObject(attrTokens) if diff := cmp.Diff(fromMapValue, fromObjectTokens, bytesComparer); diff != "" { t.Errorf("inconsistency between TokensForValue(map) and TokensForObject\n%s", diff) } if diff := cmp.Diff(fromObjectValue, fromObjectTokens, bytesComparer); diff != "" { t.Errorf("inconsistency between TokensForValue(object) and TokensForObject\n%s", diff) } }) } }) } hcl-2.14.1/hclwrite/native_node_sorter.go000066400000000000000000000006721431334125700204160ustar00rootroot00000000000000package hclwrite import ( "github.com/hashicorp/hcl/v2/hclsyntax" ) type nativeNodeSorter struct { Nodes []hclsyntax.Node } func (s nativeNodeSorter) Len() int { return len(s.Nodes) } func (s nativeNodeSorter) Less(i, j int) bool { rangeI := s.Nodes[i].Range() rangeJ := s.Nodes[j].Range() return rangeI.Start.Byte < rangeJ.Start.Byte } func (s nativeNodeSorter) Swap(i, j int) { s.Nodes[i], s.Nodes[j] = s.Nodes[j], s.Nodes[i] } hcl-2.14.1/hclwrite/node.go000066400000000000000000000140021431334125700154420ustar00rootroot00000000000000package hclwrite import ( "fmt" "github.com/google/go-cmp/cmp" ) // node represents a node in the AST. type node struct { content nodeContent list *nodes before, after *node } func newNode(c nodeContent) *node { return &node{ content: c, } } func (n *node) Equal(other *node) bool { return cmp.Equal(n.content, other.content) } func (n *node) BuildTokens(to Tokens) Tokens { return n.content.BuildTokens(to) } // Detach removes the receiver from the list it currently belongs to. If the // node is not currently in a list, this is a no-op. func (n *node) Detach() { if n.list == nil { return } if n.before != nil { n.before.after = n.after } if n.after != nil { n.after.before = n.before } if n.list.first == n { n.list.first = n.after } if n.list.last == n { n.list.last = n.before } n.list = nil n.before = nil n.after = nil } // ReplaceWith removes the receiver from the list it currently belongs to and // inserts a new node with the given content in its place. If the node is not // currently in a list, this function will panic. // // The return value is the newly-constructed node, containing the given content. // After this function returns, the reciever is no longer attached to a list. func (n *node) ReplaceWith(c nodeContent) *node { if n.list == nil { panic("can't replace node that is not in a list") } before := n.before after := n.after list := n.list n.before, n.after, n.list = nil, nil, nil nn := newNode(c) nn.before = before nn.after = after nn.list = list if before != nil { before.after = nn } if after != nil { after.before = nn } return nn } func (n *node) assertUnattached() { if n.list != nil { panic(fmt.Sprintf("attempt to attach already-attached node %#v", n)) } } // nodeContent is the interface type implemented by all AST content types. type nodeContent interface { walkChildNodes(w internalWalkFunc) BuildTokens(to Tokens) Tokens } // nodes is a list of nodes. 
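//
// (Illustrative note, not part of the original source: the list is a
// doubly-linked list threaded through the before/after pointers on each
// node, so appending, inserting and detaching are all constant-time.)
//
// A minimal usage sketch, assuming Tokens as the node content:
//
//	var ns nodes
//	first := ns.Append(Tokens{})
//	second := ns.Append(Tokens{})
//	first.Detach() // second is now both ns.first and ns.last
//	_ = second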
type nodes struct { first, last *node } func (ns *nodes) BuildTokens(to Tokens) Tokens { for n := ns.first; n != nil; n = n.after { to = n.BuildTokens(to) } return to } func (ns *nodes) Clear() { ns.first = nil ns.last = nil } func (ns *nodes) Append(c nodeContent) *node { n := &node{ content: c, } ns.AppendNode(n) n.list = ns return n } func (ns *nodes) AppendNode(n *node) { if ns.last != nil { n.before = ns.last ns.last.after = n } n.list = ns ns.last = n if ns.first == nil { ns.first = n } } // Insert inserts a nodeContent at a given position. // This is just a wrapper for InsertNode. See InsertNode for details. func (ns *nodes) Insert(pos *node, c nodeContent) *node { n := &node{ content: c, } ns.InsertNode(pos, n) n.list = ns return n } // InsertNode inserts a node at a given position. // The first argument is a node reference before which to insert. // To insert it to an empty list, set position to nil. func (ns *nodes) InsertNode(pos *node, n *node) { if pos == nil { // inserts n to empty list. ns.first = n ns.last = n } else { // inserts n before pos. pos.before.after = n n.before = pos.before pos.before = n n.after = pos } n.list = ns } func (ns *nodes) AppendUnstructuredTokens(tokens Tokens) *node { if len(tokens) == 0 { return nil } n := newNode(tokens) ns.AppendNode(n) n.list = ns return n } // FindNodeWithContent searches the nodes for a node whose content equals // the given content. If it finds one then it returns it. Otherwise it returns // nil. func (ns *nodes) FindNodeWithContent(content nodeContent) *node { for n := ns.first; n != nil; n = n.after { if n.content == content { return n } } return nil } // nodeSet is an unordered set of nodes. It is used to describe a set of nodes // that all belong to the same list that have some role or characteristic // in common. type nodeSet map[*node]struct{} func newNodeSet() nodeSet { return make(nodeSet) } func (ns nodeSet) Has(n *node) bool { if ns == nil { return false } _, exists := ns[n] return exists } func (ns nodeSet) Add(n *node) { ns[n] = struct{}{} } func (ns nodeSet) Remove(n *node) { delete(ns, n) } func (ns nodeSet) Clear() { for n := range ns { delete(ns, n) } } func (ns nodeSet) List() []*node { if len(ns) == 0 { return nil } ret := make([]*node, 0, len(ns)) // Determine which list we are working with. We assume here that all of // the nodes belong to the same list, since that is part of the contract // for nodeSet. var list *nodes for n := range ns { list = n.list break } // We recover the order by iterating over the whole list. This is not // the most efficient way to do it, but our node lists should always be // small so not worth making things more complex. for n := list.first; n != nil; n = n.after { if ns.Has(n) { ret = append(ret, n) } } return ret } // FindNodeWithContent searches the nodes for a node whose content equals // the given content. If it finds one then it returns it. Otherwise it returns // nil. func (ns nodeSet) FindNodeWithContent(content nodeContent) *node { for n := range ns { if n.content == content { return n } } return nil } type internalWalkFunc func(*node) // inTree can be embedded into a content struct that has child nodes to get // a standard implementation of the NodeContent interface and a record of // a potential parent node. 
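//
// For example, Body, Block, Attribute and Expression in this package all
// embed inTree and initialize it with newInTree().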
type inTree struct { parent *node children *nodes } func newInTree() inTree { return inTree{ children: &nodes{}, } } func (it *inTree) assertUnattached() { if it.parent != nil { panic(fmt.Sprintf("node is already attached to %T", it.parent.content)) } } func (it *inTree) walkChildNodes(w internalWalkFunc) { for n := it.children.first; n != nil; n = n.after { w(n) } } func (it *inTree) BuildTokens(to Tokens) Tokens { for n := it.children.first; n != nil; n = n.after { to = n.BuildTokens(to) } return to } // leafNode can be embedded into a content struct to give it a do-nothing // implementation of walkChildNodes type leafNode struct { } func (n *leafNode) walkChildNodes(w internalWalkFunc) { } hcl-2.14.1/hclwrite/parser.go000066400000000000000000000523421431334125700160220ustar00rootroot00000000000000package hclwrite import ( "fmt" "sort" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/zclconf/go-cty/cty" ) // Our "parser" here is actually not doing any parsing of its own. Instead, // it leans on the native parser in hclsyntax, and then uses the source ranges // from the AST to partition the raw token sequence to match the raw tokens // up to AST nodes. // // This strategy feels somewhat counter-intuitive, since most of the work the // parser does is thrown away here, but this strategy is chosen because the // normal parsing work done by hclsyntax is considered to be the "main case", // while modifying and re-printing source is more of an edge case, used only // in ancillary tools, and so it's good to keep all the main parsing logic // with the main case but keep all of the extra complexity of token wrangling // out of the main parser, which is already rather complex just serving the // use-cases it already serves. // // If the parsing step produces any errors, the returned File is nil because // we can't reliably extract tokens from the partial AST produced by an // erroneous parse. func parse(src []byte, filename string, start hcl.Pos) (*File, hcl.Diagnostics) { file, diags := hclsyntax.ParseConfig(src, filename, start) if diags.HasErrors() { return nil, diags } // To do our work here, we use the "native" tokens (those from hclsyntax) // to match against source ranges in the AST, but ultimately produce // slices from our sequence of "writer" tokens, which contain only // *relative* position information that is more appropriate for // transformation/writing use-cases. nativeTokens, diags := hclsyntax.LexConfig(src, filename, start) if diags.HasErrors() { // should never happen, since we would've caught these diags in // the first call above. 
return nil, diags } writerTokens := writerTokens(nativeTokens) from := inputTokens{ nativeTokens: nativeTokens, writerTokens: writerTokens, } before, root, after := parseBody(file.Body.(*hclsyntax.Body), from) ret := &File{ inTree: newInTree(), srcBytes: src, body: root, } nodes := ret.inTree.children nodes.Append(before.Tokens()) nodes.AppendNode(root) nodes.Append(after.Tokens()) return ret, diags } type inputTokens struct { nativeTokens hclsyntax.Tokens writerTokens Tokens } func (it inputTokens) Partition(rng hcl.Range) (before, within, after inputTokens) { start, end := partitionTokens(it.nativeTokens, rng) before = it.Slice(0, start) within = it.Slice(start, end) after = it.Slice(end, len(it.nativeTokens)) return } func (it inputTokens) PartitionType(ty hclsyntax.TokenType) (before, within, after inputTokens) { for i, t := range it.writerTokens { if t.Type == ty { return it.Slice(0, i), it.Slice(i, i+1), it.Slice(i+1, len(it.nativeTokens)) } } panic(fmt.Sprintf("didn't find any token of type %s", ty)) } func (it inputTokens) PartitionTypeOk(ty hclsyntax.TokenType) (before, within, after inputTokens, ok bool) { for i, t := range it.writerTokens { if t.Type == ty { return it.Slice(0, i), it.Slice(i, i+1), it.Slice(i+1, len(it.nativeTokens)), true } } return inputTokens{}, inputTokens{}, inputTokens{}, false } func (it inputTokens) PartitionTypeSingle(ty hclsyntax.TokenType) (before inputTokens, found *Token, after inputTokens) { before, within, after := it.PartitionType(ty) if within.Len() != 1 { panic("PartitionType found more than one token") } return before, within.Tokens()[0], after } // PartitionIncludingComments is like Partition except the returned "within" // range includes any lead and line comments associated with the range. func (it inputTokens) PartitionIncludingComments(rng hcl.Range) (before, within, after inputTokens) { start, end := partitionTokens(it.nativeTokens, rng) start = partitionLeadCommentTokens(it.nativeTokens[:start]) _, afterNewline := partitionLineEndTokens(it.nativeTokens[end:]) end += afterNewline before = it.Slice(0, start) within = it.Slice(start, end) after = it.Slice(end, len(it.nativeTokens)) return } // PartitionBlockItem is similar to PartitionIncludingComments but it returns // the comments as separate token sequences so that they can be captured into // AST attributes. It makes assumptions that apply only to block items, so // should not be used for other constructs.
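//
// An illustrative example (not from the original source): for a body such as
//
//	# doc comment
//	foo = 1 # note
//
// partitioned around the range of the attribute "foo", the results are
// roughly: leadComments covers "# doc comment\n", within covers "foo = 1",
// lineComments covers " # note\n" (single-line comments carry their own
// newline, so the separate newline partition is then empty), and
// before/after hold whatever precedes and follows the item.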
func (it inputTokens) PartitionBlockItem(rng hcl.Range) (before, leadComments, within, lineComments, newline, after inputTokens) { before, within, after = it.Partition(rng) before, leadComments = before.PartitionLeadComments() lineComments, newline, after = after.PartitionLineEndTokens() return } func (it inputTokens) PartitionLeadComments() (before, within inputTokens) { start := partitionLeadCommentTokens(it.nativeTokens) before = it.Slice(0, start) within = it.Slice(start, len(it.nativeTokens)) return } func (it inputTokens) PartitionLineEndTokens() (comments, newline, after inputTokens) { afterComments, afterNewline := partitionLineEndTokens(it.nativeTokens) comments = it.Slice(0, afterComments) newline = it.Slice(afterComments, afterNewline) after = it.Slice(afterNewline, len(it.nativeTokens)) return } func (it inputTokens) Slice(start, end int) inputTokens { // When we slice, we create a new slice with no additional capacity because // we expect that these slices will be mutated in order to insert // new code into the AST, and we want to ensure that a new underlying // array gets allocated in that case, rather than writing into some // following slice and corrupting it. return inputTokens{ nativeTokens: it.nativeTokens[start:end:end], writerTokens: it.writerTokens[start:end:end], } } func (it inputTokens) Len() int { return len(it.nativeTokens) } func (it inputTokens) Tokens() Tokens { return it.writerTokens } func (it inputTokens) Types() []hclsyntax.TokenType { ret := make([]hclsyntax.TokenType, len(it.nativeTokens)) for i, tok := range it.nativeTokens { ret[i] = tok.Type } return ret } // parseBody locates the given body within the given input tokens and returns // the resulting *Body object as well as the tokens that appeared before and // after it. func parseBody(nativeBody *hclsyntax.Body, from inputTokens) (inputTokens, *node, inputTokens) { before, within, after := from.PartitionIncludingComments(nativeBody.SrcRange) // The main AST doesn't retain the original source ordering of the // body items, so we need to reconstruct that ordering by inspecting // their source ranges. 
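// (The sort.Sort call below uses nativeNodeSorter, defined in
// native_node_sorter.go, which orders the items by the byte offset at which
// each one starts.)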
nativeItems := make([]hclsyntax.Node, 0, len(nativeBody.Attributes)+len(nativeBody.Blocks)) for _, nativeAttr := range nativeBody.Attributes { nativeItems = append(nativeItems, nativeAttr) } for _, nativeBlock := range nativeBody.Blocks { nativeItems = append(nativeItems, nativeBlock) } sort.Sort(nativeNodeSorter{nativeItems}) body := &Body{ inTree: newInTree(), items: newNodeSet(), } remain := within for _, nativeItem := range nativeItems { beforeItem, item, afterItem := parseBodyItem(nativeItem, remain) if beforeItem.Len() > 0 { body.AppendUnstructuredTokens(beforeItem.Tokens()) } body.appendItemNode(item) remain = afterItem } if remain.Len() > 0 { body.AppendUnstructuredTokens(remain.Tokens()) } return before, newNode(body), after } func parseBodyItem(nativeItem hclsyntax.Node, from inputTokens) (inputTokens, *node, inputTokens) { before, leadComments, within, lineComments, newline, after := from.PartitionBlockItem(nativeItem.Range()) var item *node switch tItem := nativeItem.(type) { case *hclsyntax.Attribute: item = parseAttribute(tItem, within, leadComments, lineComments, newline) case *hclsyntax.Block: item = parseBlock(tItem, within, leadComments, lineComments, newline) default: // should never happen if caller is behaving panic("unsupported native item type") } return before, item, after } func parseAttribute(nativeAttr *hclsyntax.Attribute, from, leadComments, lineComments, newline inputTokens) *node { attr := &Attribute{ inTree: newInTree(), } children := attr.inTree.children { cn := newNode(newComments(leadComments.Tokens())) attr.leadComments = cn children.AppendNode(cn) } before, nameTokens, from := from.Partition(nativeAttr.NameRange) { children.AppendUnstructuredTokens(before.Tokens()) if nameTokens.Len() != 1 { // Should never happen with valid input panic("attribute name is not exactly one token") } token := nameTokens.Tokens()[0] in := newNode(newIdentifier(token)) attr.name = in children.AppendNode(in) } before, equalsTokens, from := from.Partition(nativeAttr.EqualsRange) children.AppendUnstructuredTokens(before.Tokens()) children.AppendUnstructuredTokens(equalsTokens.Tokens()) before, exprTokens, from := from.Partition(nativeAttr.Expr.Range()) { children.AppendUnstructuredTokens(before.Tokens()) exprNode := parseExpression(nativeAttr.Expr, exprTokens) attr.expr = exprNode children.AppendNode(exprNode) } { cn := newNode(newComments(lineComments.Tokens())) attr.lineComments = cn children.AppendNode(cn) } children.AppendUnstructuredTokens(newline.Tokens()) // Collect any stragglers, though there shouldn't be any children.AppendUnstructuredTokens(from.Tokens()) return newNode(attr) } func parseBlock(nativeBlock *hclsyntax.Block, from, leadComments, lineComments, newline inputTokens) *node { block := &Block{ inTree: newInTree(), } children := block.inTree.children { cn := newNode(newComments(leadComments.Tokens())) block.leadComments = cn children.AppendNode(cn) } before, typeTokens, from := from.Partition(nativeBlock.TypeRange) { children.AppendUnstructuredTokens(before.Tokens()) if typeTokens.Len() != 1 { // Should never happen with valid input panic("block type name is not exactly one token") } token := typeTokens.Tokens()[0] in := newNode(newIdentifier(token)) block.typeName = in children.AppendNode(in) } before, labelsNode, from := parseBlockLabels(nativeBlock, from) block.labels = labelsNode children.AppendNode(labelsNode) before, oBrace, from := from.Partition(nativeBlock.OpenBraceRange) children.AppendUnstructuredTokens(before.Tokens()) block.open = 
children.AppendUnstructuredTokens(oBrace.Tokens()) // We go a bit out of order here: we go hunting for the closing brace // so that we have a delimited body, but then we'll deal with the body // before we actually append the closing brace and any straggling tokens // that appear after it. bodyTokens, cBrace, from := from.Partition(nativeBlock.CloseBraceRange) before, body, after := parseBody(nativeBlock.Body, bodyTokens) children.AppendUnstructuredTokens(before.Tokens()) block.body = body children.AppendNode(body) children.AppendUnstructuredTokens(after.Tokens()) block.close = children.AppendUnstructuredTokens(cBrace.Tokens()) // stragglers children.AppendUnstructuredTokens(from.Tokens()) if lineComments.Len() > 0 { // blocks don't actually have line comments, so we'll just treat // them as extra stragglers children.AppendUnstructuredTokens(lineComments.Tokens()) } children.AppendUnstructuredTokens(newline.Tokens()) return newNode(block) } func parseBlockLabels(nativeBlock *hclsyntax.Block, from inputTokens) (inputTokens, *node, inputTokens) { labelsObj := newBlockLabels(nil) children := labelsObj.children var beforeAll inputTokens for i, rng := range nativeBlock.LabelRanges { var before, labelTokens inputTokens before, labelTokens, from = from.Partition(rng) if i == 0 { beforeAll = before } else { children.AppendUnstructuredTokens(before.Tokens()) } tokens := labelTokens.Tokens() var ln *node if len(tokens) == 1 && tokens[0].Type == hclsyntax.TokenIdent { ln = newNode(newIdentifier(tokens[0])) } else { ln = newNode(newQuoted(tokens)) } labelsObj.items.Add(ln) children.AppendNode(ln) } after := from return beforeAll, newNode(labelsObj), after } func parseExpression(nativeExpr hclsyntax.Expression, from inputTokens) *node { expr := newExpression() children := expr.inTree.children nativeVars := nativeExpr.Variables() for _, nativeTraversal := range nativeVars { before, traversal, after := parseTraversal(nativeTraversal, from) children.AppendUnstructuredTokens(before.Tokens()) children.AppendNode(traversal) expr.absTraversals.Add(traversal) from = after } // Attach any stragglers that don't belong to a traversal to the expression // itself. In an expression with no traversals at all, this is just the // entirety of "from". 
children.AppendUnstructuredTokens(from.Tokens()) return newNode(expr) } func parseTraversal(nativeTraversal hcl.Traversal, from inputTokens) (before inputTokens, n *node, after inputTokens) { traversal := newTraversal() children := traversal.inTree.children before, from, after = from.Partition(nativeTraversal.SourceRange()) stepAfter := from for _, nativeStep := range nativeTraversal { before, step, after := parseTraversalStep(nativeStep, stepAfter) children.AppendUnstructuredTokens(before.Tokens()) children.AppendNode(step) traversal.steps.Add(step) stepAfter = after } return before, newNode(traversal), after } func parseTraversalStep(nativeStep hcl.Traverser, from inputTokens) (before inputTokens, n *node, after inputTokens) { var children *nodes switch tNativeStep := nativeStep.(type) { case hcl.TraverseRoot, hcl.TraverseAttr: step := newTraverseName() children = step.inTree.children before, from, after = from.Partition(nativeStep.SourceRange()) inBefore, token, inAfter := from.PartitionTypeSingle(hclsyntax.TokenIdent) name := newIdentifier(token) children.AppendUnstructuredTokens(inBefore.Tokens()) step.name = children.Append(name) children.AppendUnstructuredTokens(inAfter.Tokens()) return before, newNode(step), after case hcl.TraverseIndex: step := newTraverseIndex() children = step.inTree.children before, from, after = from.Partition(nativeStep.SourceRange()) if inBefore, dot, from, ok := from.PartitionTypeOk(hclsyntax.TokenDot); ok { children.AppendUnstructuredTokens(inBefore.Tokens()) children.AppendUnstructuredTokens(dot.Tokens()) valBefore, valToken, valAfter := from.PartitionTypeSingle(hclsyntax.TokenNumberLit) children.AppendUnstructuredTokens(valBefore.Tokens()) key := newNumber(valToken) step.key = children.Append(key) children.AppendUnstructuredTokens(valAfter.Tokens()) return before, newNode(step), after } var inBefore, oBrack, keyTokens, cBrack inputTokens inBefore, oBrack, from = from.PartitionType(hclsyntax.TokenOBrack) children.AppendUnstructuredTokens(inBefore.Tokens()) children.AppendUnstructuredTokens(oBrack.Tokens()) keyTokens, cBrack, from = from.PartitionType(hclsyntax.TokenCBrack) keyVal := tNativeStep.Key switch keyVal.Type() { case cty.String: key := newQuoted(keyTokens.Tokens()) step.key = children.Append(key) case cty.Number: valBefore, valToken, valAfter := keyTokens.PartitionTypeSingle(hclsyntax.TokenNumberLit) children.AppendUnstructuredTokens(valBefore.Tokens()) key := newNumber(valToken) step.key = children.Append(key) children.AppendUnstructuredTokens(valAfter.Tokens()) } children.AppendUnstructuredTokens(cBrack.Tokens()) children.AppendUnstructuredTokens(from.Tokens()) return before, newNode(step), after default: panic(fmt.Sprintf("unsupported traversal step type %T", nativeStep)) } } // writerTokens takes a sequence of tokens as produced by the main hclsyntax // package and transforms it into an equivalent sequence of tokens using // this package's own token model. // // The resulting list contains the same number of tokens and uses the same // indices as the input, allowing the two sets of tokens to be correlated // by index. func writerTokens(nativeTokens hclsyntax.Tokens) Tokens { // Ultimately we want a slice of token _pointers_, but since we can // predict how much memory we're going to devote to tokens we'll allocate // it all as a single flat buffer and thus give the GC less work to do. 
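// (Illustrative restatement of the pattern used below: one backing array
// plus a slice of pointers into it,
//
//	buf := make([]Token, n)
//	ptrs := make(Tokens, n)
//	for i := range ptrs {
//		ptrs[i] = &buf[i]
//	}
//
// costs a fixed number of allocations rather than one per token.)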
tokBuf := make([]Token, len(nativeTokens)) var lastByteOffset int for i, mainToken := range nativeTokens { // Create a copy of the bytes so that we can mutate without // corrupting the original token stream. bytes := make([]byte, len(mainToken.Bytes)) copy(bytes, mainToken.Bytes) tokBuf[i] = Token{ Type: mainToken.Type, Bytes: bytes, // We assume here that spaces are always ASCII spaces, since // that's what the scanner also assumes, and thus the number // of bytes skipped is also the number of space characters. SpacesBefore: mainToken.Range.Start.Byte - lastByteOffset, } lastByteOffset = mainToken.Range.End.Byte } // Now make a slice of pointers into the previous slice. ret := make(Tokens, len(tokBuf)) for i := range ret { ret[i] = &tokBuf[i] } return ret } // partitionTokens takes a sequence of tokens and a hcl.Range and returns // two indices within the token sequence that correspond with the range // boundaries, such that the slice operator could be used to produce // three token sequences for before, within, and after respectively: // // start, end := partitionTokens(toks, rng) // before := toks[:start] // within := toks[start:end] // after := toks[end:] // // This works best when the range is aligned with token boundaries (e.g. // because it was produced in terms of the scanner's result) but if that isn't // true then it will make a best effort that may produce strange results at // the boundaries. // // Native hclsyntax tokens are used here, because they contain the necessary // absolute position information. However, since writerTokens produces a // correlatable sequence of writer tokens, the resulting indices can be // used also to index into its result, allowing the partitioning of writer // tokens to be driven by the partitioning of native tokens. // // The tokens are assumed to be in source order and non-overlapping, which // will be true if the token sequence from the scanner is used directly. func partitionTokens(toks hclsyntax.Tokens, rng hcl.Range) (start, end int) { // We use a linear search here because we assume that in most cases our // target range is close to the beginning of the sequence, and the sequences // are generally small for most reasonable files anyway. for i := 0; ; i++ { if i >= len(toks) { // No tokens for the given range at all! return len(toks), len(toks) } if toks[i].Range.Start.Byte >= rng.Start.Byte { start = i break } } for i := start; ; i++ { if i >= len(toks) { // The range "hangs off" the end of the token sequence return start, len(toks) } if toks[i].Range.Start.Byte >= rng.End.Byte { end = i // end marker is exclusive break } } return start, end } // partitionLeadCommentTokens takes a sequence of tokens that is assumed // to immediately precede a construct that can have lead comment tokens, // and returns the index into that sequence where the lead comments begin. // // Lead comments are defined as whole lines containing only comment tokens // with no blank lines between. If no such lines are found, the returned // index will be len(toks). func partitionLeadCommentTokens(toks hclsyntax.Tokens) int { // single-line comments (which is what we're interested in here) // consume their trailing newline, so we can just walk backwards // until we stop seeing comment tokens. 
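//
// Illustrative examples, mirroring this function's unit tests: for the token
// types [Comment, Newline, Comment] the result is 2, because only the
// trailing comment belongs to the following item, while for
// [Comment, Comment] the result is 0, because both comments are lead
// comments.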
for i := len(toks) - 1; i >= 0; i-- { if toks[i].Type != hclsyntax.TokenComment { return i + 1 } } return 0 } // partitionLineEndTokens takes a sequence of tokens that is assumed // to immediately follow a construct that can have a line comment, and // returns first the index where any line comments end and then second // the index immediately after the trailing newline. // // Line comments are defined as comments that appear immediately after // a construct on the same line where its significant tokens ended. // // Since single-line comment tokens (# and //) include the newline that // terminates them, in the presence of these the two returned indices // will be the same since the comment itself serves as the line end. func partitionLineEndTokens(toks hclsyntax.Tokens) (afterComment, afterNewline int) { for i := 0; i < len(toks); i++ { tok := toks[i] if tok.Type != hclsyntax.TokenComment { switch tok.Type { case hclsyntax.TokenNewline: return i, i + 1 case hclsyntax.TokenEOF: // Although this is valid, we mustn't include the EOF // itself as our "newline" or else strange things will // happen when we try to append new items. return i, i default: // If we have well-formed input here then nothing else should be // possible. This path should never happen, because we only try // to extract tokens from the sequence if the parser succeeded, // and it should catch this problem itself. panic("malformed line trailers: expected only comments and newlines") } } if len(tok.Bytes) > 0 && tok.Bytes[len(tok.Bytes)-1] == '\n' { // Newline at the end of a single-line comment serves both as // the end of comments *and* the end of the line. return i + 1, i + 1 } } return len(toks), len(toks) } // lexConfig uses the hclsyntax scanner to get a token stream and then // rewrites it into this package's token model. // // Any errors produced during scanning are ignored, so the results of this // function should be used with care. 
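//
// (For example, Format in public.go is built on lexConfig: it scans with
// this function and then only adjusts inter-token spacing, which is why it
// tolerates source that does not parse cleanly.)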
func lexConfig(src []byte) Tokens { mainTokens, _ := hclsyntax.LexConfig(src, "", hcl.Pos{Byte: 0, Line: 1, Column: 1}) return writerTokens(mainTokens) } hcl-2.14.1/hclwrite/parser_test.go000066400000000000000000000700051431334125700170550ustar00rootroot00000000000000package hclwrite import ( "fmt" "reflect" "testing" "github.com/davecgh/go-spew/spew" "github.com/google/go-cmp/cmp" "github.com/kylelemons/godebug/pretty" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" ) func TestParse(t *testing.T) { tests := []struct { src string want TestTreeNode }{ { "", TestTreeNode{ Type: "Body", }, }, { "a = 1\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Tokens", Val: " 1", }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "# aye aye aye\na = 1\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", Val: "# aye aye aye\n", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Tokens", Val: " 1", }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = 1 # because it is\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Tokens", Val: " 1", }, }, }, { Type: "comments", Val: " # because it is\n", }, }, }, }, }, }, { "# bee bee bee\n\nb = 1\n", // two newlines separate the comment from the attribute TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Tokens", // Only lead/line comments attached to an object have type "comments" Val: "# bee bee bee\n\n", }, { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "b", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Tokens", Val: " 1", }, }, }, { Type: "comments", Val: "", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = (\n 1 + 2\n)\nb = 3\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ {Type: "comments"}, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Tokens", Val: " (\n 1 + 2\n)", }, }, }, {Type: "comments"}, { Type: "Tokens", Val: "\n", }, }, }, { Type: "Attribute", Children: []TestTreeNode{ {Type: "comments"}, { Type: "identifier", Val: "b", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Tokens", Val: " 3", }, }, }, {Type: "comments"}, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "b {}\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Block", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "b", }, { Type: "blockLabels", }, { Type: "Tokens", Val: " {", }, { Type: "Body", }, { Type: "Tokens", Val: "}", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "b label {}\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Block", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "b", }, { Type: "blockLabels", Children: []TestTreeNode{ { Type: "identifier", Val: " label", }, 
}, }, { Type: "Tokens", Val: " {", }, { Type: "Body", }, { Type: "Tokens", Val: "}", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "b \"label\" {}\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Block", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "b", }, { Type: "blockLabels", Children: []TestTreeNode{ { Type: "quoted", Val: ` "label"`, }, }, }, { Type: "Tokens", Val: " {", }, { Type: "Body", }, { Type: "Tokens", Val: "}", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "b \"label1\" /* foo */ \"label2\" {}\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Block", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "b", }, { Type: "blockLabels", Children: []TestTreeNode{ { Type: "quoted", Val: ` "label1"`, }, { // The comment between the labels just // becomes an "unstructured tokens" // node, because this isn't a place // where we expect comments to attach // to a particular object as // documentation. Type: "Tokens", Val: ` /* foo */`, }, { Type: "quoted", Val: ` "label2"`, }, }, }, { Type: "Tokens", Val: " {", }, { Type: "Body", }, { Type: "Tokens", Val: "}", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "b {\n a = 1\n}\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Block", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "b", }, { Type: "blockLabels", }, { Type: "Tokens", Val: " {", }, { Type: "Body", Children: []TestTreeNode{ { Type: "Tokens", Val: "\n", }, { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: " a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Tokens", Val: " 1", }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, { Type: "Tokens", Val: "}", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = foo\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: " foo", }, }, }, }, }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = foo.bar\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: " foo", }, }, }, { Type: "TraverseName", Children: []TestTreeNode{ { Type: "Tokens", Val: ".", }, { Type: "identifier", Val: "bar", }, }, }, }, }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = foo[0]\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: " foo", }, }, }, { Type: "TraverseIndex", Children: []TestTreeNode{ { Type: "Tokens", Val: "[", }, { Type: "number", Val: "0", }, { Type: "Tokens", Val: "]", }, }, }, }, }, }, }, { 
Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = foo.0\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: " foo", }, }, }, { Type: "TraverseIndex", Children: []TestTreeNode{ { Type: "Tokens", Val: ".", }, { Type: "number", Val: "0", }, }, }, }, }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = foo.*\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: " foo", }, }, }, }, }, { Type: "Tokens", Val: ".*", }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = foo.*.bar\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: " foo", }, }, }, }, }, { Type: "Tokens", Val: ".*.bar", }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = foo[*]\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: " foo", }, }, }, }, }, { Type: "Tokens", Val: "[*]", }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = foo[*].bar\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: " foo", }, }, }, }, }, { Type: "Tokens", Val: "[*].bar", }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = foo[bar]\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: " foo", }, }, }, }, }, { Type: "Tokens", Val: "[", }, { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: "bar", }, }, }, }, }, { Type: "Tokens", Val: "]", }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = foo[bar.baz]\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: 
"comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: " foo", }, }, }, }, }, { Type: "Tokens", Val: "[", }, { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: "bar", }, }, }, { Type: "TraverseName", Children: []TestTreeNode{ { Type: "Tokens", Val: ".", }, { Type: "identifier", Val: "baz", }, }, }, }, }, { Type: "Tokens", Val: "]", }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, { "a = foo[bar].baz\n", TestTreeNode{ Type: "Body", Children: []TestTreeNode{ { Type: "Attribute", Children: []TestTreeNode{ { Type: "comments", }, { Type: "identifier", Val: "a", }, { Type: "Tokens", Val: " =", }, { Type: "Expression", Children: []TestTreeNode{ { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: " foo", }, }, }, }, }, { Type: "Tokens", Val: "[", }, { Type: "Traversal", Children: []TestTreeNode{ { Type: "TraverseName", Children: []TestTreeNode{ { Type: "identifier", Val: "bar", }, }, }, }, }, { Type: "Tokens", Val: "].baz", }, }, }, { Type: "comments", }, { Type: "Tokens", Val: "\n", }, }, }, }, }, }, } for _, test := range tests { t.Run(test.src, func(t *testing.T) { file, diags := parse([]byte(test.src), "", hcl.Pos{Line: 1, Column: 1}) if len(diags) > 0 { for _, diag := range diags { t.Logf(" - %s", diag.Error()) } t.Fatalf("unexpected diagnostics") } got := makeTestTree(file.body) if !cmp.Equal(got, test.want) { diff := cmp.Diff(got, test.want) t.Errorf( "wrong result\ninput:\n%s\n\ngot:\n%s\nwant:%s\n\ndiff:\n%s", test.src, spew.Sdump(got), spew.Sdump(test.want), diff, ) } }) } } func TestPartitionTokens(t *testing.T) { tests := []struct { tokens hclsyntax.Tokens rng hcl.Range wantStart int wantEnd int }{ { hclsyntax.Tokens{}, hcl.Range{ Start: hcl.Pos{Byte: 0}, End: hcl.Pos{Byte: 0}, }, 0, 0, }, { hclsyntax.Tokens{ { Type: hclsyntax.TokenIdent, Range: hcl.Range{ Start: hcl.Pos{Byte: 0}, End: hcl.Pos{Byte: 4}, }, }, }, hcl.Range{ Start: hcl.Pos{Byte: 0}, End: hcl.Pos{Byte: 4}, }, 0, 1, }, { hclsyntax.Tokens{ { Type: hclsyntax.TokenIdent, Range: hcl.Range{ Start: hcl.Pos{Byte: 0}, End: hcl.Pos{Byte: 4}, }, }, { Type: hclsyntax.TokenIdent, Range: hcl.Range{ Start: hcl.Pos{Byte: 4}, End: hcl.Pos{Byte: 8}, }, }, { Type: hclsyntax.TokenIdent, Range: hcl.Range{ Start: hcl.Pos{Byte: 8}, End: hcl.Pos{Byte: 12}, }, }, }, hcl.Range{ Start: hcl.Pos{Byte: 4}, End: hcl.Pos{Byte: 8}, }, 1, 2, }, { hclsyntax.Tokens{ { Type: hclsyntax.TokenIdent, Range: hcl.Range{ Start: hcl.Pos{Byte: 0}, End: hcl.Pos{Byte: 4}, }, }, { Type: hclsyntax.TokenIdent, Range: hcl.Range{ Start: hcl.Pos{Byte: 4}, End: hcl.Pos{Byte: 8}, }, }, { Type: hclsyntax.TokenIdent, Range: hcl.Range{ Start: hcl.Pos{Byte: 8}, End: hcl.Pos{Byte: 12}, }, }, }, hcl.Range{ Start: hcl.Pos{Byte: 0}, End: hcl.Pos{Byte: 8}, }, 0, 2, }, { hclsyntax.Tokens{ { Type: hclsyntax.TokenIdent, Range: hcl.Range{ Start: hcl.Pos{Byte: 0}, End: hcl.Pos{Byte: 4}, }, }, { Type: hclsyntax.TokenIdent, Range: hcl.Range{ Start: hcl.Pos{Byte: 4}, End: hcl.Pos{Byte: 8}, }, }, { Type: hclsyntax.TokenIdent, Range: hcl.Range{ Start: hcl.Pos{Byte: 8}, End: hcl.Pos{Byte: 12}, }, }, }, hcl.Range{ Start: hcl.Pos{Byte: 4}, End: hcl.Pos{Byte: 12}, }, 1, 3, }, } prettyConfig := &pretty.Config{ Diffable: true, 
IncludeUnexported: true, PrintStringers: true, } for i, test := range tests { t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) { gotStart, gotEnd := partitionTokens(test.tokens, test.rng) if gotStart != test.wantStart || gotEnd != test.wantEnd { t.Errorf( "wrong result\ntokens: %s\nrange: %#v\ngot: %d, %d\nwant: %d, %d", prettyConfig.Sprint(test.tokens), test.rng, gotStart, test.wantStart, gotEnd, test.wantEnd, ) } }) } } func TestPartitionLeadCommentTokens(t *testing.T) { tests := []struct { tokens hclsyntax.Tokens wantStart int }{ { hclsyntax.Tokens{}, 0, }, { hclsyntax.Tokens{ { Type: hclsyntax.TokenComment, }, }, 0, }, { hclsyntax.Tokens{ { Type: hclsyntax.TokenComment, }, { Type: hclsyntax.TokenComment, }, }, 0, }, { hclsyntax.Tokens{ { Type: hclsyntax.TokenComment, }, { Type: hclsyntax.TokenNewline, }, }, 2, }, { hclsyntax.Tokens{ { Type: hclsyntax.TokenComment, }, { Type: hclsyntax.TokenNewline, }, { Type: hclsyntax.TokenComment, }, }, 2, }, } prettyConfig := &pretty.Config{ Diffable: true, IncludeUnexported: true, PrintStringers: true, } for i, test := range tests { t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) { gotStart := partitionLeadCommentTokens(test.tokens) if gotStart != test.wantStart { t.Errorf( "wrong result\ntokens: %s\ngot: %d\nwant: %d", prettyConfig.Sprint(test.tokens), gotStart, test.wantStart, ) } }) } } func TestLexConfig(t *testing.T) { tests := []struct { input string want Tokens }{ { `a b `, Tokens{ { Type: hclsyntax.TokenIdent, Bytes: []byte(`a`), SpacesBefore: 0, }, { Type: hclsyntax.TokenIdent, Bytes: []byte(`b`), SpacesBefore: 2, }, { Type: hclsyntax.TokenEOF, Bytes: []byte{}, SpacesBefore: 1, }, }, }, { ` foo "bar" "baz" { pizza = " cheese " } `, Tokens{ { Type: hclsyntax.TokenNewline, Bytes: []byte{'\n'}, SpacesBefore: 0, }, { Type: hclsyntax.TokenIdent, Bytes: []byte(`foo`), SpacesBefore: 0, }, { Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), SpacesBefore: 1, }, { Type: hclsyntax.TokenQuotedLit, Bytes: []byte(`bar`), SpacesBefore: 0, }, { Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`), SpacesBefore: 0, }, { Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), SpacesBefore: 1, }, { Type: hclsyntax.TokenQuotedLit, Bytes: []byte(`baz`), SpacesBefore: 0, }, { Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`), SpacesBefore: 0, }, { Type: hclsyntax.TokenOBrace, Bytes: []byte(`{`), SpacesBefore: 1, }, { Type: hclsyntax.TokenNewline, Bytes: []byte("\n"), SpacesBefore: 0, }, { Type: hclsyntax.TokenIdent, Bytes: []byte(`pizza`), SpacesBefore: 4, }, { Type: hclsyntax.TokenEqual, Bytes: []byte(`=`), SpacesBefore: 1, }, { Type: hclsyntax.TokenOQuote, Bytes: []byte(`"`), SpacesBefore: 1, }, { Type: hclsyntax.TokenQuotedLit, Bytes: []byte(` cheese `), SpacesBefore: 0, }, { Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`), SpacesBefore: 0, }, { Type: hclsyntax.TokenNewline, Bytes: []byte("\n"), SpacesBefore: 0, }, { Type: hclsyntax.TokenCBrace, Bytes: []byte(`}`), SpacesBefore: 0, }, { Type: hclsyntax.TokenNewline, Bytes: []byte("\n"), SpacesBefore: 0, }, { Type: hclsyntax.TokenEOF, Bytes: []byte{}, SpacesBefore: 0, }, }, }, } prettyConfig := &pretty.Config{ Diffable: true, IncludeUnexported: true, PrintStringers: true, } for _, test := range tests { t.Run(test.input, func(t *testing.T) { got := lexConfig([]byte(test.input)) if !reflect.DeepEqual(got, test.want) { diff := prettyConfig.Compare(test.want, got) t.Errorf( "wrong result\ninput: %s\ndiff: %s", test.input, diff, ) } }) } } 
hcl-2.14.1/hclwrite/public.go000066400000000000000000000023521431334125700160000ustar00rootroot00000000000000package hclwrite import ( "bytes" "github.com/hashicorp/hcl/v2" ) // NewFile creates a new file object that is empty and ready to have constructs // added t it. func NewFile() *File { body := &Body{ inTree: newInTree(), items: newNodeSet(), } file := &File{ inTree: newInTree(), } file.body = file.inTree.children.Append(body) return file } // ParseConfig interprets the given source bytes into a *hclwrite.File. The // resulting AST can be used to perform surgical edits on the source code // before turning it back into bytes again. func ParseConfig(src []byte, filename string, start hcl.Pos) (*File, hcl.Diagnostics) { return parse(src, filename, start) } // Format takes source code and performs simple whitespace changes to transform // it to a canonical layout style. // // Format skips constructing an AST and works directly with tokens, so it // is less expensive than formatting via the AST for situations where no other // changes will be made. It also ignores syntax errors and can thus be applied // to partial source code, although the result in that case may not be // desirable. func Format(src []byte) []byte { tokens := lexConfig(src) format(tokens) buf := &bytes.Buffer{} tokens.WriteTo(buf) return buf.Bytes() } hcl-2.14.1/hclwrite/round_trip_test.go000066400000000000000000000076651431334125700177620ustar00rootroot00000000000000package hclwrite import ( "bytes" "testing" "github.com/sergi/go-diff/diffmatchpatch" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" "github.com/zclconf/go-cty/cty/function/stdlib" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" ) func TestRoundTripVerbatim(t *testing.T) { tests := []string{ ``, `foo = 1 `, ` foobar = 1 baz = 1 `, ` # this file is awesome # tossed salads and scrambled eggs foobar = 1 baz = 1 block { a = "a" b = "b" c = "c" d = "d" subblock { } subblock { e = "e" } } # and they all lived happily ever after `, } for _, test := range tests { t.Run(test, func(t *testing.T) { src := []byte(test) file, diags := parse(src, "", hcl.Pos{Line: 1, Column: 1}) if len(diags) != 0 { for _, diag := range diags { t.Logf(" - %s", diag.Error()) } t.Fatalf("unexpected diagnostics") } wr := &bytes.Buffer{} n, err := file.WriteTo(wr) if n != int64(len(test)) { t.Errorf("wrong number of bytes %d; want %d", n, len(test)) } if err != nil { t.Fatalf("error from WriteTo") } result := wr.Bytes() if !bytes.Equal(result, src) { dmp := diffmatchpatch.New() diffs := dmp.DiffMain(string(src), string(result), false) //t.Errorf("wrong result\nresult:\n%s\ninput:\n%s", result, src) t.Errorf("wrong result\ndiff: (red indicates missing lines, and green indicates unexpected lines)\n%s", dmp.DiffPrettyText(diffs)) } }) } } func TestRoundTripFormat(t *testing.T) { // The goal of this test is to verify that the formatter doesn't change // the semantics of any expressions when it adds and removes whitespace. // String templates are the primary area of concern here, but we also // test some other things for completeness sake. // // The tests here must define zero or more attributes, which will be // extract with JustAttributes and evaluated both before and after // formatting. 
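	// A rough sketch of that kind of equivalence check (hypothetical shape,
	// not the exact code used later in this test):
	//
	//	f, _ := hclsyntax.ParseConfig(src, "", hcl.Pos{Line: 1, Column: 1})
	//	attrs, _ := f.Body.JustAttributes()
	//	before := map[string]cty.Value{}
	//	for name, attr := range attrs {
	//		before[name], _ = attr.Expr.Value(ctx)
	//	}
	//	// ...then Format(src), re-parse, re-evaluate, and compare with "before".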
tests := []string{ "", "\n\n\n", "a=1\n", "a=\"hello\"\n", "a=\"${hello} world\"\n", "a=upper(\"hello\")\n", "a=upper(hello)\n", "a=[1,2,3,4,five]\n", "a={greeting=hello}\n", "a={\ngreeting=hello\n}\n", "a={\ngreeting=hello}\n", "a={greeting=hello\n}\n", "a={greeting=hello,number=five,sarcastic=\"${upper(hello)}\"\n}\n", "a={\ngreeting=hello\nnumber=five\nsarcastic=\"${upper(hello)}\"\n}\n", "a=<", }, } } // Tokens is a flat list of tokens. type Tokens []*Token func (ts Tokens) Bytes() []byte { buf := &bytes.Buffer{} ts.WriteTo(buf) return buf.Bytes() } func (ts Tokens) testValue() string { return string(ts.Bytes()) } // Columns returns the number of columns (grapheme clusters) the token sequence // occupies. The result is not meaningful if there are newline or single-line // comment tokens in the sequence. func (ts Tokens) Columns() int { ret := 0 for _, token := range ts { ret += token.SpacesBefore // spaces are always worth one column each ct, _ := textseg.TokenCount(token.Bytes, textseg.ScanGraphemeClusters) ret += ct } return ret } // WriteTo takes an io.Writer and writes the bytes for each token to it, // along with the spacing that separates each token. In other words, this // allows serializing the tokens to a file or other such byte stream. func (ts Tokens) WriteTo(wr io.Writer) (int64, error) { // We know we're going to be writing a lot of small chunks of repeated // space characters, so we'll prepare a buffer of these that we can // easily pass to wr.Write without any further allocation. spaces := make([]byte, 40) for i := range spaces { spaces[i] = ' ' } var n int64 var err error for _, token := range ts { if err != nil { return n, err } for spacesBefore := token.SpacesBefore; spacesBefore > 0; spacesBefore -= len(spaces) { thisChunk := spacesBefore if thisChunk > len(spaces) { thisChunk = len(spaces) } var thisN int thisN, err = wr.Write(spaces[:thisChunk]) n += int64(thisN) if err != nil { return n, err } } var thisN int thisN, err = wr.Write(token.Bytes) n += int64(thisN) } return n, err } func (ts Tokens) walkChildNodes(w internalWalkFunc) { // Unstructured tokens have no child nodes } func (ts Tokens) BuildTokens(to Tokens) Tokens { return append(to, ts...) } // ObjectAttrTokens represents the raw tokens for the name and value of // one attribute in an object constructor expression. // // This is defined primarily for use with function TokensForObject. See // that function's documentation for more information. type ObjectAttrTokens struct { Name Tokens Value Tokens } func newIdentToken(name string) *Token { return &Token{ Type: hclsyntax.TokenIdent, Bytes: []byte(name), } } hcl-2.14.1/integrationtest/000077500000000000000000000000001431334125700155735ustar00rootroot00000000000000hcl-2.14.1/integrationtest/convertfunc_test.go000066400000000000000000000042161431334125700215200ustar00rootroot00000000000000package integrationtest import ( "testing" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/typeexpr" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" ) // TestTypeConvertFunc is an integration test of all of the layers involved // in making the type conversion function from ext/typeexpr work. // // This requires co-operation between the hclsyntax package, the ext/typeexpr // package, and the underlying cty functionality in order to work correctly. 
// // There are unit tests for the function implementation itself in the // ext/typeexpr package, so this test is focused on making sure the function // is given the opportunity to decode the second argument as a type expression // when the function is called from HCL native syntax. func TestTypeConvertFunc(t *testing.T) { // The convert function is special because it takes a type expression // rather than a value expression as its second argument. In this case, // we're asking it to convert a tuple into a list of strings: const exprSrc = `convert(["hello"], list(string))` // It achieves this by marking that second argument as being of a custom // type (a "capsule type", in cty terminology) that has a special // annotation which hclsyntax.FunctionCallExpr understands as allowing // the type to handle the analysis of the unevaluated expression, instead // of evaluating it as normal. // // To see more details of how this works, look at the definitions of // typexpr.TypeConstraintType and typeexpr.ConvertFunc, and at the // implementation of hclsyntax.FunctionCallExpr.Value. expr, diags := hclsyntax.ParseExpression([]byte(exprSrc), "", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Fatalf("unexpected problems: %s", diags.Error()) } ctx := &hcl.EvalContext{ Functions: map[string]function.Function{ "convert": typeexpr.ConvertFunc, }, } got, diags := expr.Value(ctx) if diags.HasErrors() { t.Fatalf("unexpected problems: %s", diags.Error()) } want := cty.ListVal([]cty.Value{cty.StringVal("hello")}) if !want.RawEquals(got) { t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, want) } } hcl-2.14.1/integrationtest/doc.go000066400000000000000000000004561431334125700166740ustar00rootroot00000000000000// Package integrationtest is an internal package that contains some // tests that attempt to exercise many HCL features together in realistic // scenarios. This is in addition to -- but not a substitute for -- unit tests // that verify the behavior of each feature separately. package integrationtest hcl-2.14.1/integrationtest/hcldec_into_expr_test.go000066400000000000000000000103641431334125700224760ustar00rootroot00000000000000package integrationtest import ( "testing" "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/customdecode" "github.com/hashicorp/hcl/v2/hcldec" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/zclconf/go-cty/cty" ) // TestHCLDecDecodeToExpr tests both hcldec's support for types with custom // expression decoding rules and the two expression capsule types implemented // in ext/customdecode. This mechanism requires cooperation between those // two components and cty in order to work, so it's helpful to exercise it in // an integration test. func TestHCLDecDecodeToExpr(t *testing.T) { // Here we're going to capture the structure of two simple expressions // without immediately evaluating them. const input = ` a = foo b = foo c = "hello" ` // We'll capture "a" directly as an expression, losing its evaluation // context but retaining its structure. We'll capture "b" as a // customdecode.ExpressionClosure, which gives us both the expression // itself and the evaluation context it was originally evaluated in. // We also have "c" here just to make sure we can still decode into a // "normal" type via standard expression evaluation. 
f, diags := hclsyntax.ParseConfig([]byte(input), "", hcl.Pos{Line: 1, Column: 1}) if diags.HasErrors() { t.Fatalf("unexpected problems: %s", diags.Error()) } spec := hcldec.ObjectSpec{ "a": &hcldec.AttrSpec{ Name: "a", Type: customdecode.ExpressionType, Required: true, }, "b": &hcldec.AttrSpec{ Name: "b", Type: customdecode.ExpressionClosureType, Required: true, }, "c": &hcldec.AttrSpec{ Name: "c", Type: cty.String, Required: true, }, } ctx := &hcl.EvalContext{ Variables: map[string]cty.Value{ "foo": cty.StringVal("foo value"), }, } objVal, diags := hcldec.Decode(f.Body, spec, ctx) if diags.HasErrors() { t.Fatalf("unexpected problems: %s", diags.Error()) } aVal := objVal.GetAttr("a") bVal := objVal.GetAttr("b") cVal := objVal.GetAttr("c") if got, want := aVal.Type(), customdecode.ExpressionType; !got.Equals(want) { t.Fatalf("wrong type for 'a'\ngot: %#v\nwant: %#v", got, want) } if got, want := bVal.Type(), customdecode.ExpressionClosureType; !got.Equals(want) { t.Fatalf("wrong type for 'b'\ngot: %#v\nwant: %#v", got, want) } if got, want := cVal.Type(), cty.String; !got.Equals(want) { t.Fatalf("wrong type for 'c'\ngot: %#v\nwant: %#v", got, want) } gotAExpr := customdecode.ExpressionFromVal(aVal) wantAExpr := &hclsyntax.ScopeTraversalExpr{ Traversal: hcl.Traversal{ hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 5, Byte: 5}, End: hcl.Pos{Line: 2, Column: 8, Byte: 8}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 2, Column: 5, Byte: 5}, End: hcl.Pos{Line: 2, Column: 8, Byte: 8}, }, } if diff := cmp.Diff(wantAExpr, gotAExpr, cmpopts.IgnoreUnexported(hcl.TraverseRoot{})); diff != "" { t.Errorf("wrong expression for a\n%s", diff) } bClosure := customdecode.ExpressionClosureFromVal(bVal) gotBVal, diags := bClosure.Value() wantBVal := cty.StringVal("foo value") if diags.HasErrors() { t.Fatalf("unexpected problems: %s", diags.Error()) } if got, want := gotBVal, wantBVal; !want.RawEquals(got) { t.Errorf("wrong 'b' result\ngot: %#v\nwant: %#v", got, want) } if got, want := cVal, cty.StringVal("hello"); !want.RawEquals(got) { t.Errorf("wrong 'c'\ngot: %#v\nwant: %#v", got, want) } // One additional "trick" we can do with the expression closure is to // evaluate the expression in a _derived_ EvalContext, rather than the // captured one. This could be useful for introducing additional local // variables/functions in a particular context, for example. 
deriveCtx := bClosure.EvalContext.NewChild() deriveCtx.Variables = map[string]cty.Value{ "foo": cty.StringVal("overridden foo value"), } gotBVal2, diags := bClosure.Expression.Value(deriveCtx) wantBVal2 := cty.StringVal("overridden foo value") if diags.HasErrors() { t.Fatalf("unexpected problems: %s", diags.Error()) } if got, want := gotBVal2, wantBVal2; !want.RawEquals(got) { t.Errorf("wrong 'b' result with derived EvalContext\ngot: %#v\nwant: %#v", got, want) } } hcl-2.14.1/integrationtest/terraformlike_test.go000066400000000000000000000275101431334125700220340ustar00rootroot00000000000000package integrationtest import ( "reflect" "sort" "testing" "github.com/davecgh/go-spew/spew" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/dynblock" "github.com/hashicorp/hcl/v2/gohcl" "github.com/hashicorp/hcl/v2/hcldec" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/hashicorp/hcl/v2/json" "github.com/zclconf/go-cty/cty" ) // TestTerraformLike parses both a native syntax and a JSON representation // of the same HashiCorp Terraform-like configuration structure and then makes // assertions against the result of each. // // Terraform exercises a lot of different HCL codepaths, so this is not // exhaustive but tries to cover a variety of different relevant scenarios. func TestTerraformLike(t *testing.T) { tests := map[string]func() (*hcl.File, hcl.Diagnostics){ "native syntax": func() (*hcl.File, hcl.Diagnostics) { return hclsyntax.ParseConfig( []byte(terraformLikeNativeSyntax), "config.tf", hcl.Pos{Line: 1, Column: 1}, ) }, "JSON": func() (*hcl.File, hcl.Diagnostics) { return json.Parse( []byte(terraformLikeJSON), "config.tf.json", ) }, } type Variable struct { Name string `hcl:"name,label"` } type Resource struct { Type string `hcl:"type,label"` Name string `hcl:"name,label"` Config hcl.Body `hcl:",remain"` DependsOn hcl.Expression `hcl:"depends_on,attr"` } type Module struct { Name string `hcl:"name,label"` Providers hcl.Expression `hcl:"providers"` } type Root struct { Variables []*Variable `hcl:"variable,block"` Resources []*Resource `hcl:"resource,block"` Modules []*Module `hcl:"module,block"` } instanceDecode := &hcldec.ObjectSpec{ "image_id": &hcldec.AttrSpec{ Name: "image_id", Required: true, Type: cty.String, }, "instance_type": &hcldec.AttrSpec{ Name: "instance_type", Required: true, Type: cty.String, }, "tags": &hcldec.AttrSpec{ Name: "tags", Required: false, Type: cty.Map(cty.String), }, } securityGroupDecode := &hcldec.ObjectSpec{ "ingress": &hcldec.BlockListSpec{ TypeName: "ingress", Nested: &hcldec.ObjectSpec{ "cidr_block": &hcldec.AttrSpec{ Name: "cidr_block", Required: true, Type: cty.String, }, }, }, } for name, loadFunc := range tests { t.Run(name, func(t *testing.T) { file, diags := loadFunc() if len(diags) != 0 { t.Errorf("unexpected diagnostics during parse") for _, diag := range diags { t.Logf("- %s", diag) } return } body := file.Body var root Root diags = gohcl.DecodeBody(body, nil, &root) if len(diags) != 0 { t.Errorf("unexpected diagnostics during root eval") for _, diag := range diags { t.Logf("- %s", diag) } return } wantVars := []*Variable{ { Name: "image_id", }, } if gotVars := root.Variables; !reflect.DeepEqual(gotVars, wantVars) { t.Errorf("wrong Variables\ngot: %swant: %s", spew.Sdump(gotVars), spew.Sdump(wantVars)) } if got, want := len(root.Resources), 3; got != want { t.Fatalf("wrong number of Resources %d; want %d", got, want) } sort.Slice(root.Resources, func(i, j int) bool { return root.Resources[i].Name < root.Resources[j].Name }) 
t.Run("resource 0", func(t *testing.T) { r := root.Resources[0] if got, want := r.Type, "happycloud_security_group"; got != want { t.Errorf("wrong type %q; want %q", got, want) } if got, want := r.Name, "private"; got != want { t.Errorf("wrong type %q; want %q", got, want) } // For this one we're including support for the dynamic block // extension, since Terraform uses this to allow dynamic // generation of blocks within resource configuration. forEachCtx := &hcl.EvalContext{ Variables: map[string]cty.Value{ "var": cty.ObjectVal(map[string]cty.Value{ "extra_private_cidr_blocks": cty.ListVal([]cty.Value{ cty.StringVal("172.16.0.0/12"), cty.StringVal("169.254.0.0/16"), }), }), }, } dynBody := dynblock.Expand(r.Config, forEachCtx) cfg, diags := hcldec.Decode(dynBody, securityGroupDecode, nil) if len(diags) != 0 { t.Errorf("unexpected diagnostics decoding Config") for _, diag := range diags { t.Logf("- %s", diag) } return } wantCfg := cty.ObjectVal(map[string]cty.Value{ "ingress": cty.ListVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "cidr_block": cty.StringVal("10.0.0.0/8"), }), cty.ObjectVal(map[string]cty.Value{ "cidr_block": cty.StringVal("192.168.0.0/16"), }), cty.ObjectVal(map[string]cty.Value{ "cidr_block": cty.StringVal("172.16.0.0/12"), }), cty.ObjectVal(map[string]cty.Value{ "cidr_block": cty.StringVal("169.254.0.0/16"), }), }), }) if !cfg.RawEquals(wantCfg) { t.Errorf("wrong config\ngot: %#v\nwant: %#v", cfg, wantCfg) } }) t.Run("resource 1", func(t *testing.T) { r := root.Resources[1] if got, want := r.Type, "happycloud_security_group"; got != want { t.Errorf("wrong type %q; want %q", got, want) } if got, want := r.Name, "public"; got != want { t.Errorf("wrong type %q; want %q", got, want) } cfg, diags := hcldec.Decode(r.Config, securityGroupDecode, nil) if len(diags) != 0 { t.Errorf("unexpected diagnostics decoding Config") for _, diag := range diags { t.Logf("- %s", diag) } return } wantCfg := cty.ObjectVal(map[string]cty.Value{ "ingress": cty.ListVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "cidr_block": cty.StringVal("0.0.0.0/0"), }), }), }) if !cfg.RawEquals(wantCfg) { t.Errorf("wrong config\ngot: %#v\nwant: %#v", cfg, wantCfg) } }) t.Run("resource 2", func(t *testing.T) { r := root.Resources[2] if got, want := r.Type, "happycloud_instance"; got != want { t.Errorf("wrong type %q; want %q", got, want) } if got, want := r.Name, "test"; got != want { t.Errorf("wrong type %q; want %q", got, want) } vars := hcldec.Variables(r.Config, &hcldec.AttrSpec{ Name: "image_id", Type: cty.String, }) if got, want := len(vars), 1; got != want { t.Errorf("wrong number of variables in image_id %#v; want %#v", got, want) } if got, want := vars[0].RootName(), "var"; got != want { t.Errorf("wrong image_id variable RootName %#v; want %#v", got, want) } ctx := &hcl.EvalContext{ Variables: map[string]cty.Value{ "var": cty.ObjectVal(map[string]cty.Value{ "image_id": cty.StringVal("image-1234"), }), }, } cfg, diags := hcldec.Decode(r.Config, instanceDecode, ctx) if len(diags) != 0 { t.Errorf("unexpected diagnostics decoding Config") for _, diag := range diags { t.Logf("- %s", diag) } return } wantCfg := cty.ObjectVal(map[string]cty.Value{ "instance_type": cty.StringVal("z3.weedy"), "image_id": cty.StringVal("image-1234"), "tags": cty.MapVal(map[string]cty.Value{ "Name": cty.StringVal("foo"), "Environment": cty.StringVal("prod"), }), }) if !cfg.RawEquals(wantCfg) { t.Errorf("wrong config\ngot: %#v\nwant: %#v", cfg, wantCfg) } exprs, diags := hcl.ExprList(r.DependsOn) if len(diags) != 0 { 
t.Errorf("unexpected diagnostics extracting depends_on") for _, diag := range diags { t.Logf("- %s", diag) } return } if got, want := len(exprs), 1; got != want { t.Errorf("wrong number of depends_on exprs %#v; want %#v", got, want) } traversal, diags := hcl.AbsTraversalForExpr(exprs[0]) if len(diags) != 0 { t.Errorf("unexpected diagnostics decoding depends_on[0]") for _, diag := range diags { t.Logf("- %s", diag) } return } if got, want := len(traversal), 2; got != want { t.Errorf("wrong number of depends_on traversal steps %#v; want %#v", got, want) } if got, want := traversal.RootName(), "happycloud_security_group"; got != want { t.Errorf("wrong depends_on traversal RootName %#v; want %#v", got, want) } }) t.Run("module", func(t *testing.T) { if got, want := len(root.Modules), 1; got != want { t.Fatalf("wrong number of Modules %d; want %d", got, want) } mod := root.Modules[0] if got, want := mod.Name, "foo"; got != want { t.Errorf("wrong module name %q; want %q", got, want) } pExpr := mod.Providers pairs, diags := hcl.ExprMap(pExpr) if len(diags) != 0 { t.Errorf("unexpected diagnostics extracting providers") for _, diag := range diags { t.Logf("- %s", diag) } } if got, want := len(pairs), 1; got != want { t.Fatalf("wrong number of key/value pairs in providers %d; want %d", got, want) } pair := pairs[0] kt, diags := hcl.AbsTraversalForExpr(pair.Key) if len(diags) != 0 { t.Errorf("unexpected diagnostics extracting providers key %#v", pair.Key) for _, diag := range diags { t.Logf("- %s", diag) } } vt, diags := hcl.AbsTraversalForExpr(pair.Value) if len(diags) != 0 { t.Errorf("unexpected diagnostics extracting providers value %#v", pair.Value) for _, diag := range diags { t.Logf("- %s", diag) } } if got, want := len(kt), 1; got != want { t.Fatalf("wrong number of key traversal steps %d; want %d", got, want) } if got, want := len(vt), 2; got != want { t.Fatalf("wrong number of value traversal steps %d; want %d", got, want) } if got, want := kt.RootName(), "null"; got != want { t.Errorf("wrong number key traversal root %s; want %s", got, want) } if got, want := vt.RootName(), "null"; got != want { t.Errorf("wrong number value traversal root %s; want %s", got, want) } if at, ok := vt[1].(hcl.TraverseAttr); ok { if got, want := at.Name, "foo"; got != want { t.Errorf("wrong number value traversal attribute name %s; want %s", got, want) } } else { t.Errorf("wrong value traversal [1] type %T; want hcl.TraverseAttr", vt[1]) } }) }) } } const terraformLikeNativeSyntax = ` variable "image_id" { } resource "happycloud_instance" "test" { instance_type = "z3.weedy" image_id = var.image_id tags = { "Name" = "foo" "${"Environment"}" = "prod" } depends_on = [ happycloud_security_group.public, ] } resource "happycloud_security_group" "public" { ingress { cidr_block = "0.0.0.0/0" } } resource "happycloud_security_group" "private" { ingress { cidr_block = "10.0.0.0/8" } ingress { cidr_block = "192.168.0.0/16" } dynamic "ingress" { for_each = var.extra_private_cidr_blocks content { cidr_block = ingress.value } } } module "foo" { providers = { null = null.foo } } ` const terraformLikeJSON = ` { "variable": { "image_id": {} }, "resource": { "happycloud_instance": { "test": { "instance_type": "z3.weedy", "image_id": "${var.image_id}", "tags": { "Name": "foo", "${\"Environment\"}": "prod" }, "depends_on": [ "happycloud_security_group.public" ] } }, "happycloud_security_group": { "public": { "ingress": { "cidr_block": "0.0.0.0/0" } }, "private": { "ingress": [ { "cidr_block": "10.0.0.0/8" }, { "cidr_block": 
"192.168.0.0/16" } ], "dynamic": { "ingress": { "for_each": "${var.extra_private_cidr_blocks}", "iterator": "block", "content": { "cidr_block": "${block.value}" } } } } } }, "module": { "foo": { "providers": { "null": "null.foo" } } } } ` hcl-2.14.1/json/000077500000000000000000000000001431334125700133215ustar00rootroot00000000000000hcl-2.14.1/json/ast.go000066400000000000000000000041131431334125700144360ustar00rootroot00000000000000package json import ( "math/big" "github.com/hashicorp/hcl/v2" ) type node interface { Range() hcl.Range StartRange() hcl.Range } type objectVal struct { Attrs []*objectAttr SrcRange hcl.Range // range of the entire object, brace-to-brace OpenRange hcl.Range // range of the opening brace CloseRange hcl.Range // range of the closing brace } func (n *objectVal) Range() hcl.Range { return n.SrcRange } func (n *objectVal) StartRange() hcl.Range { return n.OpenRange } type objectAttr struct { Name string Value node NameRange hcl.Range // range of the name string } func (n *objectAttr) Range() hcl.Range { return n.NameRange } func (n *objectAttr) StartRange() hcl.Range { return n.NameRange } type arrayVal struct { Values []node SrcRange hcl.Range // range of the entire object, bracket-to-bracket OpenRange hcl.Range // range of the opening bracket } func (n *arrayVal) Range() hcl.Range { return n.SrcRange } func (n *arrayVal) StartRange() hcl.Range { return n.OpenRange } type booleanVal struct { Value bool SrcRange hcl.Range } func (n *booleanVal) Range() hcl.Range { return n.SrcRange } func (n *booleanVal) StartRange() hcl.Range { return n.SrcRange } type numberVal struct { Value *big.Float SrcRange hcl.Range } func (n *numberVal) Range() hcl.Range { return n.SrcRange } func (n *numberVal) StartRange() hcl.Range { return n.SrcRange } type stringVal struct { Value string SrcRange hcl.Range } func (n *stringVal) Range() hcl.Range { return n.SrcRange } func (n *stringVal) StartRange() hcl.Range { return n.SrcRange } type nullVal struct { SrcRange hcl.Range } func (n *nullVal) Range() hcl.Range { return n.SrcRange } func (n *nullVal) StartRange() hcl.Range { return n.SrcRange } // invalidVal is used as a placeholder where a value is needed for a valid // parse tree but the input was invalid enough to prevent one from being // created. type invalidVal struct { SrcRange hcl.Range } func (n invalidVal) Range() hcl.Range { return n.SrcRange } func (n invalidVal) StartRange() hcl.Range { return n.SrcRange } hcl-2.14.1/json/didyoumean.go000066400000000000000000000021451431334125700160100ustar00rootroot00000000000000package json import ( "github.com/agext/levenshtein" ) var keywords = []string{"false", "true", "null"} // keywordSuggestion tries to find a valid JSON keyword that is close to the // given string and returns it if found. If no keyword is close enough, returns // the empty string. func keywordSuggestion(given string) string { return nameSuggestion(given, keywords) } // nameSuggestion tries to find a name from the given slice of suggested names // that is close to the given name and returns it if found. If no suggestion // is close enough, returns the empty string. // // The suggestions are tried in order, so earlier suggestions take precedence // if the given string is similar to two or more suggestions. // // This function is intended to be used with a relatively-small number of // suggestions. It's not optimized for hundreds or thousands of them. 
func nameSuggestion(given string, suggestions []string) string { for _, suggestion := range suggestions { dist := levenshtein.Distance(given, suggestion, nil) if dist < 3 { // threshold determined experimentally return suggestion } } return "" } hcl-2.14.1/json/didyoumean_test.go000066400000000000000000000015331431334125700170470ustar00rootroot00000000000000package json import "testing" func TestKeywordSuggestion(t *testing.T) { tests := []struct { Input, Want string }{ {"true", "true"}, {"false", "false"}, {"null", "null"}, {"bananas", ""}, {"NaN", ""}, {"Inf", ""}, {"Infinity", ""}, {"void", ""}, {"undefined", ""}, {"ture", "true"}, {"tru", "true"}, {"tre", "true"}, {"treu", "true"}, {"rtue", "true"}, {"flase", "false"}, {"fales", "false"}, {"flse", "false"}, {"fasle", "false"}, {"fasel", "false"}, {"flue", "false"}, {"nil", "null"}, {"nul", "null"}, {"unll", "null"}, {"nll", "null"}, } for _, test := range tests { t.Run(test.Input, func(t *testing.T) { got := keywordSuggestion(test.Input) if got != test.Want { t.Errorf( "wrong result\ninput: %q\ngot: %q\nwant: %q", test.Input, got, test.Want, ) } }) } } hcl-2.14.1/json/doc.go000066400000000000000000000011131431334125700144110ustar00rootroot00000000000000// Package json is the JSON parser for HCL. It parses JSON files and returns // implementations of the core HCL structural interfaces in terms of the // JSON data inside. // // This is not a generic JSON parser. Instead, it deals with the mapping from // the JSON information model to the HCL information model, using a number // of hard-coded structural conventions. // // In most cases applications will not import this package directly, but will // instead access its functionality indirectly through functions in the main // "hcl" package and in the "hclparse" package. package json hcl-2.14.1/json/fuzz/000077500000000000000000000000001431334125700143175ustar00rootroot00000000000000hcl-2.14.1/json/fuzz/.gitignore000066400000000000000000000000171431334125700163050ustar00rootroot00000000000000fuzz*-fuzz.zip hcl-2.14.1/json/fuzz/Makefile000066400000000000000000000011521431334125700157560ustar00rootroot00000000000000 ifndef FUZZ_WORK_DIR $(error FUZZ_WORK_DIR is not set) endif default: @echo "See README.md for usage instructions" fuzz-config: fuzz-exec-config fuzz-exec-%: fuzz%-fuzz.zip go-fuzz -bin=./fuzz$*-fuzz.zip -workdir=$(FUZZ_WORK_DIR) fuzz%-fuzz.zip: %/fuzz.go go-fuzz-build github.com/hashicorp/hcl/v2/json/fuzz/$* tools: go get -u github.com/dvyukov/go-fuzz/go-fuzz go get -u github.com/dvyukov/go-fuzz/go-fuzz-build clean: rm fuzz*-fuzz.zip .PHONY: tools clean fuzz-config fuzz-expr fuzz-template fuzz-traversal .PRECIOUS: fuzzconfig-fuzz.zip fuzzexpr-fuzz.zip fuzztemplate-fuzz.zip fuzztraversal-fuzz.zip hcl-2.14.1/json/fuzz/README.md000066400000000000000000000033261431334125700156020ustar00rootroot00000000000000# JSON syntax fuzzing utilities This directory contains helper functions and corpora that can be used to fuzz-test the HCL JSON parser using Go's native fuzz testing capabilities. Please see https://go.dev/doc/fuzz/ for more information on fuzzing. ## Prerequisites * Go 1.18 ## Running the fuzzer Each exported function in the `json` package has a corresponding fuzz test. These can be run one at a time via `go test`: ``` $ cd fuzz $ go test -fuzz FuzzParse ``` This command will exit only when a crasher is found (see "Understanding the result" below). ## Seed corpus The seed corpus for each fuzz test function is stored in the corresponding directory under `json/fuzz/testdata/fuzz`. 
For example: ``` $ ls json/fuzz/testdata/fuzz/FuzzParse attr-expr.hcl.json attr-literal.hcl.json block-attrs.hcl.json ... ``` Additional seed inputs can be added to this corpus. Each file must be in the Go 1.18 corpus file format. Files can be converted to this format using the `file2fuzz` tool. To install it: ``` $ go install golang.org/x/tools/cmd/file2fuzz@latest $ file2fuzz -help ``` ## Understanding the result A small number of subdirectories will be created in the work directory. If you let `go-fuzz` run for a few minutes (the more minutes the better) it may detect "crashers", which are inputs that caused the parser to panic. These are written to `json/fuzz/testdata/fuzz//`: ``` $ ls json/fuzz/testdata/fuzz/FuzzParseTemplate 582528ddfad69eb57775199a43e0f9fd5c94bba343ce7bb6724d4ebafe311ed4 ``` A good first step to fixing a detected crasher is to copy the failing input into one of the unit tests in the `json` package and see it crash there too. After that, it's easy to re-run the test as you try to fix it. hcl-2.14.1/json/fuzz/fuzz_test.go000066400000000000000000000005351431334125700167060ustar00rootroot00000000000000package fuzzjson import ( "testing" "github.com/hashicorp/hcl/v2/json" ) func FuzzParse(f *testing.F) { f.Fuzz(func(t *testing.T, data []byte) { _, diags := json.Parse(data, "") if diags.HasErrors() { t.Logf("Error when parsing JSON %v", data) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } }) } hcl-2.14.1/json/fuzz/testdata/000077500000000000000000000000001431334125700161305ustar00rootroot00000000000000hcl-2.14.1/json/fuzz/testdata/fuzz/000077500000000000000000000000001431334125700171265ustar00rootroot00000000000000hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/000077500000000000000000000000001431334125700210575ustar00rootroot00000000000000hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/attr-expr.hcl.json000066400000000000000000000001111431334125700244360ustar00rootroot00000000000000go test fuzz v1 []byte("{\n \"foo\": \"${upper(bar + baz[1])}\"\n}\n")hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/attr-literal.hcl.json000066400000000000000000000000661431334125700251250ustar00rootroot00000000000000go test fuzz v1 []byte("{\n \"foo\": \"bar\"\n}\n")hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/block-attrs.hcl.json000066400000000000000000000001101431334125700247340ustar00rootroot00000000000000go test fuzz v1 []byte("{\n \"block\": {\n \"foo\": true\n }\n}\n")hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/block-empty.json000066400000000000000000000000631431334125700241770ustar00rootroot00000000000000go test fuzz v1 []byte("{\n \"block\": {}\n}\n")hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/block-nested.hcl.json000066400000000000000000000001561431334125700250730ustar00rootroot00000000000000go test fuzz v1 []byte("{\n \"block\": {\n \"another_block\": {\n \"foo\": \"bar\"\n }\n }\n}\n")hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/empty.hcl.json000066400000000000000000000000361431334125700236540ustar00rootroot00000000000000go test fuzz v1 []byte("{}\n")hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/list-empty.json000066400000000000000000000000631431334125700240600ustar00rootroot00000000000000go test fuzz v1 []byte("{\n \"hello\": []\n}\n")hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/list-nested.json000066400000000000000000000000651431334125700242060ustar00rootroot00000000000000go test fuzz v1 []byte("{\n \"hello\": [[]]\n}\n")hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/list-values.json000066400000000000000000000001511431334125700242170ustar00rootroot00000000000000go test 
fuzz v1 []byte("{\n \"hello\": [\n \"hello\",\n true,\n 1.2\n ]\n}\n")hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/number-big.hcl.json000066400000000000000000000000721431334125700245450ustar00rootroot00000000000000go test fuzz v1 []byte("{\n \"foo\": 1.234234e30\n}\n")hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/number-int.hcl.json000066400000000000000000000000631431334125700245760ustar00rootroot00000000000000go test fuzz v1 []byte("{\n \"foo\": 1024\n}\n")hcl-2.14.1/json/fuzz/testdata/fuzz/FuzzParse/utf8.hcl.json000066400000000000000000000001151431334125700234020ustar00rootroot00000000000000go test fuzz v1 []byte("{\n \"foo\": \"föo ${föo(\\\"föo\\\")}\"\n}\n")hcl-2.14.1/json/is.go000066400000000000000000000045531431334125700142720ustar00rootroot00000000000000package json import ( "github.com/hashicorp/hcl/v2" ) // IsJSONExpression returns true if and only if the given expression is one // that originated in a JSON document. // // Applications aiming to be syntax-agnostic should not use this function and // should instead use the normal expression evaluation or static analysis // APIs. // // However, JSON expressions do have a unique behavior whereby they interpret // the source JSON differently depending on the hcl.EvalContext value passed // to the Value method -- in particular, a nil hcl.EvalContext returns // literal strings rather than interpreting them as HCL template syntax -- // and so in exceptional cases an application may wish to rely on that behavior // in situations where it specifically knows the expression originated in JSON, // in case it needs to do some non-standard handling of the expression in that // case. // // Caution: The normal HCL API allows for HCL expression implementations that // wrap other HCL expression implementations. This function will return false // if given an expression of some other type that encapsulates a JSON // expression, even if the wrapper implementation would in principle preserve // the special evaluation behavior of the wrapped expression. func IsJSONExpression(maybeJSONExpr hcl.Expression) bool { _, ok := maybeJSONExpr.(*expression) return ok } // IsJSONBody returns true if and only if the given body is one that originated // in a JSON document. // // Applications aiming to be syntax-agnostic should not use this function and // should instead use the normal schema-driven or "just attributes' decoding // APIs. // // Howeer, JSON expressions do have a unique behavior whereby various different // source JSON shapes can be interpreted in different ways depending on the // given schema, and so in exceptional cases an application may need to // perform some deeper analysis first in order to distinguish variants of // different physical structure. // // Caution: The normal HCL API allows for HCL body implementations that wrap // other HCL body implementations. This function will return false if given an // expression of some other type that encapsulates a JSON body, even if // the wrapper implementation would in principle preserve the special // decoding behavior of the wrapped body. 
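//
// As an illustrative sketch (added commentary, not original API docs): the
// Body of a file returned by this package's Parse function reports true
// here, a body produced by the native-syntax parser reports false, and a
// wrapper body that merely embeds a JSON body also reports false, per the
// caution above.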
func IsJSONBody(maybeJSONBody hcl.Body) bool { _, ok := maybeJSONBody.(*body) return ok } hcl-2.14.1/json/navigation.go000066400000000000000000000025511431334125700160120ustar00rootroot00000000000000package json import ( "fmt" "strings" ) type navigation struct { root node } // Implementation of hcled.ContextString func (n navigation) ContextString(offset int) string { steps := navigationStepsRev(n.root, offset) if steps == nil { return "" } // We built our slice backwards, so we'll reverse it in-place now. half := len(steps) / 2 // integer division for i := 0; i < half; i++ { steps[i], steps[len(steps)-1-i] = steps[len(steps)-1-i], steps[i] } ret := strings.Join(steps, "") if len(ret) > 0 && ret[0] == '.' { ret = ret[1:] } return ret } func navigationStepsRev(v node, offset int) []string { switch tv := v.(type) { case *objectVal: // Do any of our properties have an object that contains the target // offset? for _, attr := range tv.Attrs { k := attr.Name av := attr.Value switch av.(type) { case *objectVal, *arrayVal: // okay default: continue } if av.Range().ContainsOffset(offset) { return append(navigationStepsRev(av, offset), "."+k) } } case *arrayVal: // Do any of our elements contain the target offset? for i, elem := range tv.Values { switch elem.(type) { case *objectVal, *arrayVal: // okay default: continue } if elem.Range().ContainsOffset(offset) { return append(navigationStepsRev(elem, offset), fmt.Sprintf("[%d]", i)) } } } return nil } hcl-2.14.1/json/navigation_test.go000066400000000000000000000017151431334125700170520ustar00rootroot00000000000000package json import ( "fmt" "strconv" "testing" ) func TestNavigationContextString(t *testing.T) { src := ` { "version": 1, "resource": { "null_resource": { "baz": { "id": "foo" }, "boz": [ { "ov": { } } ] } } } ` file, diags := Parse([]byte(src), "test.json") if len(diags) != 0 { fmt.Printf("offset %d\n", diags[0].Subject.Start.Byte) t.Errorf("Unexpected diagnostics: %s", diags) } if file == nil { t.Fatalf("Got nil file") } nav := file.Nav.(navigation) tests := []struct { Offset int Want string }{ {0, ``}, {8, ``}, {36, `resource`}, {60, `resource.null_resource`}, {89, `resource.null_resource.baz`}, {141, `resource.null_resource.boz`}, } for _, test := range tests { t.Run(strconv.Itoa(test.Offset), func(t *testing.T) { got := nav.ContextString(test.Offset) if got != test.Want { t.Errorf("wrong result\ngot: %s\nwant: %s", got, test.Want) } }) } } hcl-2.14.1/json/parser.go000066400000000000000000000312351431334125700151500ustar00rootroot00000000000000package json import ( "encoding/json" "fmt" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) func parseFileContent(buf []byte, filename string, start hcl.Pos) (node, hcl.Diagnostics) { tokens := scan(buf, pos{Filename: filename, Pos: start}) p := newPeeker(tokens) node, diags := parseValue(p) if len(diags) == 0 && p.Peek().Type != tokenEOF { diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Extraneous data after value", Detail: "Extra characters appear after the JSON value.", Subject: p.Peek().Range.Ptr(), }) } return node, diags } func parseExpression(buf []byte, filename string, start hcl.Pos) (node, hcl.Diagnostics) { tokens := scan(buf, pos{Filename: filename, Pos: start}) p := newPeeker(tokens) node, diags := parseValue(p) if len(diags) == 0 && p.Peek().Type != tokenEOF { diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Extraneous data after value", Detail: "Extra characters appear after the JSON value.", Subject: 
p.Peek().Range.Ptr(), }) } return node, diags } func parseValue(p *peeker) (node, hcl.Diagnostics) { tok := p.Peek() wrapInvalid := func(n node, diags hcl.Diagnostics) (node, hcl.Diagnostics) { if n != nil { return n, diags } return invalidVal{tok.Range}, diags } switch tok.Type { case tokenBraceO: return wrapInvalid(parseObject(p)) case tokenBrackO: return wrapInvalid(parseArray(p)) case tokenNumber: return wrapInvalid(parseNumber(p)) case tokenString: return wrapInvalid(parseString(p)) case tokenKeyword: return wrapInvalid(parseKeyword(p)) case tokenBraceC: return wrapInvalid(nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Missing JSON value", Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.", Subject: &tok.Range, }, }) case tokenBrackC: return wrapInvalid(nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Missing array element value", Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.", Subject: &tok.Range, }, }) case tokenEOF: return wrapInvalid(nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Missing value", Detail: "The JSON data ends prematurely.", Subject: &tok.Range, }, }) default: return wrapInvalid(nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Invalid start of value", Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.", Subject: &tok.Range, }, }) } } func tokenCanStartValue(tok token) bool { switch tok.Type { case tokenBraceO, tokenBrackO, tokenNumber, tokenString, tokenKeyword: return true default: return false } } func parseObject(p *peeker) (node, hcl.Diagnostics) { var diags hcl.Diagnostics open := p.Read() attrs := []*objectAttr{} // recover is used to shift the peeker to what seems to be the end of // our object, so that when we encounter an error we leave the peeker // at a reasonable point in the token stream to continue parsing. recover := func(tok token) { open := 1 for { switch tok.Type { case tokenBraceO: open++ case tokenBraceC: open-- if open <= 1 { return } case tokenEOF: // Ran out of source before we were able to recover, // so we'll bail here and let the caller deal with it. return } tok = p.Read() } } Token: for { if p.Peek().Type == tokenBraceC { break Token } keyNode, keyDiags := parseValue(p) diags = diags.Extend(keyDiags) if keyNode == nil { return nil, diags } keyStrNode, ok := keyNode.(*stringVal) if !ok { return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid object property name", Detail: "A JSON object property name must be a string", Subject: keyNode.StartRange().Ptr(), }) } key := keyStrNode.Value colon := p.Read() if colon.Type != tokenColon { recover(colon) if colon.Type == tokenBraceC || colon.Type == tokenComma { // Catch common mistake of using braces instead of brackets // for an object. return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing object value", Detail: "A JSON object attribute must have a value, introduced by a colon.", Subject: &colon.Range, }) } if colon.Type == tokenEquals { // Possible confusion with native HCL syntax. 
return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing property value colon", Detail: "JSON uses a colon as its name/value delimiter, not an equals sign.", Subject: &colon.Range, }) } return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing property value colon", Detail: "A colon must appear between an object property's name and its value.", Subject: &colon.Range, }) } valNode, valDiags := parseValue(p) diags = diags.Extend(valDiags) if valNode == nil { return nil, diags } attrs = append(attrs, &objectAttr{ Name: key, Value: valNode, NameRange: keyStrNode.SrcRange, }) switch p.Peek().Type { case tokenComma: comma := p.Read() if p.Peek().Type == tokenBraceC { // Special error message for this common mistake return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Trailing comma in object", Detail: "JSON does not permit a trailing comma after the final property in an object.", Subject: &comma.Range, }) } continue Token case tokenEOF: return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unclosed object", Detail: "No closing brace was found for this JSON object.", Subject: &open.Range, }) case tokenBrackC: // Consume the bracket anyway, so that we don't return with the peeker // at a strange place. p.Read() return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Mismatched braces", Detail: "A JSON object must be closed with a brace, not a bracket.", Subject: p.Peek().Range.Ptr(), }) case tokenBraceC: break Token default: recover(p.Read()) return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing attribute seperator comma", Detail: "A comma must appear between each property definition in an object.", Subject: p.Peek().Range.Ptr(), }) } } close := p.Read() return &objectVal{ Attrs: attrs, SrcRange: hcl.RangeBetween(open.Range, close.Range), OpenRange: open.Range, CloseRange: close.Range, }, diags } func parseArray(p *peeker) (node, hcl.Diagnostics) { var diags hcl.Diagnostics open := p.Read() vals := []node{} // recover is used to shift the peeker to what seems to be the end of // our array, so that when we encounter an error we leave the peeker // at a reasonable point in the token stream to continue parsing. recover := func(tok token) { open := 1 for { switch tok.Type { case tokenBrackO: open++ case tokenBrackC: open-- if open <= 1 { return } case tokenEOF: // Ran out of source before we were able to recover, // so we'll bail here and let the caller deal with it. 
return } tok = p.Read() } } Token: for { if p.Peek().Type == tokenBrackC { break Token } valNode, valDiags := parseValue(p) diags = diags.Extend(valDiags) if valNode == nil { return nil, diags } vals = append(vals, valNode) switch p.Peek().Type { case tokenComma: comma := p.Read() if p.Peek().Type == tokenBrackC { // Special error message for this common mistake return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Trailing comma in array", Detail: "JSON does not permit a trailing comma after the final value in an array.", Subject: &comma.Range, }) } continue Token case tokenColon: recover(p.Read()) return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid array value", Detail: "A colon is not used to introduce values in a JSON array.", Subject: p.Peek().Range.Ptr(), }) case tokenEOF: recover(p.Read()) return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Unclosed object", Detail: "No closing bracket was found for this JSON array.", Subject: &open.Range, }) case tokenBraceC: recover(p.Read()) return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Mismatched brackets", Detail: "A JSON array must be closed with a bracket, not a brace.", Subject: p.Peek().Range.Ptr(), }) case tokenBrackC: break Token default: recover(p.Read()) return nil, diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing attribute seperator comma", Detail: "A comma must appear between each value in an array.", Subject: p.Peek().Range.Ptr(), }) } } close := p.Read() return &arrayVal{ Values: vals, SrcRange: hcl.RangeBetween(open.Range, close.Range), OpenRange: open.Range, }, diags } func parseNumber(p *peeker) (node, hcl.Diagnostics) { tok := p.Read() // Use encoding/json to validate the number syntax. // TODO: Do this more directly to produce better diagnostics. var num json.Number err := json.Unmarshal(tok.Bytes, &num) if err != nil { return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Invalid JSON number", Detail: fmt.Sprintf("There is a syntax error in the given JSON number."), Subject: &tok.Range, }, } } // We want to guarantee that we parse numbers the same way as cty (and thus // native syntax HCL) would here, so we'll use the cty parser even though // in most other cases we don't actually introduce cty concepts until // decoding time. We'll unwrap the parsed float immediately afterwards, so // the cty value is just a temporary helper. nv, err := cty.ParseNumberVal(string(num)) if err != nil { // Should never happen if above passed, since JSON numbers are a subset // of what cty can parse... return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Invalid JSON number", Detail: fmt.Sprintf("There is a syntax error in the given JSON number."), Subject: &tok.Range, }, } } return &numberVal{ Value: nv.AsBigFloat(), SrcRange: tok.Range, }, nil } func parseString(p *peeker) (node, hcl.Diagnostics) { tok := p.Read() var str string err := json.Unmarshal(tok.Bytes, &str) if err != nil { var errRange hcl.Range if serr, ok := err.(*json.SyntaxError); ok { errOfs := serr.Offset errPos := tok.Range.Start errPos.Byte += int(errOfs) // TODO: Use the byte offset to properly count unicode // characters for the column, and mark the whole of the // character that was wrong as part of our range. 
errPos.Column += int(errOfs) errEndPos := errPos errEndPos.Byte++ errEndPos.Column++ errRange = hcl.Range{ Filename: tok.Range.Filename, Start: errPos, End: errEndPos, } } else { errRange = tok.Range } var contextRange *hcl.Range if errRange != tok.Range { contextRange = &tok.Range } // FIXME: Eventually we should parse strings directly here so // we can produce a more useful error message in the face fo things // such as invalid escapes, etc. return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Invalid JSON string", Detail: fmt.Sprintf("There is a syntax error in the given JSON string."), Subject: &errRange, Context: contextRange, }, } } return &stringVal{ Value: str, SrcRange: tok.Range, }, nil } func parseKeyword(p *peeker) (node, hcl.Diagnostics) { tok := p.Read() s := string(tok.Bytes) switch s { case "true": return &booleanVal{ Value: true, SrcRange: tok.Range, }, nil case "false": return &booleanVal{ Value: false, SrcRange: tok.Range, }, nil case "null": return &nullVal{ SrcRange: tok.Range, }, nil case "undefined", "NaN", "Infinity": return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Invalid JSON keyword", Detail: fmt.Sprintf("The JavaScript identifier %q cannot be used in JSON.", s), Subject: &tok.Range, }, } default: var dym string if suggest := keywordSuggestion(s); suggest != "" { dym = fmt.Sprintf(" Did you mean %q?", suggest) } return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Invalid JSON keyword", Detail: fmt.Sprintf("%q is not a valid JSON keyword.%s", s, dym), Subject: &tok.Range, }, } } } hcl-2.14.1/json/parser_test.go000066400000000000000000000341711431334125700162110ustar00rootroot00000000000000package json import ( "math/big" "testing" "github.com/go-test/deep" "github.com/hashicorp/hcl/v2" ) func init() { deep.MaxDepth = 999 } func TestParse(t *testing.T) { tests := []struct { Input string Want node DiagCount int }{ // Simple, single-token constructs { `true`, &booleanVal{ Value: true, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 5, Byte: 4}, }, }, 0, }, { `false`, &booleanVal{ Value: false, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, 0, }, { `null`, &nullVal{ SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 5, Byte: 4}, }, }, 0, }, { `undefined`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 10, Byte: 9}, }}, 1, }, { `flase`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }}, 1, }, { `"hello"`, &stringVal{ Value: "hello", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, }, }, 0, }, { `"hello\nworld"`, &stringVal{ Value: "hello\nworld", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, 0, }, { `"hello \"world\""`, &stringVal{ Value: `hello "world"`, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 18, Byte: 17}, }, }, 0, }, { `"hello \\"`, &stringVal{ Value: "hello \\", SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 11, Byte: 10}, }, }, 0, }, { `"hello`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }}, 1, }, { `"he\llo"`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 
1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }}, 1, }, { `1`, &numberVal{ Value: mustBigFloat("1"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, 0, }, { `1.2`, &numberVal{ Value: mustBigFloat("1.2"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, }, 0, }, { `-1`, &numberVal{ Value: mustBigFloat("-1"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 3, Byte: 2}, }, }, 0, }, { `1.2e5`, &numberVal{ Value: mustBigFloat("120000"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, 0, }, { `1.2e+5`, &numberVal{ Value: mustBigFloat("120000"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, }, 0, }, { `1.2e-5`, &numberVal{ Value: mustBigFloat("1.2e-5"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, }, 0, }, { `.1`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 3, Byte: 2}, }}, 1, }, { `+2`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 3, Byte: 2}, }}, 1, }, { `1 2`, &numberVal{ Value: mustBigFloat("1"), SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, 1, }, // Objects { `{"hello": true}`, &objectVal{ Attrs: []*objectAttr{ { Name: "hello", Value: &booleanVal{ Value: true, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 11, Byte: 10}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 16, Byte: 15}, }, OpenRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, CloseRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 15, Byte: 14}, End: hcl.Pos{Line: 1, Column: 16, Byte: 15}, }, }, 0, }, { `{"hello": true, "bye": false}`, &objectVal{ Attrs: []*objectAttr{ { Name: "hello", Value: &booleanVal{ Value: true, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 11, Byte: 10}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, }, { Name: "bye", Value: &booleanVal{ Value: false, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 24, Byte: 23}, End: hcl.Pos{Line: 1, Column: 29, Byte: 28}, }, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 17, Byte: 16}, End: hcl.Pos{Line: 1, Column: 22, Byte: 21}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 30, Byte: 29}, }, OpenRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, CloseRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 29, Byte: 28}, End: hcl.Pos{Line: 1, Column: 30, Byte: 29}, }, }, 0, }, { `{}`, &objectVal{ Attrs: []*objectAttr{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 3, Byte: 2}, }, OpenRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, CloseRange: hcl.Range{ Start: hcl.Pos{Line: 1, 
Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 3, Byte: 2}, }, }, 0, }, { `{"hello":true`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `{"hello":true]`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `{"hello":true,}`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `{true:false}`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `{"hello": true, "hello": true}`, &objectVal{ Attrs: []*objectAttr{ { Name: "hello", Value: &booleanVal{ Value: true, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 11, Byte: 10}, End: hcl.Pos{Line: 1, Column: 15, Byte: 14}, }, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 9, Byte: 8}, }, }, { Name: "hello", Value: &booleanVal{ Value: true, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 26, Byte: 25}, End: hcl.Pos{Line: 1, Column: 30, Byte: 29}, }, }, NameRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 17, Byte: 16}, End: hcl.Pos{Line: 1, Column: 24, Byte: 23}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 31, Byte: 30}, }, OpenRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, CloseRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 30, Byte: 29}, End: hcl.Pos{Line: 1, Column: 31, Byte: 30}, }, }, 0, }, { `{"hello": true, "hello": true, "hello", true}`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, // comma used where colon is expected }, { `{"hello", "world"}`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `[]`, &arrayVal{ Values: []node{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 3, Byte: 2}, }, OpenRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, 0, }, { `[true]`, &arrayVal{ Values: []node{ &booleanVal{ Value: true, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 7, Byte: 6}, }, OpenRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, 0, }, { `[true, false]`, &arrayVal{ Values: []node{ &booleanVal{ Value: true, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 6, Byte: 5}, }, }, &booleanVal{ Value: false, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 8, Byte: 7}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, }, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 14, Byte: 13}, }, OpenRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, 0, }, { `[[]]`, &arrayVal{ Values: []node{ &arrayVal{ Values: []node{}, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 4, Byte: 3}, }, OpenRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 3, Byte: 2}, }, }, }, SrcRange: 
hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 5, Byte: 4}, }, OpenRange: hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, 0, }, { `[`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 2, }, { `[true`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `]`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `[true,]`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `[[],]`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `["hello":true]`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `[true}`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `{"wrong"=true}`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `{"wrong" = true}`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, { `{"wrong" true}`, invalidVal{hcl.Range{ Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }}, 1, }, } for _, test := range tests { t.Run(test.Input, func(t *testing.T) { got, diag := parseFileContent([]byte(test.Input), "", hcl.Pos{Byte: 0, Line: 1, Column: 1}) if len(diag) != test.DiagCount { t.Errorf("got %d diagnostics; want %d", len(diag), test.DiagCount) for _, d := range diag { t.Logf(" - %s", d.Error()) } } if diff := deep.Equal(got, test.Want); diff != nil { for _, problem := range diff { t.Error(problem) } } }) } } func TestParseWithPos(t *testing.T) { tests := []struct { Input string StartPos hcl.Pos Want node DiagCount int }{ // Simple, single-token constructs { `true`, hcl.Pos{Byte: 0, Line: 3, Column: 10}, &booleanVal{ Value: true, SrcRange: hcl.Range{ Start: hcl.Pos{Line: 3, Column: 10, Byte: 0}, End: hcl.Pos{Line: 3, Column: 14, Byte: 4}, }, }, 0, }, } for _, test := range tests { t.Run(test.Input, func(t *testing.T) { got, diag := parseFileContent([]byte(test.Input), "", test.StartPos) if len(diag) != test.DiagCount { t.Errorf("got %d diagnostics; want %d", len(diag), test.DiagCount) for _, d := range diag { t.Logf(" - %s", d.Error()) } } if diff := deep.Equal(got, test.Want); diff != nil { for _, problem := range diff { t.Error(problem) } } }) } } func mustBigFloat(s string) *big.Float { f, _, err := (&big.Float{}).Parse(s, 10) if err != nil { panic(err) } return f } hcl-2.14.1/json/peeker.go000066400000000000000000000005121431334125700151210ustar00rootroot00000000000000package json type peeker struct { tokens []token pos int } func newPeeker(tokens []token) *peeker { return &peeker{ tokens: tokens, pos: 0, } } func (p *peeker) Peek() token { return p.tokens[p.pos] } func (p *peeker) Read() token { ret := p.tokens[p.pos] if ret.Type != tokenEOF { p.pos++ } return ret } hcl-2.14.1/json/public.go000066400000000000000000000072061431334125700151330ustar00rootroot00000000000000package json import ( "fmt" "io/ioutil" "os" "github.com/hashicorp/hcl/v2" ) // Parse attempts to parse the given buffer as JSON and, if successful, returns // a 
hcl.File for the HCL configuration represented by it. // // This is not a generic JSON parser. Instead, it deals only with the profile // of JSON used to express HCL configuration. // // The returned file is valid only if the returned diagnostics returns false // from its HasErrors method. If HasErrors returns true, the file represents // the subset of data that was able to be parsed, which may be none. func Parse(src []byte, filename string) (*hcl.File, hcl.Diagnostics) { return ParseWithStartPos(src, filename, hcl.Pos{Byte: 0, Line: 1, Column: 1}) } // ParseWithStartPos attempts to parse like json.Parse, but unlike json.Parse // you can pass a start position of the given JSON as a hcl.Pos. // // In most cases json.Parse should be sufficient, but it can be useful for parsing // a part of JSON with correct positions. func ParseWithStartPos(src []byte, filename string, start hcl.Pos) (*hcl.File, hcl.Diagnostics) { rootNode, diags := parseFileContent(src, filename, start) switch rootNode.(type) { case *objectVal, *arrayVal: // okay default: diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Root value must be object", Detail: "The root value in a JSON-based configuration must be either a JSON object or a JSON array of objects.", Subject: rootNode.StartRange().Ptr(), }) // Since we've already produced an error message for this being // invalid, we'll return an empty placeholder here so that trying to // extract content from our root body won't produce a redundant // error saying the same thing again in more general terms. fakePos := hcl.Pos{ Byte: 0, Line: 1, Column: 1, } fakeRange := hcl.Range{ Filename: filename, Start: fakePos, End: fakePos, } rootNode = &objectVal{ Attrs: []*objectAttr{}, SrcRange: fakeRange, OpenRange: fakeRange, } } file := &hcl.File{ Body: &body{ val: rootNode, }, Bytes: src, Nav: navigation{rootNode}, } return file, diags } // ParseExpression parses the given buffer as a standalone JSON expression, // returning it as an instance of Expression. func ParseExpression(src []byte, filename string) (hcl.Expression, hcl.Diagnostics) { return ParseExpressionWithStartPos(src, filename, hcl.Pos{Byte: 0, Line: 1, Column: 1}) } // ParseExpressionWithStartPos parses like json.ParseExpression, but unlike // json.ParseExpression you can pass a start position of the given JSON // expression as a hcl.Pos. func ParseExpressionWithStartPos(src []byte, filename string, start hcl.Pos) (hcl.Expression, hcl.Diagnostics) { node, diags := parseExpression(src, filename, start) return &expression{src: node}, diags } // ParseFile is a convenience wrapper around Parse that first attempts to load // data from the given filename, passing the result to Parse if successful. // // If the file cannot be read, an error diagnostic with nil context is returned. 
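//
// A minimal usage sketch from a caller's perspective (not part of the
// original source; the path "config.hcl.json" is only an illustrative
// placeholder):
//
//	f, diags := json.ParseFile("config.hcl.json")
//	if diags.HasErrors() {
//		// Handle or report the diagnostics. f is nil only if the
//		// file could not be opened or read at all.
//		return
//	}
//	attrs, moreDiags := f.Body.JustAttributes()
//	_, _ = attrs, moreDiags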
func ParseFile(filename string) (*hcl.File, hcl.Diagnostics) { f, err := os.Open(filename) if err != nil { return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Failed to open file", Detail: fmt.Sprintf("The file %q could not be opened.", filename), }, } } defer f.Close() src, err := ioutil.ReadAll(f) if err != nil { return nil, hcl.Diagnostics{ { Severity: hcl.DiagError, Summary: "Failed to read file", Detail: fmt.Sprintf("The file %q was opened, but an error occured while reading it.", filename), }, } } return Parse(src, filename) } hcl-2.14.1/json/public_test.go000066400000000000000000000171321431334125700161710ustar00rootroot00000000000000package json import ( "fmt" "strings" "testing" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) func TestParse_nonObject(t *testing.T) { src := `true` file, diags := Parse([]byte(src), "") if len(diags) != 1 { t.Errorf("got %d diagnostics; want 1", len(diags)) } if file == nil { t.Errorf("got nil File; want actual file") } if file.Body == nil { t.Fatalf("got nil Body; want actual body") } if file.Body.(*body).val == nil { t.Errorf("got nil Body object; want placeholder object") } } func TestParseTemplate(t *testing.T) { src := `{"greeting": "hello ${\"world\"}"}` file, diags := Parse([]byte(src), "") if len(diags) != 0 { t.Errorf("got %d diagnostics on parse; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } if file == nil { t.Errorf("got nil File; want actual file") } if file.Body == nil { t.Fatalf("got nil Body; want actual body") } attrs, diags := file.Body.JustAttributes() if len(diags) != 0 { t.Errorf("got %d diagnostics on decode; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } val, diags := attrs["greeting"].Expr.Value(&hcl.EvalContext{}) if len(diags) != 0 { t.Errorf("got %d diagnostics on eval; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } if !val.RawEquals(cty.StringVal("hello world")) { t.Errorf("wrong result %#v; want %#v", val, cty.StringVal("hello world")) } } func TestParseTemplateUnwrap(t *testing.T) { src := `{"greeting": "${true}"}` file, diags := Parse([]byte(src), "") if len(diags) != 0 { t.Errorf("got %d diagnostics on parse; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } if file == nil { t.Errorf("got nil File; want actual file") } if file.Body == nil { t.Fatalf("got nil Body; want actual body") } attrs, diags := file.Body.JustAttributes() if len(diags) != 0 { t.Errorf("got %d diagnostics on decode; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } val, diags := attrs["greeting"].Expr.Value(&hcl.EvalContext{}) if len(diags) != 0 { t.Errorf("got %d diagnostics on eval; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } if !val.RawEquals(cty.True) { t.Errorf("wrong result %#v; want %#v", val, cty.True) } } func TestParse_malformed(t *testing.T) { src := `{ "http_proxy_url: "http://xxxxxx", }` file, diags := Parse([]byte(src), "") if got, want := len(diags), 2; got != want { t.Errorf("got %d diagnostics; want %d", got, want) } if err, want := diags.Error(), `Missing property value colon`; !strings.Contains(err, want) { t.Errorf("diags are %q, but should contain %q", err, want) } if file == nil { t.Errorf("got nil File; want actual file") } } func TestParseWithStartPos(t *testing.T) { src := `{ "foo": { "bar": "baz" } }` part := `{ "bar": "baz" }` file, diags := Parse([]byte(src), "") partFile, partDiags 
:= ParseWithStartPos([]byte(part), "", hcl.Pos{Byte: 0, Line: 2, Column: 10}) if len(diags) != 0 { t.Errorf("got %d diagnostics on parse src; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } if len(partDiags) != 0 { t.Errorf("got %d diagnostics on parse part src; want 0", len(partDiags)) for _, diag := range partDiags { t.Logf("- %s", diag.Error()) } } if file == nil { t.Errorf("got nil File; want actual file") } if file.Body == nil { t.Fatalf("got nil Body; want actual body") } if partFile == nil { t.Errorf("got nil part File; want actual file") } if partFile.Body == nil { t.Fatalf("got nil part Body; want actual body") } content, diags := file.Body.Content(&hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{{Type: "foo"}}, }) if len(diags) != 0 { t.Errorf("got %d diagnostics on decode; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } attrs, diags := content.Blocks[0].Body.JustAttributes() if len(diags) != 0 { t.Errorf("got %d diagnostics on decode; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } srcRange := attrs["bar"].Expr.Range() partAttrs, diags := partFile.Body.JustAttributes() if len(diags) != 0 { t.Errorf("got %d diagnostics on decode; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } partRange := partAttrs["bar"].Expr.Range() if srcRange.String() != partRange.String() { t.Errorf("The two ranges did not match: src=%s, part=%s", srcRange, partRange) } } func TestParseExpression(t *testing.T) { tests := []struct { Input string Want string }{ { `"hello"`, `cty.StringVal("hello")`, }, { `"hello ${noun}"`, `cty.StringVal("hello world")`, }, { "true", "cty.True", }, { "false", "cty.False", }, { "1", "cty.NumberIntVal(1)", }, { "{}", "cty.EmptyObjectVal", }, { `{"foo":"bar","baz":1}`, `cty.ObjectVal(map[string]cty.Value{"baz":cty.NumberIntVal(1), "foo":cty.StringVal("bar")})`, }, { "[]", "cty.EmptyTupleVal", }, { `["1",2,3]`, `cty.TupleVal([]cty.Value{cty.StringVal("1"), cty.NumberIntVal(2), cty.NumberIntVal(3)})`, }, } for _, test := range tests { t.Run(test.Input, func(t *testing.T) { expr, diags := ParseExpression([]byte(test.Input), "") if diags.HasErrors() { t.Errorf("got %d diagnostics; want 0", len(diags)) for _, d := range diags { t.Logf(" - %s", d.Error()) } } value, diags := expr.Value(&hcl.EvalContext{ Variables: map[string]cty.Value{ "noun": cty.StringVal("world"), }, }) if diags.HasErrors() { t.Errorf("got %d diagnostics on decode value; want 0", len(diags)) for _, d := range diags { t.Logf(" - %s", d.Error()) } } got := fmt.Sprintf("%#v", value) if got != test.Want { t.Errorf("got %s, but want %s", got, test.Want) } }) } } func TestParseExpression_malformed(t *testing.T) { src := `invalid` expr, diags := ParseExpression([]byte(src), "") if got, want := len(diags), 1; got != want { t.Errorf("got %d diagnostics; want %d", got, want) } if err, want := diags.Error(), `Invalid JSON keyword`; !strings.Contains(err, want) { t.Errorf("diags are %q, but should contain %q", err, want) } if expr == nil { t.Errorf("got nil Expression; want actual expression") } } func TestParseExpressionWithStartPos(t *testing.T) { src := `{ "foo": "bar" }` part := `"bar"` file, diags := Parse([]byte(src), "") partExpr, partDiags := ParseExpressionWithStartPos([]byte(part), "", hcl.Pos{Byte: 0, Line: 2, Column: 10}) if len(diags) != 0 { t.Errorf("got %d diagnostics on parse src; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } if 
len(partDiags) != 0 { t.Errorf("got %d diagnostics on parse part src; want 0", len(partDiags)) for _, diag := range partDiags { t.Logf("- %s", diag.Error()) } } if file == nil { t.Errorf("got nil File; want actual file") } if file.Body == nil { t.Errorf("got nil Body: want actual body") } if partExpr == nil { t.Errorf("got nil Expression; want actual expression") } content, diags := file.Body.Content(&hcl.BodySchema{ Attributes: []hcl.AttributeSchema{{Name: "foo"}}, }) if len(diags) != 0 { t.Errorf("got %d diagnostics on decode; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } expr := content.Attributes["foo"].Expr if expr.Range().String() != partExpr.Range().String() { t.Errorf("The two ranges did not match: src=%s, part=%s", expr.Range(), partExpr.Range()) } } hcl-2.14.1/json/scanner.go000066400000000000000000000166211431334125700153070ustar00rootroot00000000000000package json import ( "fmt" "github.com/apparentlymart/go-textseg/v13/textseg" "github.com/hashicorp/hcl/v2" ) //go:generate stringer -type tokenType scanner.go type tokenType rune const ( tokenBraceO tokenType = '{' tokenBraceC tokenType = '}' tokenBrackO tokenType = '[' tokenBrackC tokenType = ']' tokenComma tokenType = ',' tokenColon tokenType = ':' tokenKeyword tokenType = 'K' tokenString tokenType = 'S' tokenNumber tokenType = 'N' tokenEOF tokenType = '␄' tokenInvalid tokenType = 0 tokenEquals tokenType = '=' // used only for reminding the user of JSON syntax ) type token struct { Type tokenType Bytes []byte Range hcl.Range } // scan returns the primary tokens for the given JSON buffer in sequence. // // The responsibility of this pass is to just mark the slices of the buffer // as being of various types. It is lax in how it interprets the multi-byte // token types keyword, string and number, preferring to capture erroneous // extra bytes that we presume the user intended to be part of the token // so that we can generate more helpful diagnostics in the parser. func scan(buf []byte, start pos) []token { var tokens []token p := start for { if len(buf) == 0 { tokens = append(tokens, token{ Type: tokenEOF, Bytes: nil, Range: posRange(p, p), }) return tokens } buf, p = skipWhitespace(buf, p) if len(buf) == 0 { tokens = append(tokens, token{ Type: tokenEOF, Bytes: nil, Range: posRange(p, p), }) return tokens } start = p first := buf[0] switch { case first == '{' || first == '}' || first == '[' || first == ']' || first == ',' || first == ':' || first == '=': p.Pos.Column++ p.Pos.Byte++ tokens = append(tokens, token{ Type: tokenType(first), Bytes: buf[0:1], Range: posRange(start, p), }) buf = buf[1:] case first == '"': var tokBuf []byte tokBuf, buf, p = scanString(buf, p) tokens = append(tokens, token{ Type: tokenString, Bytes: tokBuf, Range: posRange(start, p), }) case byteCanStartNumber(first): var tokBuf []byte tokBuf, buf, p = scanNumber(buf, p) tokens = append(tokens, token{ Type: tokenNumber, Bytes: tokBuf, Range: posRange(start, p), }) case byteCanStartKeyword(first): var tokBuf []byte tokBuf, buf, p = scanKeyword(buf, p) tokens = append(tokens, token{ Type: tokenKeyword, Bytes: tokBuf, Range: posRange(start, p), }) default: tokens = append(tokens, token{ Type: tokenInvalid, Bytes: buf[:1], Range: start.Range(1, 1), }) // If we've encountered an invalid then we might as well stop // scanning since the parser won't proceed beyond this point. // We insert a synthetic EOF marker here to match the expectations // of consumers of this data structure. 
p.Pos.Column++ p.Pos.Byte++ tokens = append(tokens, token{ Type: tokenEOF, Bytes: nil, Range: posRange(p, p), }) return tokens } } } func byteCanStartNumber(b byte) bool { switch b { // We are slightly more tolerant than JSON requires here since we // expect the parser will make a stricter interpretation of the // number bytes, but we specifically don't allow 'e' or 'E' here // since we want the scanner to treat that as the start of an // invalid keyword instead, to produce more intelligible error messages. case '-', '+', '.', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': return true default: return false } } func scanNumber(buf []byte, start pos) ([]byte, []byte, pos) { // The scanner doesn't check that the sequence of digit-ish bytes is // in a valid order. The parser must do this when decoding a number // token. var i int p := start Byte: for i = 0; i < len(buf); i++ { switch buf[i] { case '-', '+', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': p.Pos.Byte++ p.Pos.Column++ default: break Byte } } return buf[:i], buf[i:], p } func byteCanStartKeyword(b byte) bool { switch { // We allow any sequence of alphabetical characters here, even though // JSON is more constrained, so that we can collect what we presume // the user intended to be a single keyword and then check its validity // in the parser, where we can generate better diagnostics. // So e.g. we want to be able to say: // unrecognized keyword "True". Did you mean "true"? case isAlphabetical(b): return true default: return false } } func scanKeyword(buf []byte, start pos) ([]byte, []byte, pos) { var i int p := start Byte: for i = 0; i < len(buf); i++ { b := buf[i] switch { case isAlphabetical(b) || b == '_': p.Pos.Byte++ p.Pos.Column++ default: break Byte } } return buf[:i], buf[i:], p } func scanString(buf []byte, start pos) ([]byte, []byte, pos) { // The scanner doesn't validate correct use of escapes, etc. It pays // attention to escapes only for the purpose of identifying the closing // quote character. It's the parser's responsibility to do proper // validation. // // The scanner also doesn't specifically detect unterminated string // literals, though they can be identified in the parser by checking if // the final byte in a string token is the double-quote character. // Skip the opening quote symbol i := 1 p := start p.Pos.Byte++ p.Pos.Column++ escaping := false Byte: for i < len(buf) { b := buf[i] switch { case b == '\\': escaping = !escaping p.Pos.Byte++ p.Pos.Column++ i++ case b == '"': p.Pos.Byte++ p.Pos.Column++ i++ if !escaping { break Byte } escaping = false case b < 32: break Byte default: // Advance by one grapheme cluster, so that we consider each // grapheme to be a "column". // Ignoring error because this scanner cannot produce errors. advance, _, _ := textseg.ScanGraphemeClusters(buf[i:], true) p.Pos.Byte += advance p.Pos.Column++ i += advance escaping = false } } return buf[:i], buf[i:], p } func skipWhitespace(buf []byte, start pos) ([]byte, pos) { var i int p := start Byte: for i = 0; i < len(buf); i++ { switch buf[i] { case ' ': p.Pos.Byte++ p.Pos.Column++ case '\n': p.Pos.Byte++ p.Pos.Column = 1 p.Pos.Line++ case '\r': // For the purpose of line/column counting we consider a // carriage return to take up no space, assuming that it will // be paired up with a newline (on Windows, for example) that // will account for both of them. p.Pos.Byte++ case '\t': // We arbitrarily count a tab as if it were two spaces, because // we need to choose _some_ number here. 
This means any system // that renders code on-screen with markers must itself treat // tabs as a pair of spaces for rendering purposes, or instead // use the byte offset and back into its own column position. p.Pos.Byte++ p.Pos.Column += 2 default: break Byte } } return buf[i:], p } type pos struct { Filename string Pos hcl.Pos } func (p *pos) Range(byteLen, charLen int) hcl.Range { start := p.Pos end := p.Pos end.Byte += byteLen end.Column += charLen return hcl.Range{ Filename: p.Filename, Start: start, End: end, } } func posRange(start, end pos) hcl.Range { return hcl.Range{ Filename: start.Filename, Start: start.Pos, End: end.Pos, } } func (t token) GoString() string { return fmt.Sprintf("json.token{json.%s, []byte(%q), %#v}", t.Type, t.Bytes, t.Range) } func isAlphabetical(b byte) bool { return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') } hcl-2.14.1/json/scanner_test.go000066400000000000000000000323421431334125700163440ustar00rootroot00000000000000package json import ( "bytes" "fmt" "reflect" "testing" "github.com/hashicorp/hcl/v2" ) func TestScan(t *testing.T) { tests := []struct { Input string Want []token }{ { ``, []token{ { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, }, }, }, }, { ` `, []token{ { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 3, Line: 1, Column: 4, }, End: hcl.Pos{ Byte: 3, Line: 1, Column: 4, }, }, }, }, }, { `{}`, []token{ { Type: tokenBraceO, Bytes: []byte(`{`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, }, }, { Type: tokenBraceC, Bytes: []byte(`}`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, End: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, }, }, }, }, { `][`, []token{ { Type: tokenBrackC, Bytes: []byte(`]`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, }, }, { Type: tokenBrackO, Bytes: []byte(`[`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, End: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, }, }, }, }, { `:,`, []token{ { Type: tokenColon, Bytes: []byte(`:`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, }, }, { Type: tokenComma, Bytes: []byte(`,`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, End: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, }, }, }, }, { `1`, []token{ { Type: tokenNumber, Bytes: []byte(`1`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, }, }, }, }, { ` 1`, []token{ { Type: tokenNumber, Bytes: []byte(`1`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, End: hcl.Pos{ Byte: 3, Line: 1, Column: 4, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 3, Line: 1, Column: 4, }, End: hcl.Pos{ Byte: 3, Line: 1, Column: 4, }, }, }, }, }, { ` 12`, []token{ { Type: 
tokenNumber, Bytes: []byte(`12`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, End: hcl.Pos{ Byte: 4, Line: 1, Column: 5, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 4, Line: 1, Column: 5, }, End: hcl.Pos{ Byte: 4, Line: 1, Column: 5, }, }, }, }, }, { `1 2`, []token{ { Type: tokenNumber, Bytes: []byte(`1`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, }, }, { Type: tokenNumber, Bytes: []byte(`2`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, End: hcl.Pos{ Byte: 3, Line: 1, Column: 4, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 3, Line: 1, Column: 4, }, End: hcl.Pos{ Byte: 3, Line: 1, Column: 4, }, }, }, }, }, { "\n1\n 2", []token{ { Type: tokenNumber, Bytes: []byte(`1`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 1, Line: 2, Column: 1, }, End: hcl.Pos{ Byte: 2, Line: 2, Column: 2, }, }, }, { Type: tokenNumber, Bytes: []byte(`2`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 4, Line: 3, Column: 2, }, End: hcl.Pos{ Byte: 5, Line: 3, Column: 3, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 5, Line: 3, Column: 3, }, End: hcl.Pos{ Byte: 5, Line: 3, Column: 3, }, }, }, }, }, { `-1 2.5`, []token{ { Type: tokenNumber, Bytes: []byte(`-1`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, }, }, { Type: tokenNumber, Bytes: []byte(`2.5`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 3, Line: 1, Column: 4, }, End: hcl.Pos{ Byte: 6, Line: 1, Column: 7, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 6, Line: 1, Column: 7, }, End: hcl.Pos{ Byte: 6, Line: 1, Column: 7, }, }, }, }, }, { `true`, []token{ { Type: tokenKeyword, Bytes: []byte(`true`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 4, Line: 1, Column: 5, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 4, Line: 1, Column: 5, }, End: hcl.Pos{ Byte: 4, Line: 1, Column: 5, }, }, }, }, }, { `[true]`, []token{ { Type: tokenBrackO, Bytes: []byte(`[`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, }, }, { Type: tokenKeyword, Bytes: []byte(`true`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 5, Line: 1, Column: 6, }, }, }, { Type: tokenBrackC, Bytes: []byte(`]`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 5, Line: 1, Column: 6, }, End: hcl.Pos{ Byte: 6, Line: 1, Column: 7, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 6, Line: 1, Column: 7, }, End: hcl.Pos{ Byte: 6, Line: 1, Column: 7, }, }, }, }, }, { `""`, []token{ { Type: tokenString, Bytes: []byte(`""`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, End: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, }, }, }, }, { `"hello"`, []token{ { Type: tokenString, Bytes: []byte(`"hello"`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 7, Line: 1, Column: 8, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 7, Line: 1, Column: 8, }, End: hcl.Pos{ Byte: 7, Line: 1, Column: 8, }, }, }, }, }, { `"he\"llo"`, []token{ { Type: tokenString, Bytes: []byte(`"he\"llo"`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 9, Line: 1, Column: 10, }, }, }, { Type: 
tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 9, Line: 1, Column: 10, }, End: hcl.Pos{ Byte: 9, Line: 1, Column: 10, }, }, }, }, }, { `"hello\\" 1`, []token{ { Type: tokenString, Bytes: []byte(`"hello\\"`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 9, Line: 1, Column: 10, }, }, }, { Type: tokenNumber, Bytes: []byte(`1`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 10, Line: 1, Column: 11, }, End: hcl.Pos{ Byte: 11, Line: 1, Column: 12, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 11, Line: 1, Column: 12, }, End: hcl.Pos{ Byte: 11, Line: 1, Column: 12, }, }, }, }, }, { `"🇬🇧"`, []token{ { Type: tokenString, Bytes: []byte(`"🇬🇧"`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 10, Line: 1, Column: 4, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 10, Line: 1, Column: 4, }, End: hcl.Pos{ Byte: 10, Line: 1, Column: 4, }, }, }, }, }, { `"á́́́́́́́"`, []token{ { Type: tokenString, Bytes: []byte(`"á́́́́́́́"`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 19, Line: 1, Column: 4, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 19, Line: 1, Column: 4, }, End: hcl.Pos{ Byte: 19, Line: 1, Column: 4, }, }, }, }, }, { `&`, []token{ { Type: tokenInvalid, Bytes: []byte(`&`), Range: hcl.Range{ Start: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, End: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, }, }, { Type: tokenEOF, Range: hcl.Range{ Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, }, }, }, }, } for _, test := range tests { t.Run(test.Input, func(t *testing.T) { buf := []byte(test.Input) start := pos{ Filename: "", Pos: hcl.Pos{ Byte: 0, Line: 1, Column: 1, }, } got := scan(buf, start) if !reflect.DeepEqual(got, test.Want) { errMsg := &bytes.Buffer{} errMsg.WriteString("wrong result\ngot:\n") if len(got) == 0 { errMsg.WriteString(" (empty slice)\n") } for _, tok := range got { fmt.Fprintf(errMsg, " - %#v\n", tok) } errMsg.WriteString("want:\n") if len(test.Want) == 0 { errMsg.WriteString(" (empty slice)\n") } for _, tok := range test.Want { fmt.Fprintf(errMsg, " - %#v\n", tok) } t.Error(errMsg.String()) } }) } } hcl-2.14.1/json/spec.md000066400000000000000000000325511431334125700146030ustar00rootroot00000000000000# HCL JSON Syntax Specification This is the specification for the JSON serialization for hcl. HCL is a system for defining configuration languages for applications. The HCL information model is designed to support multiple concrete syntaxes for configuration, and this JSON-based format complements [the native syntax](../hclsyntax/spec.md) by being easy to machine-generate, whereas the native syntax is oriented towards human authoring and maintenance This syntax is defined in terms of JSON as defined in [RFC7159](https://tools.ietf.org/html/rfc7159). As such it inherits the JSON grammar as-is, and merely defines a specific methodology for interpreting JSON constructs into HCL structural elements and expressions. This mapping is defined such that valid JSON-serialized HCL input can be _produced_ using standard JSON implementations in various programming languages. _Parsing_ such JSON has some additional constraints not beyond what is normally supported by JSON parsers, so a specialized parser may be required that is able to: - Preserve the relative ordering of properties defined in an object. - Preserve multiple definitions of the same property name. 
- Preserve numeric values to the precision required by the number type in [the HCL syntax-agnostic information model](../spec.md). - Retain source location information for parsed tokens/constructs in order to produce good error messages. ## Structural Elements [The HCL syntax-agnostic information model](../spec.md) defines a _body_ as an abstract container for attribute definitions and child blocks. A body is represented in JSON as either a single JSON object or a JSON array of objects. Body processing is in terms of JSON object properties, visited in the order they appear in the input. Where a body is represented by a single JSON object, the properties of that object are visited in order. Where a body is represented by a JSON array, each of its elements are visited in order and each element has its properties visited in order. If any element of the array is not a JSON object then the input is erroneous. When a body is being processed in the _dynamic attributes_ mode, the allowance of a JSON array in the previous paragraph does not apply and instead a single JSON object is always required. As defined in the language-agnostic model, body processing is in terms of a schema which provides context for interpreting the body's content. For JSON bodies, the schema is crucial to allow differentiation of attribute definitions and block definitions, both of which are represented via object properties. The special property name `"//"`, when used in an object representing a HCL body, is parsed and ignored. A property with this name can be used to include human-readable comments. (This special property name is _not_ processed in this way for any _other_ HCL constructs that are represented as JSON objects.) ### Attributes Where the given schema describes an attribute with a given name, the object property with the matching name — if present — serves as the attribute's definition. When a body is being processed in the _dynamic attributes_ mode, each object property serves as an attribute definition for the attribute whose name matches the property name. The value of an attribute definition property is interpreted as an _expression_, as described in a later section. Given a schema that calls for an attribute named "foo", a JSON object like the following provides a definition for that attribute: ```json { "foo": "bar baz" } ``` ### Blocks Where the given schema describes a block with a given type name, each object property with the matching name serves as a definition of zero or more blocks of that type. Processing of child blocks is in terms of nested JSON objects and arrays. If the schema defines one or more _labels_ for the block type, a nested JSON object or JSON array of objects is required for each labelling level. These are flattened to a single ordered sequence of object properties using the same algorithm as for body content as defined above. Each object property serves as a label value at the corresponding level. After any labelling levels, the next nested value is either a JSON object representing a single block body, or a JSON array of JSON objects that each represent a single block body. Use of an array accommodates the definition of multiple blocks that have identical type and labels. 
Given a schema that calls for a block type named "foo" with no labels, the following JSON objects are all valid definitions of zero or more blocks of this type: ```json { "foo": { "child_attr": "baz" } } ``` ```json { "foo": [ { "child_attr": "baz" }, { "child_attr": "boz" } ] } ``` ```json { "foo": [] } ``` The first of these defines a single child block of type "foo". The second defines _two_ such blocks. The final example shows a degenerate definition of zero blocks, though generators should prefer to omit the property entirely in this scenario. Given a schema that calls for a block type named "foo" with _two_ labels, the extra label levels must be represented as objects or arrays of objects as in the following examples: ```json { "foo": { "bar": { "baz": { "child_attr": "baz" }, "boz": { "child_attr": "baz" } }, "boz": { "baz": { "child_attr": "baz" } } } } ``` ```json { "foo": { "bar": { "baz": { "child_attr": "baz" }, "boz": { "child_attr": "baz" } }, "boz": { "baz": [ { "child_attr": "baz" }, { "child_attr": "boz" } ] } } } ``` ```json { "foo": [ { "bar": { "baz": { "child_attr": "baz" }, "boz": { "child_attr": "baz" } } }, { "bar": { "baz": [ { "child_attr": "baz" }, { "child_attr": "boz" } ] } } ] } ``` ```json { "foo": { "bar": { "baz": { "child_attr": "baz" }, "boz": { "child_attr": "baz" } }, "bar": { "baz": [ { "child_attr": "baz" }, { "child_attr": "boz" } ] } } } ``` Arrays can be introduced at either the label definition or block body definition levels to define multiple definitions of the same block type or labels while preserving order. A JSON HCL parser _must_ support duplicate definitions of the same property name within a single object, preserving all of them and the relative ordering between them. The array-based forms are also required so that JSON HCL configurations can be produced with JSON producing libraries that are not able to preserve property definition order and multiple definitions of the same property. ## Expressions JSON lacks a native expression syntax, so the HCL JSON syntax instead defines a mapping for each of the JSON value types, including a special mapping for strings that allows optional use of arbitrary expressions. ### Objects When interpreted as an expression, a JSON object represents a value of a HCL object type. Each property of the JSON object represents an attribute of the HCL object type. The property name string given in the JSON input is interpreted as a string expression as described below, and its result is converted to string as defined by the syntax-agnostic information model. If such a conversion is not possible, an error is produced and evaluation fails. An instance of the constructed object type is then created, whose values are interpreted by again recursively applying the mapping rules defined in this section to each of the property values. If any evaluated property name strings produce null values, an error is produced and evaluation fails. If any produce _unknown_ values, the _entire object's_ result is an unknown value of the dynamic pseudo-type, signalling that the type of the object cannot be determined. It is an error to define the same property name multiple times within a single JSON object interpreted as an expression. In full expression mode, this constraint applies to the name expression results after conversion to string, rather than the raw string that may contain interpolation expressions. ### Arrays When interpreted as an expression, a JSON array represents a value of a HCL tuple type. 
Each element of the JSON array represents an element of the HCL tuple type. The tuple type is constructed by enumerating the JSON array elements, creating for each an element whose type is the result of recursively applying the expression mapping rules. Correspondence is preserved between the array element indices and the tuple element indices. An instance of the constructed tuple type is then created, whose values are interpreted by again recursively applying the mapping rules defined in this section. ### Numbers When interpreted as an expression, a JSON number represents a HCL number value. HCL numbers are arbitrary-precision decimal values, so a JSON HCL parser must be able to translate exactly the value given to a number of corresponding precision, within the constraints set by the HCL syntax-agnostic information model. In practice, off-the-shelf JSON serializers often do not support customizing the processing of numbers, and instead force processing as 32-bit or 64-bit floating point values. A _producer_ of JSON HCL that uses such a serializer can provide numeric values as JSON strings where they have precision too great for representation in the serializer's chosen numeric type in situations where the result will be converted to number (using the standard conversion rules) by a calling application. Alternatively, for expressions that are evaluated in full expression mode an embedded template interpolation can be used to faithfully represent a number, such as `"${1e150}"`, which will then be evaluated by the underlying HCL native syntax expression evaluator. ### Boolean Values The JSON boolean values `true` and `false`, when interpreted as expressions, represent the corresponding HCL boolean values. ### The Null Value The JSON value `null`, when interpreted as an expression, represents a HCL null value of the dynamic pseudo-type. ### Strings When interpreted as an expression, a JSON string may be interpreted in one of two ways depending on the evaluation mode. If evaluating in literal-only mode (as defined by the syntax-agnostic information model) the literal string is intepreted directly as a HCL string value, by directly using the exact sequence of unicode characters represented. Template interpolations and directives MUST NOT be processed in this mode, allowing any characters that appear as introduction sequences to pass through literally: ```json "Hello world! Template sequences like ${ are not intepreted here." ``` When evaluating in full expression mode (again, as defined by the syntax- agnostic information model) the literal string is instead interpreted as a _standalone template_ in the HCL Native Syntax. The expression evaluation result is then the direct result of evaluating that template with the current variable scope and function table. ```json "Hello, ${name}! Template sequences are interpreted in full expression mode." ``` In particular the _Template Interpolation Unwrapping_ requirement from the HCL native syntax specification must be implemented, allowing the use of single-interpolation templates to represent expressions that would not otherwise be representable in JSON, such as the following example where the result must be a number, rather than a string representation of a number: ```json "${ a + b }" ``` ## Static Analysis The HCL static analysis operations are implemented for JSON values that represent expressions, as described in the following sections. 
Due to the limited expressive power of the JSON syntax alone, use of these static analyses functions rather than normal expression evaluation is used as additional context for how a JSON value is to be interpreted, which means that static analyses can result in a different interpretation of a given expression than normal evaluation. ### Static List An expression interpreted as a static list must be a JSON array. Each of the values in the array is interpreted as an expression and returned. ### Static Map An expression interpreted as a static map must be a JSON object. Each of the key/value pairs in the object is presented as a pair of expressions. Since object property names are always strings, evaluating the key expression with a non-`nil` evaluation context will evaluate any template sequences given in the property name. ### Static Call An expression interpreted as a static call must be a string. The content of the string is interpreted as a native syntax expression (not a _template_, unlike normal evaluation) and then the static call analysis is delegated to that expression. If the original expression is not a string or its contents cannot be parsed as a native syntax expression then static call analysis is not supported. ### Static Traversal An expression interpreted as a static traversal must be a string. The content of the string is interpreted as a native syntax expression (not a _template_, unlike normal evaluation) and then static traversal analysis is delegated to that expression. If the original expression is not a string or its contents cannot be parsed as a native syntax expression then static call analysis is not supported. hcl-2.14.1/json/structure.go000066400000000000000000000447751431334125700157310ustar00rootroot00000000000000package json import ( "fmt" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" ) // body is the implementation of "Body" used for files processed with the JSON // parser. type body struct { val node // If non-nil, the keys of this map cause the corresponding attributes to // be treated as non-existing. This is used when Body.PartialContent is // called, to produce the "remaining content" Body. hiddenAttrs map[string]struct{} } // expression is the implementation of "Expression" used for files processed // with the JSON parser. type expression struct { src node } func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) { content, newBody, diags := b.PartialContent(schema) hiddenAttrs := newBody.(*body).hiddenAttrs var nameSuggestions []string for _, attrS := range schema.Attributes { if _, ok := hiddenAttrs[attrS.Name]; !ok { // Only suggest an attribute name if we didn't use it already. nameSuggestions = append(nameSuggestions, attrS.Name) } } for _, blockS := range schema.Blocks { // Blocks can appear multiple times, so we'll suggest their type // names regardless of whether they've already been used. nameSuggestions = append(nameSuggestions, blockS.Type) } jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil) diags = append(diags, attrDiags...) for _, attr := range jsonAttrs { k := attr.Name if k == "//" { // Ignore "//" keys in objects representing bodies, to allow // their use as comments. 
continue } if _, ok := hiddenAttrs[k]; !ok { suggestion := nameSuggestion(k, nameSuggestions) if suggestion != "" { suggestion = fmt.Sprintf(" Did you mean %q?", suggestion) } diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Extraneous JSON object property", Detail: fmt.Sprintf("No argument or block type is named %q.%s", k, suggestion), Subject: &attr.NameRange, Context: attr.Range().Ptr(), }) } } return content, diags } func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) { var diags hcl.Diagnostics jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil) diags = append(diags, attrDiags...) usedNames := map[string]struct{}{} if b.hiddenAttrs != nil { for k := range b.hiddenAttrs { usedNames[k] = struct{}{} } } content := &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, Blocks: nil, MissingItemRange: b.MissingItemRange(), } // Create some more convenient data structures for our work below. attrSchemas := map[string]hcl.AttributeSchema{} blockSchemas := map[string]hcl.BlockHeaderSchema{} for _, attrS := range schema.Attributes { attrSchemas[attrS.Name] = attrS } for _, blockS := range schema.Blocks { blockSchemas[blockS.Type] = blockS } for _, jsonAttr := range jsonAttrs { attrName := jsonAttr.Name if _, used := b.hiddenAttrs[attrName]; used { continue } if attrS, defined := attrSchemas[attrName]; defined { if existing, exists := content.Attributes[attrName]; exists { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Duplicate argument", Detail: fmt.Sprintf("The argument %q was already set at %s.", attrName, existing.Range), Subject: &jsonAttr.NameRange, Context: jsonAttr.Range().Ptr(), }) continue } content.Attributes[attrS.Name] = &hcl.Attribute{ Name: attrS.Name, Expr: &expression{src: jsonAttr.Value}, Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()), NameRange: jsonAttr.NameRange, } usedNames[attrName] = struct{}{} } else if blockS, defined := blockSchemas[attrName]; defined { bv := jsonAttr.Value blockDiags := b.unpackBlock(bv, blockS.Type, &jsonAttr.NameRange, blockS.LabelNames, nil, nil, &content.Blocks) diags = append(diags, blockDiags...) usedNames[attrName] = struct{}{} } // We ignore anything that isn't defined because that's the // PartialContent contract. The Content method will catch leftovers. } // Make sure we got all the required attributes. for _, attrS := range schema.Attributes { if !attrS.Required { continue } if _, defined := content.Attributes[attrS.Name]; !defined { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing required argument", Detail: fmt.Sprintf("The argument %q is required, but no definition was found.", attrS.Name), Subject: b.MissingItemRange().Ptr(), }) } } unusedBody := &body{ val: b.val, hiddenAttrs: usedNames, } return content, unusedBody, diags } // JustAttributes for JSON bodies interprets all properties of the wrapped // JSON object as attributes and returns them. 
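//
// A hedged usage sketch (not part of the original source), starting from the
// Body of an *hcl.File produced by Parse in this package:
//
//	attrs, diags := f.Body.JustAttributes()
//	for name, attr := range attrs {
//		// Passing a nil EvalContext returns JSON strings verbatim,
//		// without template interpolation.
//		val, valDiags := attr.Expr.Value(nil)
//		_, _, _ = name, val, valDiags
//	}
//	_ = diags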
func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) { var diags hcl.Diagnostics attrs := make(map[string]*hcl.Attribute) obj, ok := b.val.(*objectVal) if !ok { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect JSON value type", Detail: "A JSON object is required here, setting the arguments for this block.", Subject: b.val.StartRange().Ptr(), }) return attrs, diags } for _, jsonAttr := range obj.Attrs { name := jsonAttr.Name if name == "//" { // Ignore "//" keys in objects representing bodies, to allow // their use as comments. continue } if _, hidden := b.hiddenAttrs[name]; hidden { continue } if existing, exists := attrs[name]; exists { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Duplicate attribute definition", Detail: fmt.Sprintf("The argument %q was already set at %s.", name, existing.Range), Subject: &jsonAttr.NameRange, }) continue } attrs[name] = &hcl.Attribute{ Name: name, Expr: &expression{src: jsonAttr.Value}, Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()), NameRange: jsonAttr.NameRange, } } // No diagnostics possible here, since the parser already took care of // finding duplicates and every JSON value can be a valid attribute value. return attrs, diags } func (b *body) MissingItemRange() hcl.Range { switch tv := b.val.(type) { case *objectVal: return tv.CloseRange case *arrayVal: return tv.OpenRange default: // Should not happen in correct operation, but might show up if the // input is invalid and we are producing partial results. return tv.StartRange() } } func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labelsLeft []string, labelsUsed []string, labelRanges []hcl.Range, blocks *hcl.Blocks) (diags hcl.Diagnostics) { if len(labelsLeft) > 0 { labelName := labelsLeft[0] jsonAttrs, attrDiags := b.collectDeepAttrs(v, &labelName) diags = append(diags, attrDiags...) if len(jsonAttrs) == 0 { diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Missing block label", Detail: fmt.Sprintf("At least one object property is required, whose name represents the %s block's %s.", typeName, labelName), Subject: v.StartRange().Ptr(), }) return } labelsUsed := append(labelsUsed, "") labelRanges := append(labelRanges, hcl.Range{}) for _, p := range jsonAttrs { pk := p.Name labelsUsed[len(labelsUsed)-1] = pk labelRanges[len(labelRanges)-1] = p.NameRange diags = append(diags, b.unpackBlock(p.Value, typeName, typeRange, labelsLeft[1:], labelsUsed, labelRanges, blocks)...) } return } // By the time we get here, we've peeled off all the labels and we're ready // to deal with the block's actual content. // need to copy the label slices because their underlying arrays will // continue to be mutated after we return. labels := make([]string, len(labelsUsed)) copy(labels, labelsUsed) labelR := make([]hcl.Range, len(labelRanges)) copy(labelR, labelRanges) switch tv := v.(type) { case *nullVal: // There is no block content, e.g the value is null. 
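// (Illustrative note, not in the original source: a property such as
// {"resource": null} therefore yields zero "resource" blocks, as exercised
// by the null-value cases in TestBodyPartialContent in structure_test.go.)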
return case *objectVal: // Single instance of the block *blocks = append(*blocks, &hcl.Block{ Type: typeName, Labels: labels, Body: &body{ val: tv, }, DefRange: tv.OpenRange, TypeRange: *typeRange, LabelRanges: labelR, }) case *arrayVal: // Multiple instances of the block for _, av := range tv.Values { *blocks = append(*blocks, &hcl.Block{ Type: typeName, Labels: labels, Body: &body{ val: av, // might be mistyped; we'll find out when content is requested for this body }, DefRange: tv.OpenRange, TypeRange: *typeRange, LabelRanges: labelR, }) } default: diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect JSON value type", Detail: fmt.Sprintf("Either a JSON object or a JSON array is required, representing the contents of one or more %q blocks.", typeName), Subject: v.StartRange().Ptr(), }) } return } // collectDeepAttrs takes either a single object or an array of objects and // flattens it into a list of object attributes, collecting attributes from // all of the objects in a given array. // // Ordering is preserved, so a list of objects that each have one property // will result in those properties being returned in the same order as the // objects appeared in the array. // // This is appropriate for use only for objects representing bodies or labels // within a block. // // The labelName argument, if non-null, is used to tailor returned error // messages to refer to block labels rather than attributes and child blocks. // It has no other effect. func (b *body) collectDeepAttrs(v node, labelName *string) ([]*objectAttr, hcl.Diagnostics) { var diags hcl.Diagnostics var attrs []*objectAttr switch tv := v.(type) { case *nullVal: // If a value is null, then we don't return any attributes or return an error. case *objectVal: attrs = append(attrs, tv.Attrs...) case *arrayVal: for _, ev := range tv.Values { switch tev := ev.(type) { case *objectVal: attrs = append(attrs, tev.Attrs...) default: if labelName != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect JSON value type", Detail: fmt.Sprintf("A JSON object is required here, to specify %s labels for this block.", *labelName), Subject: ev.StartRange().Ptr(), }) } else { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect JSON value type", Detail: "A JSON object is required here, to define arguments and child blocks.", Subject: ev.StartRange().Ptr(), }) } } } default: if labelName != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect JSON value type", Detail: fmt.Sprintf("Either a JSON object or JSON array of objects is required here, to specify %s labels for this block.", *labelName), Subject: v.StartRange().Ptr(), }) } else { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Incorrect JSON value type", Detail: "Either a JSON object or JSON array of objects is required here, to define arguments and child blocks.", Subject: v.StartRange().Ptr(), }) } } return attrs, diags } func (e *expression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { switch v := e.src.(type) { case *stringVal: if ctx != nil { // Parse string contents as a HCL native language expression. // We only do this if we have a context, so passing a nil context // is how the caller specifies that interpolations are not allowed // and that the string should just be returned verbatim. 
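// (Illustrative note, not in the original source: with a non-nil ctx the
// JSON string "${1 + 1}" evaluates to the number 2 through template
// interpolation unwrapping, while with a nil ctx it is returned as the
// literal string "${1 + 1}".)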
templateSrc := v.Value expr, diags := hclsyntax.ParseTemplate( []byte(templateSrc), v.SrcRange.Filename, // This won't produce _exactly_ the right result, since // the hclsyntax parser can't "see" any escapes we removed // while parsing JSON, but it's better than nothing. hcl.Pos{ Line: v.SrcRange.Start.Line, // skip over the opening quote mark Byte: v.SrcRange.Start.Byte + 1, Column: v.SrcRange.Start.Column + 1, }, ) if diags.HasErrors() { return cty.DynamicVal, diags } val, evalDiags := expr.Value(ctx) diags = append(diags, evalDiags...) return val, diags } return cty.StringVal(v.Value), nil case *numberVal: return cty.NumberVal(v.Value), nil case *booleanVal: return cty.BoolVal(v.Value), nil case *arrayVal: var diags hcl.Diagnostics vals := []cty.Value{} for _, jsonVal := range v.Values { val, valDiags := (&expression{src: jsonVal}).Value(ctx) vals = append(vals, val) diags = append(diags, valDiags...) } return cty.TupleVal(vals), diags case *objectVal: var diags hcl.Diagnostics attrs := map[string]cty.Value{} attrRanges := map[string]hcl.Range{} known := true for _, jsonAttr := range v.Attrs { // In this one context we allow keys to contain interpolation // expressions too, assuming we're evaluating in interpolation // mode. This achieves parity with the native syntax where // object expressions can have dynamic keys, while block contents // may not. name, nameDiags := (&expression{src: &stringVal{ Value: jsonAttr.Name, SrcRange: jsonAttr.NameRange, }}).Value(ctx) valExpr := &expression{src: jsonAttr.Value} val, valDiags := valExpr.Value(ctx) diags = append(diags, nameDiags...) diags = append(diags, valDiags...) var err error name, err = convert.Convert(name, cty.String) if err != nil { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid object key expression", Detail: fmt.Sprintf("Cannot use this expression as an object key: %s.", err), Subject: &jsonAttr.NameRange, Expression: valExpr, EvalContext: ctx, }) continue } if name.IsNull() { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid object key expression", Detail: "Cannot use null value as an object key.", Subject: &jsonAttr.NameRange, Expression: valExpr, EvalContext: ctx, }) continue } if !name.IsKnown() { // This is a bit of a weird case, since our usual rules require // us to tolerate unknowns and just represent the result as // best we can but if we don't know the key then we can't // know the type of our object at all, and thus we must turn // the whole thing into cty.DynamicVal. This is consistent with // how this situation is handled in the native syntax. // We'll keep iterating so we can collect other errors in // subsequent attributes. known = false continue } nameStr := name.AsString() if _, defined := attrs[nameStr]; defined { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Duplicate object attribute", Detail: fmt.Sprintf("An attribute named %q was already defined at %s.", nameStr, attrRanges[nameStr]), Subject: &jsonAttr.NameRange, Expression: e, EvalContext: ctx, }) continue } attrs[nameStr] = val attrRanges[nameStr] = jsonAttr.NameRange } if !known { // We encountered an unknown key somewhere along the way, so // we can't know what our type will eventually be. return cty.DynamicVal, diags } return cty.ObjectVal(attrs), diags case *nullVal: return cty.NullVal(cty.DynamicPseudoType), nil default: // Default to DynamicVal so that ASTs containing invalid nodes can // still be partially-evaluated. 
return cty.DynamicVal, nil } } func (e *expression) Variables() []hcl.Traversal { var vars []hcl.Traversal switch v := e.src.(type) { case *stringVal: templateSrc := v.Value expr, diags := hclsyntax.ParseTemplate( []byte(templateSrc), v.SrcRange.Filename, // This won't produce _exactly_ the right result, since // the hclsyntax parser can't "see" any escapes we removed // while parsing JSON, but it's better than nothing. hcl.Pos{ Line: v.SrcRange.Start.Line, // skip over the opening quote mark Byte: v.SrcRange.Start.Byte + 1, Column: v.SrcRange.Start.Column + 1, }, ) if diags.HasErrors() { return vars } return expr.Variables() case *arrayVal: for _, jsonVal := range v.Values { vars = append(vars, (&expression{src: jsonVal}).Variables()...) } case *objectVal: for _, jsonAttr := range v.Attrs { keyExpr := &stringVal{ // we're going to treat key as an expression in this context Value: jsonAttr.Name, SrcRange: jsonAttr.NameRange, } vars = append(vars, (&expression{src: keyExpr}).Variables()...) vars = append(vars, (&expression{src: jsonAttr.Value}).Variables()...) } } return vars } func (e *expression) Range() hcl.Range { return e.src.Range() } func (e *expression) StartRange() hcl.Range { return e.src.StartRange() } // Implementation for hcl.AbsTraversalForExpr. func (e *expression) AsTraversal() hcl.Traversal { // In JSON-based syntax a traversal is given as a string containing // traversal syntax as defined by hclsyntax.ParseTraversalAbs. switch v := e.src.(type) { case *stringVal: traversal, diags := hclsyntax.ParseTraversalAbs([]byte(v.Value), v.SrcRange.Filename, v.SrcRange.Start) if diags.HasErrors() { return nil } return traversal default: return nil } } // Implementation for hcl.ExprCall. func (e *expression) ExprCall() *hcl.StaticCall { // In JSON-based syntax a static call is given as a string containing // an expression in the native syntax that also supports ExprCall. switch v := e.src.(type) { case *stringVal: expr, diags := hclsyntax.ParseExpression([]byte(v.Value), v.SrcRange.Filename, v.SrcRange.Start) if diags.HasErrors() { return nil } call, diags := hcl.ExprCall(expr) if diags.HasErrors() { return nil } return call default: return nil } } // Implementation for hcl.ExprList. func (e *expression) ExprList() []hcl.Expression { switch v := e.src.(type) { case *arrayVal: ret := make([]hcl.Expression, len(v.Values)) for i, node := range v.Values { ret[i] = &expression{src: node} } return ret default: return nil } } // Implementation for hcl.ExprMap. 
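//
// (Illustrative note, not in the original source: under hcl.ExprMap a JSON
// object expression such as {"a": 1, "b": 2} is exposed as key/value
// expression pairs without being evaluated, matching the "Static Map"
// behaviour described in spec.md.)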
func (e *expression) ExprMap() []hcl.KeyValuePair { switch v := e.src.(type) { case *objectVal: ret := make([]hcl.KeyValuePair, len(v.Attrs)) for i, jsonAttr := range v.Attrs { ret[i] = hcl.KeyValuePair{ Key: &expression{src: &stringVal{ Value: jsonAttr.Name, SrcRange: jsonAttr.NameRange, }}, Value: &expression{src: jsonAttr.Value}, } } return ret default: return nil } } hcl-2.14.1/json/structure_test.go000066400000000000000000001001711431334125700167470ustar00rootroot00000000000000package json import ( "fmt" "reflect" "strings" "testing" "github.com/davecgh/go-spew/spew" "github.com/go-test/deep" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" ) func TestBodyPartialContent(t *testing.T) { tests := []struct { src string schema *hcl.BodySchema want *hcl.BodyContent diagCount int }{ { `{}`, &hcl.BodySchema{}, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 2, Byte: 1}, End: hcl.Pos{Line: 1, Column: 3, Byte: 2}, }, }, 0, }, { `[]`, &hcl.BodySchema{}, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, 0, }, { `[{}]`, &hcl.BodySchema{}, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, 0, }, { `[[]]`, &hcl.BodySchema{}, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, 1, // elements of root array must be objects }, { `{"//": "comment that should be ignored"}`, &hcl.BodySchema{}, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 40, Byte: 39}, End: hcl.Pos{Line: 1, Column: 41, Byte: 40}, }, }, 0, }, { `{"//": "comment that should be ignored", "//": "another comment"}`, &hcl.BodySchema{}, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 65, Byte: 64}, End: hcl.Pos{Line: 1, Column: 66, Byte: 65}, }, }, 0, }, { `{"name":"Ermintrude"}`, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "name", }, }, }, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{ "name": &hcl.Attribute{ Name: "name", Expr: &expression{ src: &stringVal{ Value: "Ermintrude", SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 8, Line: 1, Column: 9, }, End: hcl.Pos{ Byte: 20, Line: 1, Column: 21, }, }, }, }, Range: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 20, Line: 1, Column: 21, }, }, NameRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 7, Line: 1, Column: 8, }, }, }, }, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 21, Byte: 20}, End: hcl.Pos{Line: 1, Column: 22, Byte: 21}, }, }, 0, }, { `[{"name":"Ermintrude"}]`, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "name", }, }, }, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{ "name": &hcl.Attribute{ Name: "name", Expr: &expression{ src: &stringVal{ Value: "Ermintrude", SrcRange: hcl.Range{ Filename: "test.json", Start: 
hcl.Pos{ Byte: 9, Line: 1, Column: 10, }, End: hcl.Pos{ Byte: 21, Line: 1, Column: 22, }, }, }, }, Range: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, End: hcl.Pos{ Byte: 21, Line: 1, Column: 22, }, }, NameRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, End: hcl.Pos{ Byte: 8, Line: 1, Column: 9, }, }, }, }, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, 0, }, { `{"name":"Ermintrude"}`, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "name", Required: true, }, { Name: "age", Required: true, }, }, }, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{ "name": &hcl.Attribute{ Name: "name", Expr: &expression{ src: &stringVal{ Value: "Ermintrude", SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 8, Line: 1, Column: 9, }, End: hcl.Pos{ Byte: 20, Line: 1, Column: 21, }, }, }, }, Range: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 20, Line: 1, Column: 21, }, }, NameRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 7, Line: 1, Column: 8, }, }, }, }, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 21, Byte: 20}, End: hcl.Pos{Line: 1, Column: 22, Byte: 21}, }, }, 1, }, { `{"resource": null}`, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "resource", }, }, }, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, // We don't find any blocks if the value is json null. Blocks: nil, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 18, Byte: 17}, End: hcl.Pos{Line: 1, Column: 19, Byte: 18}, }, }, 0, }, { `{"resource": { "nested": null }}`, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "resource", LabelNames: []string{"name"}, }, }, }, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, Blocks: nil, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 32, Byte: 31}, End: hcl.Pos{Line: 1, Column: 33, Byte: 32}, }, }, 0, }, { `{"resource":{}}`, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "resource", }, }, }, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, Blocks: hcl.Blocks{ { Type: "resource", Labels: []string{}, Body: &body{ val: &objectVal{ Attrs: []*objectAttr{}, SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 12, Line: 1, Column: 13, }, End: hcl.Pos{ Byte: 14, Line: 1, Column: 15, }, }, OpenRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 12, Line: 1, Column: 13, }, End: hcl.Pos{ Byte: 13, Line: 1, Column: 14, }, }, CloseRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 13, Line: 1, Column: 14, }, End: hcl.Pos{ Byte: 14, Line: 1, Column: 15, }, }, }, }, DefRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 12, Line: 1, Column: 13, }, End: hcl.Pos{ Byte: 13, Line: 1, Column: 14, }, }, TypeRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 11, Line: 1, Column: 12, }, }, LabelRanges: []hcl.Range{}, }, }, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 15, Byte: 14}, End: hcl.Pos{Line: 1, Column: 16, Byte: 15}, }, }, 0, }, { `{"resource":[{},{}]}`, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "resource", }, }, }, &hcl.BodyContent{ Attributes: 
map[string]*hcl.Attribute{}, Blocks: hcl.Blocks{ { Type: "resource", Labels: []string{}, Body: &body{ val: &objectVal{ Attrs: []*objectAttr{}, SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 13, Line: 1, Column: 14, }, End: hcl.Pos{ Byte: 15, Line: 1, Column: 16, }, }, OpenRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 13, Line: 1, Column: 14, }, End: hcl.Pos{ Byte: 14, Line: 1, Column: 15, }, }, CloseRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 14, Line: 1, Column: 15, }, End: hcl.Pos{ Byte: 15, Line: 1, Column: 16, }, }, }, }, DefRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 12, Line: 1, Column: 13, }, End: hcl.Pos{ Byte: 13, Line: 1, Column: 14, }, }, TypeRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 11, Line: 1, Column: 12, }, }, LabelRanges: []hcl.Range{}, }, { Type: "resource", Labels: []string{}, Body: &body{ val: &objectVal{ Attrs: []*objectAttr{}, SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 16, Line: 1, Column: 17, }, End: hcl.Pos{ Byte: 18, Line: 1, Column: 19, }, }, OpenRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 16, Line: 1, Column: 17, }, End: hcl.Pos{ Byte: 17, Line: 1, Column: 18, }, }, CloseRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 17, Line: 1, Column: 18, }, End: hcl.Pos{ Byte: 18, Line: 1, Column: 19, }, }, }, }, DefRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 12, Line: 1, Column: 13, }, End: hcl.Pos{ Byte: 13, Line: 1, Column: 14, }, }, TypeRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 11, Line: 1, Column: 12, }, }, LabelRanges: []hcl.Range{}, }, }, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 20, Byte: 19}, End: hcl.Pos{Line: 1, Column: 21, Byte: 20}, }, }, 0, }, { `{"resource":{"foo_instance":{"bar":{}}}}`, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "resource", LabelNames: []string{"type", "name"}, }, }, }, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, Blocks: hcl.Blocks{ { Type: "resource", Labels: []string{"foo_instance", "bar"}, Body: &body{ val: &objectVal{ Attrs: []*objectAttr{}, SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 35, Line: 1, Column: 36, }, End: hcl.Pos{ Byte: 37, Line: 1, Column: 38, }, }, OpenRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 35, Line: 1, Column: 36, }, End: hcl.Pos{ Byte: 36, Line: 1, Column: 37, }, }, CloseRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 36, Line: 1, Column: 37, }, End: hcl.Pos{ Byte: 37, Line: 1, Column: 38, }, }, }, }, DefRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 35, Line: 1, Column: 36, }, End: hcl.Pos{ Byte: 36, Line: 1, Column: 37, }, }, TypeRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 11, Line: 1, Column: 12, }, }, LabelRanges: []hcl.Range{ { Filename: "test.json", Start: hcl.Pos{ Byte: 13, Line: 1, Column: 14, }, End: hcl.Pos{ Byte: 27, Line: 1, Column: 28, }, }, { Filename: "test.json", Start: hcl.Pos{ Byte: 29, Line: 1, Column: 30, }, End: hcl.Pos{ Byte: 34, Line: 1, Column: 35, }, }, }, }, }, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 40, Byte: 39}, End: hcl.Pos{Line: 1, Column: 41, Byte: 40}, }, }, 0, }, { `{"resource":{"foo_instance":[{"bar":{}}, {"bar":{}}]}}`, &hcl.BodySchema{ Blocks: 
[]hcl.BlockHeaderSchema{ { Type: "resource", LabelNames: []string{"type", "name"}, }, }, }, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, Blocks: hcl.Blocks{ { Type: "resource", Labels: []string{"foo_instance", "bar"}, Body: &body{ val: &objectVal{ Attrs: []*objectAttr{}, SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 36, Line: 1, Column: 37, }, End: hcl.Pos{ Byte: 38, Line: 1, Column: 39, }, }, OpenRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 36, Line: 1, Column: 37, }, End: hcl.Pos{ Byte: 37, Line: 1, Column: 38, }, }, CloseRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 37, Line: 1, Column: 38, }, End: hcl.Pos{ Byte: 38, Line: 1, Column: 39, }, }, }, }, DefRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 36, Line: 1, Column: 37, }, End: hcl.Pos{ Byte: 37, Line: 1, Column: 38, }, }, TypeRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 11, Line: 1, Column: 12, }, }, LabelRanges: []hcl.Range{ { Filename: "test.json", Start: hcl.Pos{ Byte: 13, Line: 1, Column: 14, }, End: hcl.Pos{ Byte: 27, Line: 1, Column: 28, }, }, { Filename: "test.json", Start: hcl.Pos{ Byte: 30, Line: 1, Column: 31, }, End: hcl.Pos{ Byte: 35, Line: 1, Column: 36, }, }, }, }, { Type: "resource", Labels: []string{"foo_instance", "bar"}, Body: &body{ val: &objectVal{ Attrs: []*objectAttr{}, SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 36, Line: 1, Column: 37, }, End: hcl.Pos{ Byte: 38, Line: 1, Column: 39, }, }, OpenRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 36, Line: 1, Column: 37, }, End: hcl.Pos{ Byte: 37, Line: 1, Column: 38, }, }, CloseRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 37, Line: 1, Column: 38, }, End: hcl.Pos{ Byte: 38, Line: 1, Column: 39, }, }, }, }, DefRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 48, Line: 1, Column: 49, }, End: hcl.Pos{ Byte: 49, Line: 1, Column: 50, }, }, TypeRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 1, Line: 1, Column: 2, }, End: hcl.Pos{ Byte: 11, Line: 1, Column: 12, }, }, LabelRanges: []hcl.Range{ { Filename: "test.json", Start: hcl.Pos{ Byte: 13, Line: 1, Column: 14, }, End: hcl.Pos{ Byte: 27, Line: 1, Column: 28, }, }, { Filename: "test.json", Start: hcl.Pos{ Byte: 42, Line: 1, Column: 43, }, End: hcl.Pos{ Byte: 47, Line: 1, Column: 48, }, }, }, }, }, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 54, Byte: 53}, End: hcl.Pos{Line: 1, Column: 55, Byte: 54}, }, }, 0, }, { `{"name":"Ermintrude"}`, &hcl.BodySchema{ Blocks: []hcl.BlockHeaderSchema{ { Type: "name", }, }, }, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{}, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 21, Byte: 20}, End: hcl.Pos{Line: 1, Column: 22, Byte: 21}, }, }, 1, // name is supposed to be a block }, { `[{"name":"Ermintrude"},{"name":"Ermintrude"}]`, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "name", }, }, }, &hcl.BodyContent{ Attributes: map[string]*hcl.Attribute{ "name": { Name: "name", Expr: &expression{ src: &stringVal{ Value: "Ermintrude", SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 8, Line: 1, Column: 9, }, End: hcl.Pos{ Byte: 20, Line: 1, Column: 21, }, }, }, }, Range: hcl.Range{ Filename: "test.json", Start: hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, End: hcl.Pos{ Byte: 21, Line: 1, Column: 22, }, }, NameRange: hcl.Range{ Filename: "test.json", Start: 
hcl.Pos{ Byte: 2, Line: 1, Column: 3, }, End: hcl.Pos{ Byte: 8, Line: 1, Column: 9, }, }, }, }, MissingItemRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, End: hcl.Pos{Line: 1, Column: 2, Byte: 1}, }, }, 1, // "name" attribute is defined twice }, } for i, test := range tests { t.Run(fmt.Sprintf("%02d-%s", i, test.src), func(t *testing.T) { file, diags := Parse([]byte(test.src), "test.json") if len(diags) != 0 { t.Fatalf("Parse produced diagnostics: %s", diags) } got, _, diags := file.Body.PartialContent(test.schema) if len(diags) != test.diagCount { t.Errorf("Wrong number of diagnostics %d; want %d", len(diags), test.diagCount) for _, diag := range diags { t.Logf(" - %s", diag) } } for _, problem := range deep.Equal(got, test.want) { t.Error(problem) } }) } } func TestBodyContent(t *testing.T) { // We test most of the functionality already in TestBodyPartialContent, so // this test focuses on the handling of extraneous attributes. tests := []struct { src string schema *hcl.BodySchema diagCount int }{ { `{"unknown": true}`, &hcl.BodySchema{}, 1, }, { `{"//": "comment that should be ignored"}`, &hcl.BodySchema{}, 0, }, { `{"unknow": true}`, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "unknown", }, }, }, 1, }, { `{"unknow": true, "unnown": true}`, &hcl.BodySchema{ Attributes: []hcl.AttributeSchema{ { Name: "unknown", }, }, }, 2, }, } for i, test := range tests { t.Run(fmt.Sprintf("%02d-%s", i, test.src), func(t *testing.T) { file, diags := Parse([]byte(test.src), "test.json") if len(diags) != 0 { t.Fatalf("Parse produced diagnostics: %s", diags) } _, diags = file.Body.Content(test.schema) if len(diags) != test.diagCount { t.Errorf("Wrong number of diagnostics %d; want %d", len(diags), test.diagCount) for _, diag := range diags { t.Logf(" - %s", diag) } } }) } } func TestJustAttributes(t *testing.T) { // We test most of the functionality already in TestBodyPartialContent, so // this test focuses on the handling of extraneous attributes. 
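	// In particular, the cases below exercise the JSON-specific rules that a
	// property named "//" is treated as a comment and dropped, and that a
	// duplicated property name is reported as a diagnostic.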
tests := []struct { src string want hcl.Attributes diagCount int }{ { `{}`, map[string]*hcl.Attribute{}, 0, }, { `{"foo": true}`, map[string]*hcl.Attribute{ "foo": { Name: "foo", Expr: &expression{ src: &booleanVal{ Value: true, SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Byte: 8, Line: 1, Column: 9}, End: hcl.Pos{Byte: 12, Line: 1, Column: 13}, }, }, }, Range: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 12, Line: 1, Column: 13}, }, NameRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 6, Line: 1, Column: 7}, }, }, }, 0, }, { `{"//": "comment that should be ignored"}`, map[string]*hcl.Attribute{}, 0, }, { `{"foo": true, "foo": true}`, map[string]*hcl.Attribute{ "foo": { Name: "foo", Expr: &expression{ src: &booleanVal{ Value: true, SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Byte: 8, Line: 1, Column: 9}, End: hcl.Pos{Byte: 12, Line: 1, Column: 13}, }, }, }, Range: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 12, Line: 1, Column: 13}, }, NameRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Byte: 1, Line: 1, Column: 2}, End: hcl.Pos{Byte: 6, Line: 1, Column: 7}, }, }, }, 1, // attribute foo was already defined }, } for i, test := range tests { t.Run(fmt.Sprintf("%02d-%s", i, test.src), func(t *testing.T) { file, diags := Parse([]byte(test.src), "test.json") if len(diags) != 0 { t.Fatalf("Parse produced diagnostics: %s", diags) } got, diags := file.Body.JustAttributes() if len(diags) != test.diagCount { t.Errorf("Wrong number of diagnostics %d; want %d", len(diags), test.diagCount) for _, diag := range diags { t.Logf(" - %s", diag) } } if !reflect.DeepEqual(got, test.want) { t.Errorf("wrong result\ngot: %s\nwant: %s", spew.Sdump(got), spew.Sdump(test.want)) } }) } } func TestExpressionVariables(t *testing.T) { tests := []struct { Src string Want []hcl.Traversal }{ { `{"a":true}`, nil, }, { `{"a":"${foo}"}`, []hcl.Traversal{ { hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 9, Byte: 8}, End: hcl.Pos{Line: 1, Column: 12, Byte: 11}, }, }, }, }, }, { `{"a":["${foo}"]}`, []hcl.Traversal{ { hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 10, Byte: 9}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, }, }, }, { `{"a":{"b":"${foo}"}}`, []hcl.Traversal{ { hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 14, Byte: 13}, End: hcl.Pos{Line: 1, Column: 17, Byte: 16}, }, }, }, }, }, { `{"a":{"${foo}":"b"}}`, []hcl.Traversal{ { hcl.TraverseRoot{ Name: "foo", SrcRange: hcl.Range{ Filename: "test.json", Start: hcl.Pos{Line: 1, Column: 10, Byte: 9}, End: hcl.Pos{Line: 1, Column: 13, Byte: 12}, }, }, }, }, }, } for _, test := range tests { t.Run(test.Src, func(t *testing.T) { file, diags := Parse([]byte(test.Src), "test.json") if len(diags) != 0 { t.Fatalf("Parse produced diagnostics: %s", diags) } attrs, diags := file.Body.JustAttributes() if len(diags) != 0 { t.Fatalf("JustAttributes produced diagnostics: %s", diags) } got := attrs["a"].Expr.Variables() if !reflect.DeepEqual(got, test.Want) { t.Errorf("wrong result\ngot: %s\nwant: %s", spew.Sdump(got), spew.Sdump(test.Want)) } }) } } func TestExpressionAsTraversal(t *testing.T) { e := &expression{ src: &stringVal{ Value: "foo.bar[0]", }, } traversal := e.AsTraversal() if len(traversal) != 3 
{ t.Fatalf("incorrect traversal %#v; want length 3", traversal) } } func TestStaticExpressionList(t *testing.T) { e := &expression{ src: &arrayVal{ Values: []node{ &stringVal{ Value: "hello", }, }, }, } exprs := e.ExprList() if len(exprs) != 1 { t.Fatalf("incorrect exprs %#v; want length 1", exprs) } if exprs[0].(*expression).src != e.src.(*arrayVal).Values[0] { t.Fatalf("wrong first expression node") } } func TestExpression_Value(t *testing.T) { src := `{ "string": "string_val", "number": 5, "bool_true": true, "bool_false": false, "array": ["a"], "object": {"key": "value"}, "null": null }` expected := map[string]cty.Value{ "string": cty.StringVal("string_val"), "number": cty.NumberIntVal(5), "bool_true": cty.BoolVal(true), "bool_false": cty.BoolVal(false), "array": cty.TupleVal([]cty.Value{cty.StringVal("a")}), "object": cty.ObjectVal(map[string]cty.Value{ "key": cty.StringVal("value"), }), "null": cty.NullVal(cty.DynamicPseudoType), } file, diags := Parse([]byte(src), "") if len(diags) != 0 { t.Errorf("got %d diagnostics on parse; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } if file == nil { t.Errorf("got nil File; want actual file") } if file.Body == nil { t.Fatalf("got nil Body; want actual body") } attrs, diags := file.Body.JustAttributes() if len(diags) != 0 { t.Errorf("got %d diagnostics on decode; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } for ek, ev := range expected { val, diags := attrs[ek].Expr.Value(&hcl.EvalContext{}) if len(diags) != 0 { t.Errorf("got %d diagnostics on eval; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } } if !val.RawEquals(ev) { t.Errorf("wrong result %#v; want %#v", val, ev) } } } // TestExpressionValue_Diags asserts that Value() returns diagnostics // from nested evaluations for complex objects (e.g. 
ObjectVal, ArrayVal) func TestExpressionValue_Diags(t *testing.T) { cases := []struct { name string src string expected cty.Value error string }{ { name: "string: happy", src: `{"v": "happy ${VAR1}"}`, expected: cty.StringVal("happy case"), }, { name: "string: unhappy", src: `{"v": "happy ${UNKNOWN}"}`, expected: cty.UnknownVal(cty.String), error: "Unknown variable", }, { name: "object_val: happy", src: `{"v": {"key": "happy ${VAR1}"}}`, expected: cty.ObjectVal(map[string]cty.Value{ "key": cty.StringVal("happy case"), }), }, { name: "object_val: unhappy", src: `{"v": {"key": "happy ${UNKNOWN}"}}`, expected: cty.ObjectVal(map[string]cty.Value{ "key": cty.UnknownVal(cty.String), }), error: "Unknown variable", }, { name: "object_key: happy", src: `{"v": {"happy ${VAR1}": "val"}}`, expected: cty.ObjectVal(map[string]cty.Value{ "happy case": cty.StringVal("val"), }), }, { name: "object_key: unhappy", src: `{"v": {"happy ${UNKNOWN}": "val"}}`, expected: cty.DynamicVal, error: "Unknown variable", }, { name: "array: happy", src: `{"v": ["happy ${VAR1}"]}`, expected: cty.TupleVal([]cty.Value{cty.StringVal("happy case")}), }, { name: "array: unhappy", src: `{"v": ["happy ${UNKNOWN}"]}`, expected: cty.TupleVal([]cty.Value{cty.UnknownVal(cty.String)}), error: "Unknown variable", }, } ctx := &hcl.EvalContext{ Variables: map[string]cty.Value{ "VAR1": cty.StringVal("case"), }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { file, diags := Parse([]byte(c.src), "") if len(diags) != 0 { t.Errorf("got %d diagnostics on parse; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } t.FailNow() } if file == nil { t.Errorf("got nil File; want actual file") } if file.Body == nil { t.Fatalf("got nil Body; want actual body") } attrs, diags := file.Body.JustAttributes() if len(diags) != 0 { t.Errorf("got %d diagnostics on decode; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } t.FailNow() } val, diags := attrs["v"].Expr.Value(ctx) if c.error == "" && len(diags) != 0 { t.Errorf("got %d diagnostics on eval; want 0", len(diags)) for _, diag := range diags { t.Logf("- %s", diag.Error()) } t.FailNow() } else if c.error != "" && len(diags) == 0 { t.Fatalf("got 0 diagnostics on eval, want 1 with %s", c.error) } else if c.error != "" && len(diags) != 0 { if !strings.Contains(diags[0].Error(), c.error) { t.Fatalf("found error: %s; want %s", diags[0].Error(), c.error) } } if !val.RawEquals(c.expected) { t.Errorf("wrong result %#v; want %#v", val, c.expected) } }) } } hcl-2.14.1/json/tokentype_string.go000066400000000000000000000015101431334125700172550ustar00rootroot00000000000000// Code generated by "stringer -type tokenType scanner.go"; DO NOT EDIT. 
package json import "strconv" const _tokenType_name = "tokenInvalidtokenCommatokenColontokenEqualstokenKeywordtokenNumbertokenStringtokenBrackOtokenBrackCtokenBraceOtokenBraceCtokenEOF" var _tokenType_map = map[tokenType]string{ 0: _tokenType_name[0:12], 44: _tokenType_name[12:22], 58: _tokenType_name[22:32], 61: _tokenType_name[32:43], 75: _tokenType_name[43:55], 78: _tokenType_name[55:66], 83: _tokenType_name[66:77], 91: _tokenType_name[77:88], 93: _tokenType_name[88:99], 123: _tokenType_name[99:110], 125: _tokenType_name[110:121], 9220: _tokenType_name[121:129], } func (i tokenType) String() string { if str, ok := _tokenType_map[i]; ok { return str } return "tokenType(" + strconv.FormatInt(int64(i), 10) + ")" } hcl-2.14.1/merged.go000066400000000000000000000144541431334125700141520ustar00rootroot00000000000000package hcl import ( "fmt" ) // MergeFiles combines the given files to produce a single body that contains // configuration from all of the given files. // // The ordering of the given files decides the order in which contained // elements will be returned. If any top-level attributes are defined with // the same name across multiple files, a diagnostic will be produced from // the Content and PartialContent methods describing this error in a // user-friendly way. func MergeFiles(files []*File) Body { var bodies []Body for _, file := range files { bodies = append(bodies, file.Body) } return MergeBodies(bodies) } // MergeBodies is like MergeFiles except it deals directly with bodies, rather // than with entire files. func MergeBodies(bodies []Body) Body { if len(bodies) == 0 { // Swap out for our singleton empty body, to reduce the number of // empty slices we have hanging around. return emptyBody } // If any of the given bodies are already merged bodies, we'll unpack // to flatten to a single mergedBodies, since that's conceptually simpler. // This also, as a side-effect, eliminates any empty bodies, since // empties are merged bodies with no inner bodies. var newLen int var flatten bool for _, body := range bodies { if children, merged := body.(mergedBodies); merged { newLen += len(children) flatten = true } else { newLen++ } } if !flatten { // not just newLen == len, because we might have mergedBodies with single bodies inside return mergedBodies(bodies) } if newLen == 0 { // Don't allocate a new empty when we already have one return emptyBody } new := make([]Body, 0, newLen) for _, body := range bodies { if children, merged := body.(mergedBodies); merged { new = append(new, children...) } else { new = append(new, body) } } return mergedBodies(new) } var emptyBody = mergedBodies([]Body{}) // EmptyBody returns a body with no content. This body can be used as a // placeholder when a body is required but no body content is available. func EmptyBody() Body { return emptyBody } type mergedBodies []Body // Content returns the content produced by applying the given schema to all // of the merged bodies and merging the result. // // Although required attributes _are_ supported, they should be used sparingly // with merged bodies since in this case there is no contextual information // with which to return good diagnostics. Applications working with merged // bodies may wish to mark all attributes as optional and then check for // required attributes afterwards, to produce better diagnostics. 
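//
// A sketch of that suggested pattern, from a calling application's point of
// view (the file list, schema, and attribute name are illustrative only):
//
//	content, diags := hcl.MergeFiles(files).Content(schemaWithAllAttrsOptional)
//	if content.Attributes["listen_addr"] == nil {
//		// append an application-specific "missing argument" diagnostic to diags
//	}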
func (mb mergedBodies) Content(schema *BodySchema) (*BodyContent, Diagnostics) { // the returned body will always be empty in this case, because mergedContent // will only ever call Content on the child bodies. content, _, diags := mb.mergedContent(schema, false) return content, diags } func (mb mergedBodies) PartialContent(schema *BodySchema) (*BodyContent, Body, Diagnostics) { return mb.mergedContent(schema, true) } func (mb mergedBodies) JustAttributes() (Attributes, Diagnostics) { attrs := make(map[string]*Attribute) var diags Diagnostics for _, body := range mb { thisAttrs, thisDiags := body.JustAttributes() if len(thisDiags) != 0 { diags = append(diags, thisDiags...) } if thisAttrs != nil { for name, attr := range thisAttrs { if existing := attrs[name]; existing != nil { diags = diags.Append(&Diagnostic{ Severity: DiagError, Summary: "Duplicate argument", Detail: fmt.Sprintf( "Argument %q was already set at %s", name, existing.NameRange.String(), ), Subject: &attr.NameRange, }) continue } attrs[name] = attr } } } return attrs, diags } func (mb mergedBodies) MissingItemRange() Range { if len(mb) == 0 { // Nothing useful to return here, so we'll return some garbage. return Range{ Filename: "", } } // arbitrarily use the first body's missing item range return mb[0].MissingItemRange() } func (mb mergedBodies) mergedContent(schema *BodySchema, partial bool) (*BodyContent, Body, Diagnostics) { // We need to produce a new schema with none of the attributes marked as // required, since _any one_ of our bodies can contribute an attribute value. // We'll separately check that all required attributes are present at // the end. mergedSchema := &BodySchema{ Blocks: schema.Blocks, } for _, attrS := range schema.Attributes { mergedAttrS := attrS mergedAttrS.Required = false mergedSchema.Attributes = append(mergedSchema.Attributes, mergedAttrS) } var mergedLeftovers []Body content := &BodyContent{ Attributes: map[string]*Attribute{}, } var diags Diagnostics for _, body := range mb { var thisContent *BodyContent var thisLeftovers Body var thisDiags Diagnostics if partial { thisContent, thisLeftovers, thisDiags = body.PartialContent(mergedSchema) } else { thisContent, thisDiags = body.Content(mergedSchema) } if thisLeftovers != nil { mergedLeftovers = append(mergedLeftovers, thisLeftovers) } if len(thisDiags) != 0 { diags = append(diags, thisDiags...) } if thisContent.Attributes != nil { for name, attr := range thisContent.Attributes { if existing := content.Attributes[name]; existing != nil { diags = diags.Append(&Diagnostic{ Severity: DiagError, Summary: "Duplicate argument", Detail: fmt.Sprintf( "Argument %q was already set at %s", name, existing.NameRange.String(), ), Subject: &attr.NameRange, }) continue } content.Attributes[name] = attr } } if len(thisContent.Blocks) != 0 { content.Blocks = append(content.Blocks, thisContent.Blocks...) } } // Finally, we check for required attributes. for _, attrS := range schema.Attributes { if !attrS.Required { continue } if content.Attributes[attrS.Name] == nil { // We don't have any context here to produce a good diagnostic, // which is why we warn in the Content docstring to minimize the // use of required attributes on merged bodies. 
diags = diags.Append(&Diagnostic{ Severity: DiagError, Summary: "Missing required argument", Detail: fmt.Sprintf( "The argument %q is required, but was not set.", attrS.Name, ), }) } } leftoverBody := MergeBodies(mergedLeftovers) return content, leftoverBody, diags } hcl-2.14.1/merged_test.go000066400000000000000000000301051431334125700152000ustar00rootroot00000000000000package hcl import ( "fmt" "reflect" "testing" "github.com/davecgh/go-spew/spew" ) func TestMergedBodiesContent(t *testing.T) { tests := []struct { Bodies []Body Schema *BodySchema Want *BodyContent DiagCount int }{ { []Body{}, &BodySchema{}, &BodyContent{ Attributes: map[string]*Attribute{}, }, 0, }, { []Body{}, &BodySchema{ Attributes: []AttributeSchema{ { Name: "name", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{}, }, 0, }, { []Body{}, &BodySchema{ Attributes: []AttributeSchema{ { Name: "name", Required: true, }, }, }, &BodyContent{ Attributes: map[string]*Attribute{}, }, 1, }, { []Body{ &testMergedBodiesVictim{ HasAttributes: []string{"name"}, }, }, &BodySchema{ Attributes: []AttributeSchema{ { Name: "name", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{ "name": &Attribute{ Name: "name", }, }, }, 0, }, { []Body{ &testMergedBodiesVictim{ Name: "first", HasAttributes: []string{"name"}, }, &testMergedBodiesVictim{ Name: "second", HasAttributes: []string{"name"}, }, }, &BodySchema{ Attributes: []AttributeSchema{ { Name: "name", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{ "name": &Attribute{ Name: "name", NameRange: Range{Filename: "first"}, }, }, }, 1, }, { []Body{ &testMergedBodiesVictim{ Name: "first", HasAttributes: []string{"name"}, }, &testMergedBodiesVictim{ Name: "second", HasAttributes: []string{"age"}, }, }, &BodySchema{ Attributes: []AttributeSchema{ { Name: "name", }, { Name: "age", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{ "name": &Attribute{ Name: "name", NameRange: Range{Filename: "first"}, }, "age": &Attribute{ Name: "age", NameRange: Range{Filename: "second"}, }, }, }, 0, }, { []Body{}, &BodySchema{ Blocks: []BlockHeaderSchema{ { Type: "pizza", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{}, }, 0, }, { []Body{ &testMergedBodiesVictim{ HasBlocks: map[string]int{ "pizza": 1, }, }, }, &BodySchema{ Blocks: []BlockHeaderSchema{ { Type: "pizza", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{}, Blocks: Blocks{ { Type: "pizza", }, }, }, 0, }, { []Body{ &testMergedBodiesVictim{ HasBlocks: map[string]int{ "pizza": 2, }, }, }, &BodySchema{ Blocks: []BlockHeaderSchema{ { Type: "pizza", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{}, Blocks: Blocks{ { Type: "pizza", }, { Type: "pizza", }, }, }, 0, }, { []Body{ &testMergedBodiesVictim{ Name: "first", HasBlocks: map[string]int{ "pizza": 1, }, }, &testMergedBodiesVictim{ Name: "second", HasBlocks: map[string]int{ "pizza": 1, }, }, }, &BodySchema{ Blocks: []BlockHeaderSchema{ { Type: "pizza", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{}, Blocks: Blocks{ { Type: "pizza", DefRange: Range{Filename: "first"}, }, { Type: "pizza", DefRange: Range{Filename: "second"}, }, }, }, 0, }, { []Body{ &testMergedBodiesVictim{ Name: "first", }, &testMergedBodiesVictim{ Name: "second", HasBlocks: map[string]int{ "pizza": 2, }, }, }, &BodySchema{ Blocks: []BlockHeaderSchema{ { Type: "pizza", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{}, Blocks: Blocks{ { Type: "pizza", DefRange: Range{Filename: "second"}, }, { Type: "pizza", DefRange: Range{Filename: "second"}, }, }, }, 0, }, { 
[]Body{ &testMergedBodiesVictim{ Name: "first", HasBlocks: map[string]int{ "pizza": 2, }, }, &testMergedBodiesVictim{ Name: "second", }, }, &BodySchema{ Blocks: []BlockHeaderSchema{ { Type: "pizza", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{}, Blocks: Blocks{ { Type: "pizza", DefRange: Range{Filename: "first"}, }, { Type: "pizza", DefRange: Range{Filename: "first"}, }, }, }, 0, }, { []Body{ &testMergedBodiesVictim{ Name: "first", }, &testMergedBodiesVictim{ Name: "second", }, }, &BodySchema{ Blocks: []BlockHeaderSchema{ { Type: "pizza", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{}, }, 0, }, } for i, test := range tests { t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) { merged := MergeBodies(test.Bodies) got, diags := merged.Content(test.Schema) if len(diags) != test.DiagCount { t.Errorf("Wrong number of diagnostics %d; want %d", len(diags), test.DiagCount) for _, diag := range diags { t.Logf(" - %s", diag) } } if !reflect.DeepEqual(got, test.Want) { t.Errorf("wrong result\ngot: %s\nwant: %s", spew.Sdump(got), spew.Sdump(test.Want)) } }) } } func TestMergeBodiesPartialContent(t *testing.T) { tests := []struct { Bodies []Body Schema *BodySchema WantContent *BodyContent WantRemain Body DiagCount int }{ { []Body{}, &BodySchema{}, &BodyContent{ Attributes: map[string]*Attribute{}, }, mergedBodies{}, 0, }, { []Body{ &testMergedBodiesVictim{ Name: "first", HasAttributes: []string{"name", "age"}, }, }, &BodySchema{ Attributes: []AttributeSchema{ { Name: "name", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{ "name": &Attribute{ Name: "name", NameRange: Range{Filename: "first"}, }, }, }, mergedBodies{ &testMergedBodiesVictim{ Name: "first", HasAttributes: []string{"age"}, }, }, 0, }, { []Body{ &testMergedBodiesVictim{ Name: "first", HasAttributes: []string{"name", "age"}, }, &testMergedBodiesVictim{ Name: "second", HasAttributes: []string{"name", "pizza"}, }, }, &BodySchema{ Attributes: []AttributeSchema{ { Name: "name", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{ "name": &Attribute{ Name: "name", NameRange: Range{Filename: "first"}, }, }, }, mergedBodies{ &testMergedBodiesVictim{ Name: "first", HasAttributes: []string{"age"}, }, &testMergedBodiesVictim{ Name: "second", HasAttributes: []string{"pizza"}, }, }, 1, }, { []Body{ &testMergedBodiesVictim{ Name: "first", HasAttributes: []string{"name", "age"}, }, &testMergedBodiesVictim{ Name: "second", HasAttributes: []string{"pizza", "soda"}, }, }, &BodySchema{ Attributes: []AttributeSchema{ { Name: "name", }, { Name: "soda", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{ "name": &Attribute{ Name: "name", NameRange: Range{Filename: "first"}, }, "soda": &Attribute{ Name: "soda", NameRange: Range{Filename: "second"}, }, }, }, mergedBodies{ &testMergedBodiesVictim{ Name: "first", HasAttributes: []string{"age"}, }, &testMergedBodiesVictim{ Name: "second", HasAttributes: []string{"pizza"}, }, }, 0, }, { []Body{ &testMergedBodiesVictim{ Name: "first", HasBlocks: map[string]int{ "pizza": 1, }, }, &testMergedBodiesVictim{ Name: "second", HasBlocks: map[string]int{ "pizza": 1, "soda": 2, }, }, }, &BodySchema{ Blocks: []BlockHeaderSchema{ { Type: "pizza", }, }, }, &BodyContent{ Attributes: map[string]*Attribute{}, Blocks: Blocks{ { Type: "pizza", DefRange: Range{Filename: "first"}, }, { Type: "pizza", DefRange: Range{Filename: "second"}, }, }, }, mergedBodies{ &testMergedBodiesVictim{ Name: "first", HasAttributes: []string{}, HasBlocks: map[string]int{}, }, &testMergedBodiesVictim{ Name: "second", 
HasAttributes: []string{}, HasBlocks: map[string]int{ "soda": 2, }, }, }, 0, }, } for i, test := range tests { t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) { merged := MergeBodies(test.Bodies) got, gotRemain, diags := merged.PartialContent(test.Schema) if len(diags) != test.DiagCount { t.Errorf("Wrong number of diagnostics %d; want %d", len(diags), test.DiagCount) for _, diag := range diags { t.Logf(" - %s", diag) } } if !reflect.DeepEqual(got, test.WantContent) { t.Errorf("wrong content result\ngot: %s\nwant: %s", spew.Sdump(got), spew.Sdump(test.WantContent)) } if !reflect.DeepEqual(gotRemain, test.WantRemain) { t.Errorf("wrong remaining result\ngot: %s\nwant: %s", spew.Sdump(gotRemain), spew.Sdump(test.WantRemain)) } }) } } type testMergedBodiesVictim struct { Name string HasAttributes []string HasBlocks map[string]int DiagCount int } func (v *testMergedBodiesVictim) Content(schema *BodySchema) (*BodyContent, Diagnostics) { c, _, d := v.PartialContent(schema) return c, d } func (v *testMergedBodiesVictim) PartialContent(schema *BodySchema) (*BodyContent, Body, Diagnostics) { remain := &testMergedBodiesVictim{ Name: v.Name, HasAttributes: []string{}, } hasAttrs := map[string]struct{}{} for _, n := range v.HasAttributes { hasAttrs[n] = struct{}{} var found bool for _, attrS := range schema.Attributes { if n == attrS.Name { found = true break } } if !found { remain.HasAttributes = append(remain.HasAttributes, n) } } content := &BodyContent{ Attributes: map[string]*Attribute{}, } rng := Range{ Filename: v.Name, } for _, attrS := range schema.Attributes { _, has := hasAttrs[attrS.Name] if has { content.Attributes[attrS.Name] = &Attribute{ Name: attrS.Name, NameRange: rng, } } } if v.HasBlocks != nil { for _, blockS := range schema.Blocks { num := v.HasBlocks[blockS.Type] for i := 0; i < num; i++ { content.Blocks = append(content.Blocks, &Block{ Type: blockS.Type, DefRange: rng, }) } } remain.HasBlocks = map[string]int{} for n := range v.HasBlocks { var found bool for _, blockS := range schema.Blocks { if blockS.Type == n { found = true break } } if !found { remain.HasBlocks[n] = v.HasBlocks[n] } } } diags := make(Diagnostics, v.DiagCount) for i := range diags { diags[i] = &Diagnostic{ Severity: DiagError, Summary: fmt.Sprintf("Fake diagnostic %d", i), Detail: "For testing only.", Context: &rng, } } return content, remain, diags } func (v *testMergedBodiesVictim) JustAttributes() (Attributes, Diagnostics) { attrs := make(map[string]*Attribute) rng := Range{ Filename: v.Name, } for _, name := range v.HasAttributes { attrs[name] = &Attribute{ Name: name, NameRange: rng, } } diags := make(Diagnostics, v.DiagCount) for i := range diags { diags[i] = &Diagnostic{ Severity: DiagError, Summary: fmt.Sprintf("Fake diagnostic %d", i), Detail: "For testing only.", Context: &rng, } } return attrs, diags } func (v *testMergedBodiesVictim) MissingItemRange() Range { return Range{ Filename: v.Name, } } hcl-2.14.1/ops.go000066400000000000000000000332411431334125700135030ustar00rootroot00000000000000package hcl import ( "fmt" "math/big" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" ) // Index is a helper function that performs the same operation as the index // operator in the HCL expression language. That is, the result is the // same as it would be for collection[key] in a configuration expression. // // This is exported so that applications can perform indexing in a manner // consistent with how the language does it, including handling of null and // unknown values, etc. 
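//
// A brief illustrative sketch (the collection value and source range are
// hypothetical):
//
//	elem, moreDiags := hcl.Index(collVal, cty.NumberIntVal(0), rng.Ptr())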
// // Diagnostics are produced if the given combination of values is not valid. // Therefore a pointer to a source range must be provided to use in diagnostics, // though nil can be provided if the calling application is going to // ignore the subject of the returned diagnostics anyway. func Index(collection, key cty.Value, srcRange *Range) (cty.Value, Diagnostics) { const invalidIndex = "Invalid index" if collection.IsNull() { return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: "Attempt to index null value", Detail: "This value is null, so it does not have any indices.", Subject: srcRange, }, } } if key.IsNull() { return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: invalidIndex, Detail: "Can't use a null value as an indexing key.", Subject: srcRange, }, } } ty := collection.Type() kty := key.Type() if kty == cty.DynamicPseudoType || ty == cty.DynamicPseudoType { return cty.DynamicVal, nil } switch { case ty.IsListType() || ty.IsTupleType() || ty.IsMapType(): var wantType cty.Type switch { case ty.IsListType() || ty.IsTupleType(): wantType = cty.Number case ty.IsMapType(): wantType = cty.String default: // should never happen panic("don't know what key type we want") } key, keyErr := convert.Convert(key, wantType) if keyErr != nil { return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: invalidIndex, Detail: fmt.Sprintf( "The given key does not identify an element in this collection value: %s.", keyErr.Error(), ), Subject: srcRange, }, } } // Here we drop marks from HasIndex result, in order to allow basic // traversal of a marked list, tuple, or map in the same way we can // traverse a marked object has, _ := collection.HasIndex(key).Unmark() if !has.IsKnown() { if ty.IsTupleType() { return cty.DynamicVal, nil } else { return cty.UnknownVal(ty.ElementType()), nil } } if has.False() { if (ty.IsListType() || ty.IsTupleType()) && key.Type().Equals(cty.Number) { if key.IsKnown() && !key.IsNull() { // NOTE: we don't know what any marks might've represented // up at the calling application layer, so we must avoid // showing the literal number value in these error messages // in case the mark represents something important, such as // a value being "sensitive". key, _ := key.Unmark() bf := key.AsBigFloat() if _, acc := bf.Int(nil); acc != big.Exact { // We have a more specialized error message for the // situation of using a fractional number to index into // a sequence, because that will tend to happen if the // user is trying to use division to calculate an index // and not realizing that HCL does float division // rather than integer division. return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: invalidIndex, Detail: "The given key does not identify an element in this collection value: indexing a sequence requires a whole number, but the given index has a fractional part.", Subject: srcRange, }, } } if bf.Sign() < 0 { // Some other languages allow negative indices to // select "backwards" from the end of the sequence, // but HCL doesn't do that in order to give better // feedback if a dynamic index is calculated // incorrectly. 
return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: invalidIndex, Detail: "The given key does not identify an element in this collection value: a negative number is not a valid index for a sequence.", Subject: srcRange, }, } } if lenVal := collection.Length(); lenVal.IsKnown() && !lenVal.IsMarked() { // Length always returns a number, and we already // checked that it's a known number, so this is safe. lenBF := lenVal.AsBigFloat() var result big.Float result.Sub(bf, lenBF) if result.Sign() < 1 { if lenBF.Sign() == 0 { return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: invalidIndex, Detail: "The given key does not identify an element in this collection value: the collection has no elements.", Subject: srcRange, }, } } else { return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: invalidIndex, Detail: "The given key does not identify an element in this collection value: the given index is greater than or equal to the length of the collection.", Subject: srcRange, }, } } } } } } // If this is not one of the special situations we handled above // then we'll fall back on a very generic message. return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: invalidIndex, Detail: "The given key does not identify an element in this collection value.", Subject: srcRange, }, } } return collection.Index(key), nil case ty.IsObjectType(): wasNumber := key.Type() == cty.Number key, keyErr := convert.Convert(key, cty.String) if keyErr != nil { return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: invalidIndex, Detail: fmt.Sprintf( "The given key does not identify an element in this collection value: %s.", keyErr.Error(), ), Subject: srcRange, }, } } if !collection.IsKnown() { return cty.DynamicVal, nil } if !key.IsKnown() { return cty.DynamicVal, nil } key, _ = key.Unmark() attrName := key.AsString() if !ty.HasAttribute(attrName) { var suggestion string if wasNumber { // We note this only as an addendum to an error we would've // already returned anyway, because it is valid (albeit weird) // to have an attribute whose name is just decimal digits // and then access that attribute using a number whose // decimal representation is the same digits. suggestion = " An object only supports looking up attributes by name, not by numeric index." } return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: invalidIndex, Detail: fmt.Sprintf("The given key does not identify an element in this collection value.%s", suggestion), Subject: srcRange, }, } } return collection.GetAttr(attrName), nil case ty.IsSetType(): return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: invalidIndex, Detail: "Elements of a set are identified only by their value and don't have any separate index or key to select with, so it's only possible to perform operations across all elements of the set.", Subject: srcRange, }, } default: return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: invalidIndex, Detail: "This value does not have any indices.", Subject: srcRange, }, } } } // GetAttr is a helper function that performs the same operation as the // attribute access in the HCL expression language. That is, the result is the // same as it would be for obj.attr in a configuration expression. // // This is exported so that applications can access attributes in a manner // consistent with how the language does it, including handling of null and // unknown values, etc. // // Diagnostics are produced if the given combination of values is not valid. 
// Therefore a pointer to a source range must be provided to use in diagnostics, // though nil can be provided if the calling application is going to // ignore the subject of the returned diagnostics anyway. func GetAttr(obj cty.Value, attrName string, srcRange *Range) (cty.Value, Diagnostics) { if obj.IsNull() { return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: "Attempt to get attribute from null value", Detail: "This value is null, so it does not have any attributes.", Subject: srcRange, }, } } const unsupportedAttr = "Unsupported attribute" ty := obj.Type() switch { case ty.IsObjectType(): if !ty.HasAttribute(attrName) { return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: unsupportedAttr, Detail: fmt.Sprintf("This object does not have an attribute named %q.", attrName), Subject: srcRange, }, } } if !obj.IsKnown() { return cty.UnknownVal(ty.AttributeType(attrName)), nil } return obj.GetAttr(attrName), nil case ty.IsMapType(): if !obj.IsKnown() { return cty.UnknownVal(ty.ElementType()), nil } idx := cty.StringVal(attrName) // Here we drop marks from HasIndex result, in order to allow basic // traversal of a marked map in the same way we can traverse a marked // object hasIndex, _ := obj.HasIndex(idx).Unmark() if hasIndex.False() { return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: "Missing map element", Detail: fmt.Sprintf("This map does not have an element with the key %q.", attrName), Subject: srcRange, }, } } return obj.Index(idx), nil case ty == cty.DynamicPseudoType: return cty.DynamicVal, nil case ty.IsListType() && ty.ElementType().IsObjectType(): // It seems a common mistake to try to access attributes on a whole // list of objects rather than on a specific individual element, so // we have some extra hints for that case. switch { case ty.ElementType().HasAttribute(attrName): // This is a very strong indication that the user mistook the list // of objects for a single object, so we can be a little more // direct in our suggestion here. return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: unsupportedAttr, Detail: fmt.Sprintf("Can't access attributes on a list of objects. Did you mean to access attribute %q for a specific element of the list, or across all elements of the list?", attrName), Subject: srcRange, }, } default: return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: unsupportedAttr, Detail: "Can't access attributes on a list of objects. Did you mean to access an attribute for a specific element of the list, or across all elements of the list?", Subject: srcRange, }, } } case ty.IsSetType() && ty.ElementType().IsObjectType(): // This is similar to the previous case, but we can't give such a // direct suggestion because there is no mechanism to select a single // item from a set. // We could potentially suggest using a for expression or splat // operator here, but we typically don't get into syntax specifics // in hcl.GetAttr suggestions because it's a general function used in // various other situations, such as in application-specific operations // that might have a more constraint set of alternative approaches. return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: unsupportedAttr, Detail: "Can't access attributes on a set of objects. 
Did you mean to access an attribute across all elements of the set?", Subject: srcRange, }, } case ty.IsPrimitiveType(): return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: unsupportedAttr, Detail: fmt.Sprintf("Can't access attributes on a primitive-typed value (%s).", ty.FriendlyName()), Subject: srcRange, }, } default: return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: unsupportedAttr, Detail: "This value does not have any attributes.", Subject: srcRange, }, } } } // ApplyPath is a helper function that applies a cty.Path to a value using the // indexing and attribute access operations from HCL. // // This is similar to calling the path's own Apply method, but ApplyPath uses // the more relaxed typing rules that apply to these operations in HCL, rather // than cty's relatively-strict rules. ApplyPath is implemented in terms of // Index and GetAttr, and so it has the same behavior for individual steps // but will stop and return any errors returned by intermediate steps. // // Diagnostics are produced if the given path cannot be applied to the given // value. Therefore a pointer to a source range must be provided to use in // diagnostics, though nil can be provided if the calling application is going // to ignore the subject of the returned diagnostics anyway. func ApplyPath(val cty.Value, path cty.Path, srcRange *Range) (cty.Value, Diagnostics) { var diags Diagnostics for _, step := range path { var stepDiags Diagnostics switch ts := step.(type) { case cty.IndexStep: val, stepDiags = Index(val, ts.Key, srcRange) case cty.GetAttrStep: val, stepDiags = GetAttr(val, ts.Name, srcRange) default: // Should never happen because the above are all of the step types. diags = diags.Append(&Diagnostic{ Severity: DiagError, Summary: "Invalid path step", Detail: fmt.Sprintf("Go type %T is not a valid path step. This is a bug in this program.", step), Subject: srcRange, }) return cty.DynamicVal, diags } diags = append(diags, stepDiags...) 
if stepDiags.HasErrors() { return cty.DynamicVal, diags } } return val, diags } hcl-2.14.1/ops_test.go000066400000000000000000000260351431334125700145450ustar00rootroot00000000000000package hcl import ( "fmt" "testing" "github.com/zclconf/go-cty/cty" ) func TestApplyPath(t *testing.T) { tests := []struct { Start cty.Value Path cty.Path Want cty.Value WantErr string }{ { cty.StringVal("hello"), nil, cty.StringVal("hello"), ``, }, { cty.StringVal("hello"), (cty.Path)(nil).Index(cty.StringVal("boop")), cty.NilVal, `Invalid index: This value does not have any indices.`, }, { cty.StringVal("hello"), (cty.Path)(nil).Index(cty.NumberIntVal(0)), cty.NilVal, `Invalid index: This value does not have any indices.`, }, { cty.ListVal([]cty.Value{ cty.StringVal("hello"), }), (cty.Path)(nil).Index(cty.NumberIntVal(0)), cty.StringVal("hello"), ``, }, { cty.ListVal([]cty.Value{ cty.StringVal("hello"), }).Mark("x"), (cty.Path)(nil).Index(cty.NumberIntVal(0)), cty.StringVal("hello").Mark("x"), ``, }, { cty.TupleVal([]cty.Value{ cty.StringVal("hello"), }), (cty.Path)(nil).Index(cty.NumberIntVal(0)), cty.StringVal("hello"), ``, }, { cty.MapVal(map[string]cty.Value{ "a": cty.StringVal("foo").Mark("x"), "b": cty.StringVal("bar").Mark("x"), }).Mark("x"), cty.GetAttrPath("a"), cty.StringVal("foo").Mark("x"), ``, }, { cty.ListValEmpty(cty.String), (cty.Path)(nil).Index(cty.NumberIntVal(0)), cty.NilVal, `Invalid index: The given key does not identify an element in this collection value: the collection has no elements.`, }, { cty.ListVal([]cty.Value{ cty.StringVal("hello"), }), (cty.Path)(nil).Index(cty.NumberIntVal(1)), cty.NilVal, `Invalid index: The given key does not identify an element in this collection value: the given index is greater than or equal to the length of the collection.`, }, { cty.ListVal([]cty.Value{ cty.StringVal("hello"), }).Mark("boop"), // prevents us from making statements about the length of the list (cty.Path)(nil).Index(cty.NumberIntVal(1)), cty.NilVal, `Invalid index: The given key does not identify an element in this collection value.`, }, { cty.ListVal([]cty.Value{ cty.StringVal("hello"), }), (cty.Path)(nil).Index(cty.NumberIntVal(-1)), cty.NilVal, `Invalid index: The given key does not identify an element in this collection value: a negative number is not a valid index for a sequence.`, }, { cty.ListVal([]cty.Value{ cty.StringVal("hello"), }), (cty.Path)(nil).Index(cty.NumberFloatVal(0.5)), cty.NilVal, `Invalid index: The given key does not identify an element in this collection value: indexing a sequence requires a whole number, but the given index has a fractional part.`, }, { cty.ListVal([]cty.Value{ cty.StringVal("hello"), }), (cty.Path)(nil).Index(cty.NumberIntVal(0)).GetAttr("foo"), cty.NilVal, `Unsupported attribute: Can't access attributes on a primitive-typed value (string).`, }, { cty.ListVal([]cty.Value{ cty.EmptyObjectVal, }), (cty.Path)(nil).Index(cty.NumberIntVal(0)).GetAttr("foo"), cty.NilVal, `Unsupported attribute: This object does not have an attribute named "foo".`, }, { cty.ListVal([]cty.Value{ cty.EmptyObjectVal, }), (cty.Path)(nil).GetAttr("foo"), cty.NilVal, `Unsupported attribute: Can't access attributes on a list of objects. Did you mean to access an attribute for a specific element of the list, or across all elements of the list?`, }, { cty.ListVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "foo": cty.True, }), }), (cty.Path)(nil).GetAttr("foo"), cty.NilVal, `Unsupported attribute: Can't access attributes on a list of objects. 
Did you mean to access attribute "foo" for a specific element of the list, or across all elements of the list?`, }, { cty.EmptyTupleVal, (cty.Path)(nil).Index(cty.NumberIntVal(0)), cty.NilVal, `Invalid index: The given key does not identify an element in this collection value: the collection has no elements.`, }, { cty.TupleVal([]cty.Value{ cty.StringVal("hello"), }), (cty.Path)(nil).Index(cty.NumberIntVal(1)), cty.NilVal, `Invalid index: The given key does not identify an element in this collection value: the given index is greater than or equal to the length of the collection.`, }, { cty.TupleVal([]cty.Value{ cty.StringVal("hello"), }).Mark("boop"), (cty.Path)(nil).Index(cty.NumberIntVal(1)), cty.NilVal, `Invalid index: The given key does not identify an element in this collection value.`, }, { cty.TupleVal([]cty.Value{ cty.StringVal("hello"), }), (cty.Path)(nil).Index(cty.NumberIntVal(-1)), cty.NilVal, `Invalid index: The given key does not identify an element in this collection value: a negative number is not a valid index for a sequence.`, }, { cty.TupleVal([]cty.Value{ cty.StringVal("hello"), }), (cty.Path)(nil).Index(cty.NumberFloatVal(0.5)), cty.NilVal, `Invalid index: The given key does not identify an element in this collection value: indexing a sequence requires a whole number, but the given index has a fractional part.`, }, { cty.TupleVal([]cty.Value{ cty.StringVal("hello"), }), (cty.Path)(nil).Index(cty.NumberIntVal(0)).GetAttr("foo"), cty.NilVal, `Unsupported attribute: Can't access attributes on a primitive-typed value (string).`, }, { cty.TupleVal([]cty.Value{ cty.EmptyObjectVal, }), (cty.Path)(nil).Index(cty.NumberIntVal(0)).GetAttr("foo"), cty.NilVal, `Unsupported attribute: This object does not have an attribute named "foo".`, }, { cty.TupleVal([]cty.Value{ cty.EmptyObjectVal, }), (cty.Path)(nil).GetAttr("foo"), cty.NilVal, `Unsupported attribute: This value does not have any attributes.`, }, { cty.TupleVal([]cty.Value{ cty.ObjectVal(map[string]cty.Value{ "foo": cty.True, }), }), (cty.Path)(nil).GetAttr("foo"), cty.NilVal, `Unsupported attribute: This value does not have any attributes.`, }, { cty.SetVal([]cty.Value{ cty.StringVal("hello"), }), (cty.Path)(nil).Index(cty.NumberIntVal(1)), cty.NilVal, `Invalid index: Elements of a set are identified only by their value and don't have any separate index or key to select with, so it's only possible to perform operations across all elements of the set.`, }, { cty.SetVal([]cty.Value{ cty.EmptyObjectVal, }), (cty.Path)(nil).GetAttr("foo"), cty.NilVal, `Unsupported attribute: Can't access attributes on a set of objects. 
Did you mean to access an attribute across all elements of the set?`, }, { cty.NullVal(cty.List(cty.String)), (cty.Path)(nil).Index(cty.NumberIntVal(0)), cty.NilVal, `Attempt to index null value: This value is null, so it does not have any indices.`, }, { cty.NullVal(cty.Map(cty.String)), (cty.Path)(nil).Index(cty.NumberIntVal(0)), cty.NilVal, `Attempt to index null value: This value is null, so it does not have any indices.`, }, { cty.NullVal(cty.EmptyObject), (cty.Path)(nil).GetAttr("foo"), cty.NilVal, `Attempt to get attribute from null value: This value is null, so it does not have any attributes.`, }, } for _, test := range tests { t.Run(fmt.Sprintf("%#v %#v", test.Start, test.Path), func(t *testing.T) { got, diags := ApplyPath(test.Start, test.Path, nil) t.Logf("testing ApplyPath\nstart: %#v\npath: %#v", test.Start, test.Path) for _, diag := range diags { t.Logf(diag.Error()) } if test.WantErr != "" { if !diags.HasErrors() { t.Fatalf("succeeded, but want error\nwant error: %s", test.WantErr) } if len(diags) != 1 { t.Fatalf("wrong number of diagnostics %d; want 1", len(diags)) } if gotErrStr := diags[0].Summary + ": " + diags[0].Detail; gotErrStr != test.WantErr { t.Fatalf("wrong error\ngot error: %s\nwant error: %s", gotErrStr, test.WantErr) } return } if diags.HasErrors() { t.Fatalf("failed, but want success\ngot diagnostics:\n%s", diags.Error()) } if !test.Want.RawEquals(got) { t.Fatalf("wrong result\ngot: %#v\nwant: %#v", got, test.Want) } }) } } func TestIndex(t *testing.T) { tests := map[string]struct { coll cty.Value key cty.Value want cty.Value err string }{ "marked key to maked value": { coll: cty.ListVal([]cty.Value{ cty.StringVal("a"), }), key: cty.NumberIntVal(0).Mark("marked"), want: cty.StringVal("a").Mark("marked"), }, "missing list key": { coll: cty.ListVal([]cty.Value{ cty.StringVal("a"), }), key: cty.NumberIntVal(1).Mark("marked"), want: cty.DynamicVal, err: "Invalid index", }, "null marked key": { coll: cty.ListVal([]cty.Value{ cty.StringVal("a"), }), key: cty.NullVal(cty.Number).Mark("marked"), want: cty.DynamicVal, err: "Invalid index", }, "dynamic key": { coll: cty.ListVal([]cty.Value{ cty.StringVal("a"), }), key: cty.DynamicVal, want: cty.DynamicVal, }, "invalid marked key type": { coll: cty.ListVal([]cty.Value{ cty.StringVal("a"), }), key: cty.StringVal("foo").Mark("marked"), want: cty.DynamicVal, err: "Invalid index", }, "marked map key": { coll: cty.MapVal(map[string]cty.Value{ "foo": cty.StringVal("a"), }), key: cty.StringVal("foo").Mark("marked"), want: cty.StringVal("a").Mark("marked"), }, "missing marked map key": { coll: cty.MapVal(map[string]cty.Value{ "foo": cty.StringVal("a"), }), key: cty.StringVal("bar").Mark("mark"), want: cty.DynamicVal, err: "Invalid index", }, "marked object key": { coll: cty.ObjectVal(map[string]cty.Value{ "foo": cty.StringVal("a"), }), key: cty.StringVal("foo").Mark("marked"), // an object attribute is fetched by string index, and the marks // are not maintained want: cty.StringVal("a"), }, "invalid marked object key type": { coll: cty.ObjectVal(map[string]cty.Value{ "foo": cty.StringVal("a"), }), key: cty.ListVal([]cty.Value{cty.NullVal(cty.String)}).Mark("marked"), want: cty.DynamicVal, err: "Invalid index", }, "invalid marked object key": { coll: cty.ObjectVal(map[string]cty.Value{ "foo": cty.StringVal("a"), }), key: cty.NumberIntVal(0).Mark("marked"), want: cty.DynamicVal, err: "Invalid index", }, } for name, tc := range tests { t.Run(name, func(t *testing.T) { t.Logf("testing Index\ncollection: %#v\nkey: %#v", tc.coll, 
tc.key) got, diags := Index(tc.coll, tc.key, nil) for _, diag := range diags { t.Logf(diag.Error()) } if tc.err != "" { if !diags.HasErrors() { t.Fatalf("succeeded, but want error\nwant error: %s", tc.err) } if len(diags) != 1 { t.Fatalf("wrong number of diagnostics %d; want 1", len(diags)) } if gotErrStr := diags[0].Summary; gotErrStr != tc.err { t.Fatalf("wrong error\ngot error: %s\nwant error: %s", gotErrStr, tc.err) } return } if diags.HasErrors() { t.Fatalf("failed, but want success\ngot diagnostics:\n%s", diags.Error()) } if !tc.want.RawEquals(got) { t.Fatalf("wrong result\ngot: %#v\nwant: %#v", got, tc.want) } }) } } hcl-2.14.1/pos.go000066400000000000000000000176101431334125700135050ustar00rootroot00000000000000package hcl import "fmt" // Pos represents a single position in a source file, by addressing the // start byte of a unicode character encoded in UTF-8. // // Pos is generally used only in the context of a Range, which then defines // which source file the position is within. type Pos struct { // Line is the source code line where this position points. Lines are // counted starting at 1 and incremented for each newline character // encountered. Line int // Column is the source code column where this position points, in // unicode characters, with counting starting at 1. // // Column counts characters as they appear visually, so for example a // latin letter with a combining diacritic mark counts as one character. // This is intended for rendering visual markers against source code in // contexts where these diacritics would be rendered in a single character // cell. Technically speaking, Column is counting grapheme clusters as // used in unicode normalization. Column int // Byte is the byte offset into the file where the indicated character // begins. This is a zero-based offset to the first byte of the first // UTF-8 codepoint sequence in the character, and thus gives a position // that can be resolved _without_ awareness of Unicode characters. Byte int } // InitialPos is a suitable position to use to mark the start of a file. var InitialPos = Pos{Byte: 0, Line: 1, Column: 1} // Range represents a span of characters between two positions in a source // file. // // This struct is usually used by value in types that represent AST nodes, // but by pointer in types that refer to the positions of other objects, // such as in diagnostics. type Range struct { // Filename is the name of the file into which this range's positions // point. Filename string // Start and End represent the bounds of this range. Start is inclusive // and End is exclusive. Start, End Pos } // RangeBetween returns a new range that spans from the beginning of the // start range to the end of the end range. // // The result is meaningless if the two ranges do not belong to the same // source file or if the end range appears before the start range. func RangeBetween(start, end Range) Range { return Range{ Filename: start.Filename, Start: start.Start, End: end.End, } } // RangeOver returns a new range that covers both of the given ranges and // possibly additional content between them if the two ranges do not overlap. // // If either range is empty then it is ignored. The result is empty if both // given ranges are empty. // // The result is meaningless if the two ranges to not belong to the same // source file. 
func RangeOver(a, b Range) Range { if a.Empty() { return b } if b.Empty() { return a } var start, end Pos if a.Start.Byte < b.Start.Byte { start = a.Start } else { start = b.Start } if a.End.Byte > b.End.Byte { end = a.End } else { end = b.End } return Range{ Filename: a.Filename, Start: start, End: end, } } // ContainsPos returns true if and only if the given position is contained within // the receiving range. // // In the unlikely case that the line/column information disagree with the byte // offset information in the given position or receiving range, the byte // offsets are given priority. func (r Range) ContainsPos(pos Pos) bool { return r.ContainsOffset(pos.Byte) } // ContainsOffset returns true if and only if the given byte offset is within // the receiving Range. func (r Range) ContainsOffset(offset int) bool { return offset >= r.Start.Byte && offset < r.End.Byte } // Ptr returns a pointer to a copy of the receiver. This is a convenience when // ranges in places where pointers are required, such as in Diagnostic, but // the range in question is returned from a method. Go would otherwise not // allow one to take the address of a function call. func (r Range) Ptr() *Range { return &r } // String returns a compact string representation of the receiver. // Callers should generally prefer to present a range more visually, // e.g. via markers directly on the relevant portion of source code. func (r Range) String() string { if r.Start.Line == r.End.Line { return fmt.Sprintf( "%s:%d,%d-%d", r.Filename, r.Start.Line, r.Start.Column, r.End.Column, ) } else { return fmt.Sprintf( "%s:%d,%d-%d,%d", r.Filename, r.Start.Line, r.Start.Column, r.End.Line, r.End.Column, ) } } func (r Range) Empty() bool { return r.Start.Byte == r.End.Byte } // CanSliceBytes returns true if SliceBytes could return an accurate // sub-slice of the given slice. // // This effectively tests whether the start and end offsets of the range // are within the bounds of the slice, and thus whether SliceBytes can be // trusted to produce an accurate start and end position within that slice. func (r Range) CanSliceBytes(b []byte) bool { switch { case r.Start.Byte < 0 || r.Start.Byte > len(b): return false case r.End.Byte < 0 || r.End.Byte > len(b): return false case r.End.Byte < r.Start.Byte: return false default: return true } } // SliceBytes returns a sub-slice of the given slice that is covered by the // receiving range, assuming that the given slice is the source code of the // file indicated by r.Filename. // // If the receiver refers to any byte offsets that are outside of the slice // then the result is constrained to the overlapping portion only, to avoid // a panic. Use CanSliceBytes to determine if the result is guaranteed to // be an accurate span of the requested range. func (r Range) SliceBytes(b []byte) []byte { start := r.Start.Byte end := r.End.Byte if start < 0 { start = 0 } else if start > len(b) { start = len(b) } if end < 0 { end = 0 } else if end > len(b) { end = len(b) } if end < start { end = start } return b[start:end] } // Overlaps returns true if the receiver and the other given range share any // characters in common. 
func (r Range) Overlaps(other Range) bool { switch { case r.Filename != other.Filename: // If the ranges are in different files then they can't possibly overlap return false case r.Empty() || other.Empty(): // Empty ranges can never overlap return false case r.ContainsOffset(other.Start.Byte) || r.ContainsOffset(other.End.Byte): return true case other.ContainsOffset(r.Start.Byte) || other.ContainsOffset(r.End.Byte): return true default: return false } } // Overlap finds a range that is either identical to or a sub-range of both // the receiver and the other given range. It returns an empty range // within the receiver if there is no overlap between the two ranges. // // A non-empty result is either identical to or a subset of the receiver. func (r Range) Overlap(other Range) Range { if !r.Overlaps(other) { // Start == End indicates an empty range return Range{ Filename: r.Filename, Start: r.Start, End: r.Start, } } var start, end Pos if r.Start.Byte > other.Start.Byte { start = r.Start } else { start = other.Start } if r.End.Byte < other.End.Byte { end = r.End } else { end = other.End } return Range{ Filename: r.Filename, Start: start, End: end, } } // PartitionAround finds the portion of the given range that overlaps with // the reciever and returns three ranges: the portion of the reciever that // precedes the overlap, the overlap itself, and then the portion of the // reciever that comes after the overlap. // // If the two ranges do not overlap then all three returned ranges are empty. // // If the given range aligns with or extends beyond either extent of the // reciever then the corresponding outer range will be empty. func (r Range) PartitionAround(other Range) (before, overlap, after Range) { overlap = r.Overlap(other) if overlap.Empty() { return overlap, overlap, overlap } before = Range{ Filename: r.Filename, Start: r.Start, End: overlap.Start, } after = Range{ Filename: r.Filename, Start: overlap.End, End: r.End, } return before, overlap, after } hcl-2.14.1/pos_scanner.go000066400000000000000000000112241431334125700152110ustar00rootroot00000000000000package hcl import ( "bufio" "bytes" "github.com/apparentlymart/go-textseg/v13/textseg" ) // RangeScanner is a helper that will scan over a buffer using a bufio.SplitFunc // and visit a source range for each token matched. // // For example, this can be used with bufio.ScanLines to find the source range // for each line in the file, skipping over the actual newline characters, which // may be useful when printing source code snippets as part of diagnostic // messages. // // The line and column information in the returned ranges is produced by // counting newline characters and grapheme clusters respectively, which // mimics the behavior we expect from a parser when producing ranges. type RangeScanner struct { filename string b []byte cb bufio.SplitFunc pos Pos // position of next byte to process in b cur Range // latest range tok []byte // slice of b that is covered by cur err error // error from last scan, if any } // NewRangeScanner creates a new RangeScanner for the given buffer, producing // ranges for the given filename. // // Since ranges have grapheme-cluster granularity rather than byte granularity, // the scanner will produce incorrect results if the given SplitFunc creates // tokens between grapheme cluster boundaries. In particular, it is incorrect // to use RangeScanner with bufio.ScanRunes because it will produce tokens // around individual UTF-8 sequences, which will split any multi-sequence // grapheme clusters. 
func NewRangeScanner(b []byte, filename string, cb bufio.SplitFunc) *RangeScanner { return NewRangeScannerFragment(b, filename, InitialPos, cb) } // NewRangeScannerFragment is like NewRangeScanner but the ranges it produces // will be offset by the given starting position, which is appropriate for // sub-slices of a file, whereas NewRangeScanner assumes it is scanning an // entire file. func NewRangeScannerFragment(b []byte, filename string, start Pos, cb bufio.SplitFunc) *RangeScanner { return &RangeScanner{ filename: filename, b: b, cb: cb, pos: start, } } func (sc *RangeScanner) Scan() bool { if sc.pos.Byte >= len(sc.b) || sc.err != nil { // All done return false } // Since we're operating on an in-memory buffer, we always pass the whole // remainder of the buffer to our SplitFunc and set isEOF to let it know // that it has the whole thing. advance, token, err := sc.cb(sc.b[sc.pos.Byte:], true) // Since we are setting isEOF to true this should never happen, but // if it does we will just abort and assume the SplitFunc is misbehaving. if advance == 0 && token == nil && err == nil { return false } if err != nil { sc.err = err sc.cur = Range{ Filename: sc.filename, Start: sc.pos, End: sc.pos, } sc.tok = nil return false } sc.tok = token start := sc.pos end := sc.pos new := sc.pos // adv is similar to token but it also includes any subsequent characters // we're being asked to skip over by the SplitFunc. // adv is a slice covering any additional bytes we are skipping over, based // on what the SplitFunc told us to do with advance. adv := sc.b[sc.pos.Byte : sc.pos.Byte+advance] // We now need to scan over our token to count the grapheme clusters // so we can correctly advance Column, and count the newlines so we // can correctly advance Line. advR := bytes.NewReader(adv) gsc := bufio.NewScanner(advR) advanced := 0 gsc.Split(textseg.ScanGraphemeClusters) for gsc.Scan() { gr := gsc.Bytes() new.Byte += len(gr) new.Column++ // We rely here on the fact that \r\n is considered a grapheme cluster // and so we don't need to worry about miscounting additional lines // on files with Windows-style line endings. if len(gr) != 0 && (gr[0] == '\r' || gr[0] == '\n') { new.Column = 1 new.Line++ } if advanced < len(token) { // If we've not yet found the end of our token then we'll // also push our "end" marker along. // (if advance > len(token) then we'll stop moving "end" early // so that the caller only sees the range covered by token.) end = new } advanced += len(gr) } sc.cur = Range{ Filename: sc.filename, Start: start, End: end, } sc.pos = new return true } // Range returns a range that covers the latest token obtained after a call // to Scan returns true. func (sc *RangeScanner) Range() Range { return sc.cur } // Bytes returns the slice of the input buffer that is covered by the range // that would be returned by Range. func (sc *RangeScanner) Bytes() []byte { return sc.tok } // Err can be called after Scan returns false to determine if the latest read // resulted in an error, and obtain that error if so. 
func (sc *RangeScanner) Err() error { return sc.err } hcl-2.14.1/pos_scanner_test.go000066400000000000000000000100031431334125700162420ustar00rootroot00000000000000package hcl import ( "bufio" "reflect" "testing" "github.com/davecgh/go-spew/spew" ) func TestPosScanner(t *testing.T) { tests := map[string]struct { Input string Want []Range WantToks [][]byte }{ "empty": { "", []Range{}, [][]byte{}, }, "single line": { "hello", []Range{ { Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, [][]byte{ []byte("hello"), }, }, "single line with trailing UNIX newline": { "hello\n", []Range{ { Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, [][]byte{ []byte("hello"), }, }, "single line with trailing Windows newline": { "hello\r\n", []Range{ { Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, [][]byte{ []byte("hello"), }, }, "two lines with UNIX newline": { "hello\nworld", []Range{ { Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, { Start: Pos{Byte: 6, Line: 2, Column: 1}, End: Pos{Byte: 11, Line: 2, Column: 6}, }, }, [][]byte{ []byte("hello"), []byte("world"), }, }, "two lines with Windows newline": { "hello\r\nworld", []Range{ { Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, { Start: Pos{Byte: 7, Line: 2, Column: 1}, End: Pos{Byte: 12, Line: 2, Column: 6}, }, }, [][]byte{ []byte("hello"), []byte("world"), }, }, "blank line with UNIX newlines": { "hello\n\nworld", []Range{ { Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, { Start: Pos{Byte: 6, Line: 2, Column: 1}, End: Pos{Byte: 6, Line: 2, Column: 1}, }, { Start: Pos{Byte: 7, Line: 3, Column: 1}, End: Pos{Byte: 12, Line: 3, Column: 6}, }, }, [][]byte{ []byte("hello"), []byte(""), []byte("world"), }, }, "blank line with Windows newlines": { "hello\r\n\r\nworld", []Range{ { Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, { Start: Pos{Byte: 7, Line: 2, Column: 1}, End: Pos{Byte: 7, Line: 2, Column: 1}, }, { Start: Pos{Byte: 9, Line: 3, Column: 1}, End: Pos{Byte: 14, Line: 3, Column: 6}, }, }, [][]byte{ []byte("hello"), []byte(""), []byte("world"), }, }, "two lines with combiner and UNIX newline": { "foo \U0001f469\U0001f3ff bar\nbaz", []Range{ { Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 16, Line: 1, Column: 10}, }, { Start: Pos{Byte: 17, Line: 2, Column: 1}, End: Pos{Byte: 20, Line: 2, Column: 4}, }, }, [][]byte{ []byte("foo \U0001f469\U0001f3ff bar"), []byte("baz"), }, }, "two lines with combiner and Windows newline": { "foo \U0001f469\U0001f3ff bar\r\nbaz", []Range{ { Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 16, Line: 1, Column: 10}, }, { Start: Pos{Byte: 18, Line: 2, Column: 1}, End: Pos{Byte: 21, Line: 2, Column: 4}, }, }, [][]byte{ []byte("foo \U0001f469\U0001f3ff bar"), []byte("baz"), }, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { src := []byte(test.Input) sc := NewRangeScanner(src, "", bufio.ScanLines) got := make([]Range, 0) gotToks := make([][]byte, 0) for sc.Scan() { got = append(got, sc.Range()) gotToks = append(gotToks, sc.Bytes()) } if sc.Err() != nil { t.Fatalf("unexpected error: %s", sc.Err()) } if !reflect.DeepEqual(got, test.Want) { t.Errorf("incorrect ranges\ngot: %swant: %s", spew.Sdump(got), spew.Sdump(test.Want)) } if !reflect.DeepEqual(gotToks, test.WantToks) { t.Errorf("incorrect tokens\ngot: %swant: %s", spew.Sdump(gotToks), 
spew.Sdump(test.WantToks)) } }) } } hcl-2.14.1/pos_test.go000066400000000000000000000275001431334125700145430ustar00rootroot00000000000000package hcl import ( "bytes" "fmt" "reflect" "testing" ) func TestRangeOver(t *testing.T) { tests := []struct { A Range B Range Want Range }{ { Range{ // ## Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Range{ // #### Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // ##### Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Range{ // #### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 6, Line: 1, Column: 7}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // ##### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 6, Line: 1, Column: 7}, }, }, { Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // ## Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Range{ // ### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Range{ // ### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Range{ // #### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // #### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // #### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Range{ // ## Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 2, Line: 1, Column: 3}, }, Range{ // ## Start: Pos{Byte: 4, Line: 1, Column: 5}, End: Pos{Byte: 6, Line: 1, Column: 7}, }, Range{ // ###### Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 6, Line: 1, Column: 7}, }, }, { Range{ // ## Start: Pos{Byte: 4, Line: 1, Column: 5}, End: Pos{Byte: 6, Line: 1, Column: 7}, }, Range{ // ## Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 2, Line: 1, Column: 3}, }, Range{ // ###### Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 6, Line: 1, Column: 7}, }, }, } for _, test := range tests { t.Run(fmt.Sprintf("%s<=>%s", test.A, test.B), func(t *testing.T) { got := RangeOver(test.A, test.B) if !reflect.DeepEqual(got, test.Want) { t.Errorf( "wrong result\nA : %-10s %s\nB : %-10s %s\ngot : %-10s %s\nwant: %-10s %s", visRangeOffsets(test.A), test.A, visRangeOffsets(test.B), test.B, visRangeOffsets(got), got, visRangeOffsets(test.Want), test.Want, ) } }) } } func TestPosOverlap(t *testing.T) { tests := []struct { A Range B Range Want Range }{ { Range{ // ## Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ 
// #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // ## Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Range{ // #### Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // ### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Range{ // #### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 6, Line: 1, Column: 7}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // ### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // ## Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // ## Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Range{ // ### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // ### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Range{ // ### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // ### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Range{ // #### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // #### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // #### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Range{ // ## Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 2, Line: 1, Column: 3}, }, Range{ // ## Start: Pos{Byte: 4, Line: 1, Column: 5}, End: Pos{Byte: 6, Line: 1, Column: 7}, }, Range{ // (no overlap) Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 0, Line: 1, Column: 1}, }, }, { Range{ // ## Start: Pos{Byte: 4, Line: 1, Column: 5}, End: Pos{Byte: 6, Line: 1, Column: 7}, }, Range{ // ## Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 2, Line: 1, Column: 3}, }, Range{ // (no overlap) Start: Pos{Byte: 4, Line: 1, Column: 5}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, }, } for _, test := range tests { t.Run(fmt.Sprintf("%s<=>%s", test.A, test.B), func(t *testing.T) { got := test.A.Overlap(test.B) if !reflect.DeepEqual(got, test.Want) { t.Errorf( "wrong result\nA : %-10s %s\nB : %-10s %s\ngot : %-10s %s\nwant: %-10s %s", visRangeOffsets(test.A), test.A, visRangeOffsets(test.B), test.B, visRangeOffsets(got), got, visRangeOffsets(test.Want), test.Want, ) } }) } } func TestRangePartitionAround(t *testing.T) { tests := []struct { Outer Range Inner Range WantBefore Range WantOverlap Range WantAfter Range }{ { Range{ // ## Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // (empty) Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 2, Line: 1, Column: 3}, }, Range{ // ## Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // (empty) Start: Pos{Byte: 4, 
Line: 1, Column: 5}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Range{ // #### Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // # Start: Pos{Byte: 0, Line: 1, Column: 1}, End: Pos{Byte: 1, Line: 1, Column: 2}, }, Range{ // ### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // (empty) Start: Pos{Byte: 4, Line: 1, Column: 5}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, }, { Range{ // #### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // (empty) Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 2, Line: 1, Column: 3}, }, Range{ // ### Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // # Start: Pos{Byte: 5, Line: 1, Column: 6}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, { Range{ // #### Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, Range{ // ## Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // # Start: Pos{Byte: 1, Line: 1, Column: 2}, End: Pos{Byte: 2, Line: 1, Column: 3}, }, Range{ // ## Start: Pos{Byte: 2, Line: 1, Column: 3}, End: Pos{Byte: 4, Line: 1, Column: 5}, }, Range{ // # Start: Pos{Byte: 4, Line: 1, Column: 5}, End: Pos{Byte: 5, Line: 1, Column: 6}, }, }, } for _, test := range tests { t.Run(fmt.Sprintf("%s around %s", test.Outer, test.Inner), func(t *testing.T) { gotBefore, gotOverlap, gotAfter := test.Outer.PartitionAround(test.Inner) if !reflect.DeepEqual(gotBefore, test.WantBefore) { t.Errorf( "wrong before\nA : %-10s %s\nB : %-10s %s\ngot : %-10s %s\nwant: %-10s %s", visRangeOffsets(test.Outer), test.Outer, visRangeOffsets(test.Inner), test.Inner, visRangeOffsets(gotBefore), gotBefore, visRangeOffsets(test.WantBefore), test.WantBefore, ) } if !reflect.DeepEqual(gotOverlap, test.WantOverlap) { t.Errorf( "wrong overlap\nA : %-10s %s\nB : %-10s %s\ngot : %-10s %s\nwant: %-10s %s", visRangeOffsets(test.Outer), test.Outer, visRangeOffsets(test.Inner), test.Inner, visRangeOffsets(gotOverlap), gotOverlap, visRangeOffsets(test.WantOverlap), test.WantOverlap, ) } if !reflect.DeepEqual(gotAfter, test.WantAfter) { t.Errorf( "wrong after\nA : %-10s %s\nB : %-10s %s\ngot : %-10s %s\nwant: %-10s %s", visRangeOffsets(test.Outer), test.Outer, visRangeOffsets(test.Inner), test.Inner, visRangeOffsets(gotAfter), gotAfter, visRangeOffsets(test.WantAfter), test.WantAfter, ) } }) } } // visRangeOffsets is a helper that produces a visual representation of the // start and end byte offsets of the given range, which can then be stacked // with the same for other ranges to more easily see how the ranges relate // to one another. func visRangeOffsets(rng Range) string { var buf bytes.Buffer if rng.End.Byte < rng.Start.Byte { // Should never happen, but we'll visualize it anyway so we can // more easily debug failing tests. 
for i := 0; i < rng.End.Byte; i++ { buf.WriteByte(' ') } for i := rng.End.Byte; i < rng.Start.Byte; i++ { buf.WriteByte('!') } return buf.String() } for i := 0; i < rng.Start.Byte; i++ { buf.WriteByte(' ') } for i := rng.Start.Byte; i < rng.End.Byte; i++ { buf.WriteByte('#') } return buf.String() } hcl-2.14.1/schema.go000066400000000000000000000010321431334125700141330ustar00rootroot00000000000000package hcl // BlockHeaderSchema represents the shape of a block header, and is // used for matching blocks within bodies. type BlockHeaderSchema struct { Type string LabelNames []string } // AttributeSchema represents the requirements for an attribute, and is used // for matching attributes within bodies. type AttributeSchema struct { Name string Required bool } // BodySchema represents the desired shallow structure of a body. type BodySchema struct { Attributes []AttributeSchema Blocks []BlockHeaderSchema } hcl-2.14.1/spec.md000066400000000000000000000775111431334125700136370ustar00rootroot00000000000000# HCL Syntax-Agnostic Information Model This is the specification for the general information model (abstract types and semantics) for hcl. HCL is a system for defining configuration languages for applications. The HCL information model is designed to support multiple concrete syntaxes for configuration, each with a mapping to the model defined in this specification. The two primary syntaxes intended for use in conjunction with this model are [the HCL native syntax](./hclsyntax/spec.md) and [the JSON syntax](./json/spec.md). In principle other syntaxes are possible as long as either their language model is sufficiently rich to express the concepts described in this specification or the language targets a well-defined subset of the specification. ## Structural Elements The primary structural element is the _body_, which is a container representing a set of zero or more _attributes_ and a set of zero or more _blocks_. A _configuration file_ is the top-level object, and will usually be produced by reading a file from disk and parsing it as a particular syntax. A configuration file has its own _body_, representing the top-level attributes and blocks. An _attribute_ is a name and value pair associated with a body. Attribute names are unique within a given body. Attribute values are provided as _expressions_, which are discussed in detail in a later section. A _block_ is a nested structure that has a _type name_, zero or more string _labels_ (e.g. identifiers), and a nested body. Together the structural elements create a hierarchical data structure, with attributes intended to represent the direct properties of a particular object in the calling application, and blocks intended to represent child objects of a particular object. ## Body Content To support the expression of the HCL concepts in languages whose information model is a subset of HCL's, such as JSON, a _body_ is an opaque container whose content can only be accessed by providing information on the expected structure of the content. The specification for each syntax must describe how its physical constructs are mapped on to body content given a schema. For syntaxes that have first-class syntax distinguishing attributes and bodies this can be relatively straightforward, while more detailed mapping rules may be required in syntaxes where the representation of attributes vs. blocks is ambiguous. ### Schema-driven Processing Schema-driven processing is the primary way to access body content. 
A _body schema_ is a description of what is expected within a particular body, which can then be used to extract the _body content_, which then provides access to the specific attributes and blocks requested. A _body schema_ consists of a list of _attribute schemata_ and _block header schemata_: - An _attribute schema_ provides the name of an attribute and whether its presence is required. - A _block header schema_ provides a block type name and the semantic names assigned to each of the labels of that block type, if any. Within a schema, it is an error to request the same attribute name twice or to request a block type whose name is also an attribute name. While this can in principle be supported in some syntaxes, in other syntaxes the attribute and block namespaces are combined and so an attribute cannot coexist with a block whose type name is identical to the attribute name. The result of applying a body schema to a body is _body content_, which consists of an _attribute map_ and a _block sequence_: - The _attribute map_ is a map data structure whose keys are attribute names and whose values are _expressions_ that represent the corresponding attribute values. - The _block sequence_ is an ordered sequence of blocks, with each specifying a block _type name_, the sequence of _labels_ specified for the block, and the body object (not body _content_) representing the block's own body. After obtaining _body content_, the calling application may continue processing by evaluating attribute expressions and/or recursively applying further schema-driven processing to the child block bodies. **Note:** The _body schema_ is intentionally minimal, to reduce the set of mapping rules that must be defined for each syntax. Higher-level utility libraries may be provided to assist in the construction of a schema and perform additional processing, such as automatically evaluating attribute expressions and assigning their result values into a data structure, or recursively applying a schema to child blocks. Such utilities are not part of this core specification and will vary depending on the capabilities and idiom of the implementation language. ### _Dynamic Attributes_ Processing The _schema-driven_ processing model is useful when the expected structure of a body is known a priori by the calling application. Some blocks are instead more free-form, such as a user-provided set of arbitrary key/value pairs. The alternative _dynamic attributes_ processing mode allows for this more ad-hoc approach. Processing in this mode behaves as if a schema had been constructed without any _block header schemata_ and with an attribute schema for each distinct key provided within the physical representation of the body. The means by which _distinct keys_ are identified is dependent on the physical syntax; this processing mode assumes that the syntax has a way to enumerate keys provided by the author and identify expressions that correspond with those keys, but does not define the means by which this is done. The result of _dynamic attributes_ processing is an _attribute map_ as defined in the previous section. No _block sequence_ is produced in this processing mode. ### Partial Processing of Body Content Under _schema-driven processing_, by default the given schema is assumed to be exhaustive, such that any attribute or block not matched by schema elements is considered an error. This allows feedback about unsupported attributes and blocks (such as typos) to be provided. 
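As a non-normative illustration, the following minimal sketch shows schema-driven processing using the Go implementation in this repository (the `hcl` and `hclsyntax` packages). The configuration text, the attribute name, and the block type used here are arbitrary examples chosen for illustration and are not part of this specification:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
)

func main() {
	src := []byte(`
io_mode = "async"

service "http" "web_proxy" {
  listen_addr = "127.0.0.1:8080"
}
`)

	// Parse the native-syntax configuration into a file whose Body is the
	// top-level body described by this specification.
	file, diags := hclsyntax.ParseConfig(src, "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		fmt.Println(diags.Error())
		return
	}

	// A body schema: one attribute schema and one block header schema.
	schema := &hcl.BodySchema{
		Attributes: []hcl.AttributeSchema{
			{Name: "io_mode", Required: false},
		},
		Blocks: []hcl.BlockHeaderSchema{
			{Type: "service", LabelNames: []string{"type", "name"}},
		},
	}

	// Exhaustive processing: any attribute or block not matched by the
	// schema would be reported as an error diagnostic.
	content, moreDiags := file.Body.Content(schema)
	if moreDiags.HasErrors() {
		fmt.Println(moreDiags.Error())
		return
	}

	for name := range content.Attributes {
		fmt.Println("attribute:", name)
	}
	for _, block := range content.Blocks {
		fmt.Println("block:", block.Type, block.Labels)
	}
}
```

The Go API also provides a `PartialContent` method on bodies, which corresponds to the partial processing mode described next.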
An alternative is _partial processing_, where any additional elements within the body are not considered an error. Under partial processing, the result is both body content as described above _and_ a new body that represents any body elements that remain after the schema has been processed. Specifically: - Any attribute whose name is specified in the schema is returned in body content and elided from the new body. - Any block whose type is specified in the schema is returned in body content and elided from the new body. - Any attribute or block _not_ meeting the above conditions is placed into the new body, unmodified. The new body can then be recursively processed using any of the body processing models. This facility allows different subsets of body content to be processed by different parts of the calling application. Processing a body in two steps — first partial processing of a source body, then exhaustive processing of the returned body — is equivalent to single-step processing with a schema that is the union of the schemata used across the two steps. ## Expressions Attribute values are represented by _expressions_. Depending on the concrete syntax in use, an expression may just be a literal value or it may describe a computation in terms of literal values, variables, and functions. Each syntax defines its own representation of expressions. For syntaxes based in languages that do not have any non-literal expression syntax, it is recommended to embed the template language from [the native syntax](./hclsyntax/spec.md) e.g. as a post-processing step on string literals. ### Expression Evaluation In order to obtain a concrete value, each expression must be _evaluated_. Evaluation is performed in terms of an evaluation context, which consists of the following: - An _evaluation mode_, which is defined below. - A _variable scope_, which provides a set of named variables for use in expressions. - A _function table_, which provides a set of named functions for use in expressions. The _evaluation mode_ allows for two different interpretations of an expression: - In _literal-only mode_, variables and functions are not available and it is assumed that the calling application's intent is to treat the attribute value as a literal. - In _full expression mode_, variables and functions are defined and it is assumed that the calling application wishes to provide a full expression language for definition of the attribute value. The actual behavior of these two modes depends on the syntax in use. For languages with first-class expression syntax, these two modes may be considered equivalent, with _literal-only mode_ simply not defining any variables or functions. For languages that embed arbitrary expressions via string templates, _literal-only mode_ may disable such processing, allowing literal strings to pass through without interpretation as templates. Since literal-only mode does not support variables and functions, it is an error for the calling application to enable this mode and yet provide a variable scope and/or function table. ## Values and Value Types The result of expression evaluation is a _value_. Each value has a _type_, which is dynamically determined during evaluation. The _variable scope_ in the evaluation context is a map from variable name to value, using the same definition of value. The type system for HCL values is intended to be of a level abstraction suitable for configuration of various applications. 
A well-defined, implementation-language-agnostic type system is defined to allow for consistent processing of configuration across many implementation languages. Concrete implementations may provide additional functionality to lower HCL values and types to corresponding native language types, which may then impose additional constraints on the values outside of the scope of this specification. Two values are _equal_ if and only if they have identical types and their values are equal according to the rules of their shared type. ### Primitive Types The primitive types are _string_, _bool_, and _number_. A _string_ is a sequence of unicode characters. Two strings are equal if NFC normalization ([UAX#15](http://unicode.org/reports/tr15/) of each string produces two identical sequences of characters. NFC normalization ensures that, for example, a precomposed combination of a latin letter and a diacritic compares equal with the letter followed by a combining diacritic. The _bool_ type has only two non-null values: _true_ and _false_. Two bool values are equal if and only if they are either both true or both false. A _number_ is an arbitrary-precision floating point value. An implementation _must_ make the full-precision values available to the calling application for interpretation into any suitable number representation. An implementation may in practice implement numbers with limited precision so long as the following constraints are met: - Integers are represented with at least 256 bits. - Non-integer numbers are represented as floating point values with a mantissa of at least 256 bits and a signed binary exponent of at least 16 bits. - An error is produced if an integer value given in source cannot be represented precisely. - An error is produced if a non-integer value cannot be represented due to overflow. - A non-integer number is rounded to the nearest possible value when a value is of too high a precision to be represented. The _number_ type also requires representation of both positive and negative infinity. A "not a number" (NaN) value is _not_ provided nor used. Two number values are equal if they are numerically equal to the precision associated with the number. Positive infinity and negative infinity are equal to themselves but not to each other. Positive infinity is greater than any other number value, and negative infinity is less than any other number value. Some syntaxes may be unable to represent numeric literals of arbitrary precision. This must be defined in the syntax specification as part of its description of mapping numeric literals to HCL values. ### Structural Types _Structural types_ are types that are constructed by combining other types. Each distinct combination of other types is itself a distinct type. There are two structural type _kinds_: - _Object types_ are constructed of a set of named attributes, each of which has a type. Attribute names are always strings. (_Object_ attributes are a distinct idea from _body_ attributes, though calling applications may choose to blur the distinction by use of common naming schemes.) - _Tuple types_ are constructed of a sequence of elements, each of which has a type. Values of structural types are compared for equality in terms of their attributes or elements. A structural type value is equal to another if and only if all of the corresponding attributes or elements are equal. Two structural types are identical if they are of the same kind and have attributes or elements with identical types. 
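As a non-normative illustration, the Go implementation represents values and types using the third-party `cty` library. The sketch below constructs primitive and structural values; the attribute names and values used are arbitrary:

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	// Primitive values: string, number, and bool.
	name := cty.StringVal("example")
	count := cty.NumberIntVal(3)
	enabled := cty.True

	// A structural (object-typed) value combining the primitives above.
	// Its type is derived from the attribute names and attribute types.
	obj := cty.ObjectVal(map[string]cty.Value{
		"name":    name,
		"count":   count,
		"enabled": enabled,
	})
	fmt.Println(obj.Type().FriendlyName())

	// Two structural values are equal only if all corresponding
	// attributes are equal.
	other := cty.ObjectVal(map[string]cty.Value{
		"name":    cty.StringVal("example"),
		"count":   cty.NumberIntVal(3),
		"enabled": cty.True,
	})
	fmt.Println(obj.RawEquals(other)) // true
}
```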
### Collection Types _Collection types_ are types that combine together an arbitrary number of values of some other single type. There are three collection type _kinds_: - _List types_ represent ordered sequences of values of their element type. - _Map types_ represent values of their element type accessed via string keys. - _Set types_ represent unordered sets of distinct values of their element type. For each of these kinds and each distinct element type there is a distinct collection type. For example, "list of string" is a distinct type from "set of string", and "list of number" is a distinct type from "list of string". Values of collection types are compared for equality in terms of their elements. A collection type value is equal to another if and only if both have the same number of elements and their corresponding elements are equal. Two collection types are identical if they are of the same kind and have the same element type. ### Null values Each type has a null value. The null value of a type represents the absence of a value, but with type information retained to allow for type checking. Null values are used primarily to represent the conditional absence of a body attribute. In a syntax with a conditional operator, one of the result values of that conditional may be null to indicate that the attribute should be considered not present in that case. Calling applications _should_ consider an attribute with a null value as equivalent to the value not being present at all. A null value of a particular type is equal to itself. ### Unknown Values and the Dynamic Pseudo-type An _unknown value_ is a placeholder for a value that is not yet known. Operations on unknown values themselves return unknown values that have a type appropriate to the operation. For example, adding together two unknown numbers yields an unknown number, while comparing two unknown values of any type for equality yields an unknown bool. Each type has a distinct unknown value. For example, an unknown _number_ is a distinct value from an unknown _string_. _The dynamic pseudo-type_ is a placeholder for a type that is not yet known. The only values of this type are its null value and its unknown value. It is referred to as a _pseudo-type_ because it should not be considered a type in its own right, but rather as a placeholder for a type yet to be established. The unknown value of the dynamic pseudo-type is referred to as _the dynamic value_. Operations on values of the dynamic pseudo-type behave as if it is a value of the expected type, optimistically assuming that once the value and type are known they will be valid for the operation. For example, adding together a number and the dynamic value produces an unknown number. Unknown values and the dynamic pseudo-type can be used as a mechanism for partial type checking and semantic checking: by evaluating an expression with all variables set to an unknown value, the expression can be evaluated to produce an unknown value of a given type, or produce an error if any operation is provably invalid with only type information. Unknown values and the dynamic pseudo-type must never be returned from operations unless at least one operand is unknown or dynamic. Calling applications are guaranteed that unless the global scope includes unknown values, or the function table includes functions that return unknown values, no expression will evaluate to an unknown value. The calling application is thus in total control over the use and meaning of unknown values. 
The dynamic pseudo-type is identical only to itself. ### Capsule Types A _capsule type_ is a custom type defined by the calling application. A value of a capsule type is considered opaque to HCL, but may be accepted by functions provided by the calling application. A particular capsule type is identical only to itself. The equality of two values of the same capsule type is defined by the calling application. No other operations are supported for values of capsule types. Support for capsule types in a HCL implementation is optional. Capsule types are intended to allow calling applications to pass through values that are not part of the standard type system. For example, an application that deals with raw binary data may define a capsule type representing a byte array, and provide functions that produce or operate on byte arrays. ### Type Specifications In certain situations it is necessary to define expectations about the expected type of a value. Whereas two _types_ have a commutative _identity_ relationship, a type has a non-commutative _matches_ relationship with a _type specification_. A type specification is, in practice, just a different interpretation of a type such that: - Any type _matches_ any type that it is identical to. - Any type _matches_ the dynamic pseudo-type. For example, given a type specification "list of dynamic pseudo-type", the concrete types "list of string" and "list of map" match, but the type "set of string" does not. ## Functions and Function Calls The evaluation context used to evaluate an expression includes a function table, which represents an application-defined set of named functions available for use in expressions. Each syntax defines whether function calls are supported and how they are physically represented in source code, but the semantics of function calls are defined here to ensure consistent results across syntaxes and to allow applications to provide functions that are interoperable with all syntaxes. A _function_ is defined from the following elements: - Zero or more _positional parameters_, each with a name used for documentation, a type specification for expected argument values, and a flag for whether each of null values, unknown values, and values of the dynamic pseudo-type are accepted. - Zero or one _variadic parameters_, with the same structure as the _positional_ parameters, which if present collects any additional arguments provided at the function call site. - A _result type definition_, which specifies the value type returned for each valid sequence of argument values. - A _result value definition_, which specifies the value returned for each valid sequence of argument values. A _function call_, regardless of source syntax, consists of a sequence of argument values. The argument values are each mapped to a corresponding parameter as follows: - For each of the function's positional parameters in sequence, take the next argument. If there are no more arguments, the call is erroneous. - If the function has a variadic parameter, take all remaining arguments that where not yet assigned to a positional parameter and collect them into a sequence of variadic arguments that each correspond to the variadic parameter. - If the function has _no_ variadic parameter, it is an error if any arguments remain after taking one argument for each positional parameter. 
After mapping each argument to a parameter, semantic checking proceeds for each argument: - If the argument value corresponding to a parameter does not match the parameter's type specification, the call is erroneous. - If the argument value corresponding to a parameter is null and the parameter is not specified as accepting nulls, the call is erroneous. - If the argument value corresponding to a parameter is the dynamic value and the parameter is not specified as accepting values of the dynamic pseudo-type, the call is valid but its _result type_ is forced to be the dynamic pseudo type. - If neither of the above conditions holds for any argument, the call is valid and the function's value type definition is used to determine the call's _result type_. A function _may_ vary its result type depending on the argument _values_ as well as the argument _types_; for example, a function that decodes a JSON value will return a different result type depending on the data structure described by the given JSON source code. If semantic checking succeeds without error, the call is _executed_: - For each argument, if its value is unknown and its corresponding parameter is not specified as accepting unknowns, the _result value_ is forced to be an unknown value of the result type. - If the previous condition does not apply, the function's result value definition is used to determine the call's _result value_. The result of a function call expression is either an error, if one of the erroneous conditions above applies, or the _result value_. ## Type Conversions and Unification Values given in configuration may not always match the expectations of the operations applied to them or to the calling application. In such situations, automatic type conversion is attempted as a convenience to the user. Along with conversions to a _specified_ type, it is sometimes necessary to ensure that a selection of values are all of the _same_ type, without any constraint on which type that is. This is the process of _type unification_, which attempts to find the most general type that all of the given types can be converted to. Both type conversions and unification are defined in the syntax-agnostic model to ensure consistency of behavior between syntaxes. Type conversions are broadly characterized into two categories: _safe_ and _unsafe_. A conversion is "safe" if any distinct value of the source type has a corresponding distinct value in the target type. A conversion is "unsafe" if either the target type values are _not_ distinct (information may be lost in conversion) or if some values of the source type do not have any corresponding value in the target type. An unsafe conversion may result in an error. A given type can always be converted to itself, which is a no-op. ### Conversion of Null Values All null values are safely convertable to a null value of any other type, regardless of other type-specific rules specified in the sections below. ### Conversion to and from the Dynamic Pseudo-type Conversion _from_ the dynamic pseudo-type _to_ any other type always succeeds, producing an unknown value of the target type. Conversion of any value _to_ the dynamic pseudo-type is a no-op. The result is the input value, verbatim. This is the only situation where the conversion result value is not of the given target type. ### Primitive Type Conversions Bidirectional conversions are available between the string and number types, and between the string and boolean types. 
The bool value true corresponds to the string containing the characters "true", while the bool value false corresponds to the string containing the characters "false". Conversion from bool to string is safe, while the converse is unsafe. The strings "1" and "0" are alternative string representations of true and false respectively. It is an error to convert a string other than the four in this paragraph to type bool. A number value is converted to string by translating its integer portion into a sequence of decimal digits (`0` through `9`), and then if it has a non-zero fractional part, a period `.` followed by a sequence of decimal digits representing its fractional part. No exponent portion is included. The number is converted at its full precision. Conversion from number to string is safe. A string is converted to a number value by reversing the above mapping. No exponent portion is allowed. Conversion from string to number is unsafe. It is an error to convert a string that does not comply with the expected syntax to type number. No direct conversion is available between the bool and number types. ### Collection and Structural Type Conversions Conversion from set types to list types is _safe_, as long as their element types are safely convertable. If the element types are _unsafely_ convertable, then the collection conversion is also unsafe. Each set element becomes a corresponding list element, in an undefined order. Although no particular ordering is required, implementations _should_ produce list elements in a consistent order for a given input set, as a convenience to calling applications. Conversion from list types to set types is _unsafe_, as long as their element types are convertable. Each distinct list item becomes a distinct set item. If two list items are equal, one of the two is lost in the conversion. Conversion from tuple types to list types permitted if all of the tuple element types are convertable to the target list element type. The safety of the conversion depends on the safety of each of the element conversions. Each element in turn is converted to the list element type, producing a list of identical length. Conversion from tuple types to set types is permitted, behaving as if the tuple type was first converted to a list of the same element type and then that list converted to the target set type. Conversion from object types to map types is permitted if all of the object attribute types are convertable to the target map element type. The safety of the conversion depends on the safety of each of the attribute conversions. Each attribute in turn is converted to the map element type, and map element keys are set to the name of each corresponding object attribute. Conversion from list and set types to tuple types is permitted, following the opposite steps as the converse conversions. Such conversions are _unsafe_. It is an error to convert a list or set to a tuple type whose number of elements does not match the list or set length. Conversion from map types to object types is permitted if each map key corresponds to an attribute in the target object type. It is an error to convert from a map value whose set of keys does not exactly match the target type's attributes. The conversion takes the opposite steps of the converse conversion. Conversion from one object type to another is permitted as long as the common attribute names have convertable types. Any attribute present in the target type but not in the source type is populated with a null value of the appropriate type. 
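As a non-normative illustration of these conversion rules, the Go implementation provides them through the `cty` `convert` package; the values used below are arbitrary:

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/convert"
)

func main() {
	// A tuple value whose elements are all convertable to string.
	tup := cty.TupleVal([]cty.Value{
		cty.StringVal("a"),
		cty.NumberIntVal(2),
		cty.True,
	})

	// Conversion from a tuple type to a list type converts each element
	// to the target element type in turn, producing a list of the same length.
	got, err := convert.Convert(tup, cty.List(cty.String))
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(got.Type().FriendlyName()) // list of string
}
```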
Conversion from one tuple type to another is permitted as long as the tuples have the same length and the elements have convertable types. ### Type Unification Type unification is an operation that takes a list of types and attempts to find a single type to which they can all be converted. Since some type pairs have bidirectional conversions, preference is given to _safe_ conversions. In technical terms, all possible types are arranged into a lattice, from which a most general supertype is selected where possible. The type resulting from type unification may be one of the input types, or it may be an entirely new type produced by combination of two or more input types. The following rules do not guarantee a valid result. In addition to these rules, unification fails if any of the given types are not convertable (per the above rules) to the selected result type. The following unification rules apply transitively. That is, if a rule is defined from A to B, and one from B to C, then A can unify to C. Number and bool types both unify with string by preferring string. Two collection types of the same kind unify according to the unification of their element types. List and set types unify by preferring the list type. Map and object types unify by preferring the object type. List, set and tuple types unify by preferring the tuple type. The dynamic pseudo-type unifies with any other type by selecting that other type. The dynamic pseudo-type is the result type only if _all_ input types are the dynamic pseudo-type. Two object types unify by constructing a new type whose attributes are the union of those of the two input types. Any common attributes themselves have their types unified. Two tuple types of the same length unify constructing a new type of the same length whose elements are the unification of the corresponding elements in the two input types. ## Static Analysis In most applications, full expression evaluation is sufficient for understanding the provided configuration. However, some specialized applications require more direct access to the physical structures in the expressions, which can for example allow the construction of new language constructs in terms of the existing syntax elements. Since static analysis analyses the physical structure of configuration, the details will vary depending on syntax. Each syntax must decide which of its physical structures corresponds to the following analyses, producing error diagnostics if they are applied to inappropriate expressions. The following are the required static analysis functions: - **Static List**: Require list/tuple construction syntax to be used and return a list of expressions for each of the elements given. - **Static Map**: Require map/object construction syntax to be used and return a list of key/value pairs -- both expressions -- for each of the elements given. The usual constraint that a map key must be a string must not apply to this analysis, thus allowing applications to interpret arbitrary keys as they see fit. - **Static Call**: Require function call syntax to be used and return an object describing the called function name and a list of expressions representing each of the call arguments. - **Static Traversal**: Require a reference to a symbol in the variable scope and return a description of the path from the root scope to the accessed attribute or index. The intent of a calling application using these features is to require a more rigid interpretation of the configuration than in expression evaluation. 
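As a non-normative illustration, the Go implementation in this repository exposes these analyses as `hcl.ExprList`, `hcl.ExprMap`, `hcl.ExprCall`, and `hcl.AbsTraversalForExpr`. The sketch below applies the static call and static traversal analyses to an expression that is never evaluated; the expression text itself is arbitrary:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
)

func main() {
	// Parse an expression without evaluating it.
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`min(a, b, 3)`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		fmt.Println(diags.Error())
		return
	}

	// Static Call analysis: require function call syntax and recover the
	// called function name and its argument expressions.
	call, callDiags := hcl.ExprCall(expr)
	if callDiags.HasErrors() {
		fmt.Println(callDiags.Error())
		return
	}
	fmt.Println("called function:", call.Name)
	fmt.Println("number of arguments:", len(call.Arguments))

	// Static Traversal analysis applied to the first argument, which is
	// a reference to a symbol in the variable scope.
	traversal, travDiags := hcl.AbsTraversalForExpr(call.Arguments[0])
	if !travDiags.HasErrors() {
		fmt.Println("first argument refers to:", traversal.RootName())
	}
}
```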
Syntax implementations should make use of the extra contextual information provided in order to make an intuitive mapping onto the constructs of the underlying syntax, possibly interpreting the expression slightly differently than it would be interpreted in normal evaluation. Each syntax must define which of its expression elements each of the analyses above applies to, and how those analyses behave given those expression elements. ## Implementation Considerations Implementations of this specification are free to adopt any strategy that produces behavior consistent with the specification. This non-normative section describes some possible implementation strategies that are consistent with the goals of this specification. ### Language-agnosticism The language-agnosticism of this specification assumes that certain behaviors are implemented separately for each syntax: - Matching of a body schema with the physical elements of a body in the source language, to determine correspondence between physical constructs and schema elements. - Implementing the _dynamic attributes_ body processing mode by either interpreting all physical constructs as attributes or producing an error if non-attribute constructs are present. - Providing an evaluation function for all possible expressions that produces a value given an evaluation context. - Providing the static analysis functionality described above in a manner that makes sense within the convention of the syntax. The suggested implementation strategy is to use an implementation language's closest concept to an _abstract type_, _virtual type_ or _interface type_ to represent both Body and Expression. Each language-specific implementation can then provide an implementation of each of these types wrapping AST nodes or other physical constructs from the language parser. hcl-2.14.1/specsuite/000077500000000000000000000000001431334125700143545ustar00rootroot00000000000000hcl-2.14.1/specsuite/.gitignore000066400000000000000000000000061431334125700163400ustar00rootroot00000000000000tmp_* hcl-2.14.1/specsuite/README.md000066400000000000000000000027261431334125700156420ustar00rootroot00000000000000# HCL Language Test Suite This directory contains an implementation-agnostic test suite that can be used to verify the correct behavior not only of the HCL implementation in _this_ repository but also of possible other implementations. The harness for running this suite -- a Go program in this directory -- uses the `hcldec` program as a level of abstraction to avoid depending directly on the Go implementation. As a result, other HCL implementations must also include a version of `hcldec` in order to run this spec. The tests defined in this suite each correspond to a detail of [the HCL spec](../spec.md). This suite is separate from and not a substitute for direct unit tests of a given implementation that would presumably also exercise that implementation's own programmatic API. To run the suite, first build the harness using Go: ``` go install github.com/hashicorp/hcl/v2/cmd/hclspecsuite ``` Then run it, passing it the directory containing the test definitions (the "tests" subdirectory of this directory) and the path to the `hcldec` executable to use. 
For example, if working in the root of this repository and using the `hcldec` implementation from here: ``` go install ./cmd/hcldec hclspecsuite ./specsuite/tests $GOPATH/bin/hcldec ``` For developers working on the Go implementation of HCL from this repository, please note that this spec suite is run as part of a normal `go test ./...` execution for this whole repository and so does not need to be run separately. hcl-2.14.1/specsuite/spec_test.go000066400000000000000000000041251431334125700166760ustar00rootroot00000000000000package spectests import ( "bufio" "bytes" "fmt" "os" "os/exec" "path/filepath" "runtime" "strings" "testing" ) func TestMain(m *testing.M) { // The test harness is an external program that also expects to have // hcldec built as an external program, so we'll build both into // temporary files in our working directory before running our tests // here, to ensure that we're always running a build of the latest code. err := build() if err != nil { fmt.Fprintf(os.Stderr, "%s\n", err.Error()) os.Exit(1) } // Now we can run the tests os.Exit(m.Run()) } func build() error { err := goBuild("github.com/hashicorp/hcl/v2/cmd/hcldec", "tmp_hcldec") if err != nil { return fmt.Errorf("error building hcldec: %s", err) } err = goBuild("github.com/hashicorp/hcl/v2/cmd/hclspecsuite", "tmp_hclspecsuite") if err != nil { return fmt.Errorf("error building hcldec: %s", err) } return nil } func TestSpec(t *testing.T) { suiteDir := filepath.Clean("../specsuite/tests") harness := "./tmp_hclspecsuite" hcldec := "./tmp_hcldec" cmd := exec.Command(harness, suiteDir, hcldec) out, err := cmd.CombinedOutput() if _, isExit := err.(*exec.ExitError); err != nil && !isExit { t.Errorf("failed to run harness: %s", err) } failed := err != nil sc := bufio.NewScanner(bytes.NewReader(out)) var lines []string for sc.Scan() { lines = append(lines, sc.Text()) } i := 0 for i < len(lines) { cur := lines[i] if strings.HasPrefix(cur, "- ") { testName := cur[2:] t.Run(testName, func(t *testing.T) { i++ for i < len(lines) { cur := lines[i] if strings.HasPrefix(cur, "- ") || strings.HasPrefix(cur, "==") { return } t.Error(cur) i++ } }) } else { if !strings.HasPrefix(cur, "==") { // not the "test harness problems" report, then t.Log(cur) } i++ } } if failed { t.Error("specsuite failed") } } func goBuild(pkg, outFile string) error { if runtime.GOOS == "windows" { outFile += ".exe" } cmd := exec.Command("go", "build", "-o", outFile, pkg) cmd.Stderr = os.Stderr cmd.Stdout = os.Stdout return cmd.Run() } hcl-2.14.1/specsuite/tests/000077500000000000000000000000001431334125700155165ustar00rootroot00000000000000hcl-2.14.1/specsuite/tests/comments/000077500000000000000000000000001431334125700173435ustar00rootroot00000000000000hcl-2.14.1/specsuite/tests/comments/hash_comment.hcl000066400000000000000000000000171431334125700224760ustar00rootroot00000000000000# Hash comment hcl-2.14.1/specsuite/tests/comments/hash_comment.hcldec000066400000000000000000000000331431334125700231500ustar00rootroot00000000000000literal { value = "ok" } hcl-2.14.1/specsuite/tests/comments/hash_comment.t000066400000000000000000000002471431334125700222000ustar00rootroot00000000000000# This test parses a file containing only a comment. It is a parsing-only test, # so the hcldec spec for this test is just a literal value given below. 
result = "ok" hcl-2.14.1/specsuite/tests/comments/multiline_comment.hcl000066400000000000000000000000331431334125700235530ustar00rootroot00000000000000/* Multi-line comment */ hcl-2.14.1/specsuite/tests/comments/multiline_comment.hcldec000066400000000000000000000000331431334125700242270ustar00rootroot00000000000000literal { value = "ok" } hcl-2.14.1/specsuite/tests/comments/multiline_comment.t000066400000000000000000000002471431334125700232570ustar00rootroot00000000000000# This test parses a file containing only a comment. It is a parsing-only test, # so the hcldec spec for this test is just a literal value given below. result = "ok" hcl-2.14.1/specsuite/tests/comments/slash_comment.hcl000066400000000000000000000000211431334125700226600ustar00rootroot00000000000000// Slash comment hcl-2.14.1/specsuite/tests/comments/slash_comment.hcldec000066400000000000000000000000331431334125700233370ustar00rootroot00000000000000literal { value = "ok" } hcl-2.14.1/specsuite/tests/comments/slash_comment.t000066400000000000000000000002471431334125700223670ustar00rootroot00000000000000# This test parses a file containing only a comment. It is a parsing-only test, # so the hcldec spec for this test is just a literal value given below. result = "ok" hcl-2.14.1/specsuite/tests/empty.hcl000066400000000000000000000000001431334125700173320ustar00rootroot00000000000000hcl-2.14.1/specsuite/tests/empty.hcl.json000066400000000000000000000000031431334125700203050ustar00rootroot00000000000000{} hcl-2.14.1/specsuite/tests/empty.hcldec000066400000000000000000000000331431334125700200140ustar00rootroot00000000000000literal { value = "ok" } hcl-2.14.1/specsuite/tests/empty.t000066400000000000000000000003621431334125700170420ustar00rootroot00000000000000# This test ensures that we can successfully parse an empty file. # Since an empty file has no content, the hcldec spec for this test is # just a literal value, which we test below. result = "ok" traversals { # Explicitly no traversals } hcl-2.14.1/specsuite/tests/expressions/000077500000000000000000000000001431334125700201005ustar00rootroot00000000000000hcl-2.14.1/specsuite/tests/expressions/heredoc.hcl000066400000000000000000000017611431334125700222060ustar00rootroot00000000000000normal = { basic = <" { lt = 1 > 2 gt = 2 > 1 eq = 1 > 1 } inequality ">=" { lt = 1 >= 2 gt = 2 >= 1 eq = 1 >= 1 } arithmetic { add = 2 + 3.5 add_big = 3.14159265358979323846264338327950288419716939937510582097494459 + 1 sub = 3.5 - 2 sub_neg = 2 - 3.5 mul = 2 * 4.5 div = 1 / 10 mod = 11 % 5 mod_frac = 11 % 5.1 } logical_binary "&&" { tt = true && true ft = false && true tf = true && false ff = false && false } logical_binary "||" { tt = true || true ft = false || true tf = true || false ff = false || false } logical_unary "!" { t = !true f = !false } conditional { t = true ? "a" : "b" f = false ? 
"a" : "b" } hcl-2.14.1/specsuite/tests/expressions/operators.hcldec000066400000000000000000000022701431334125700232630ustar00rootroot00000000000000object { block_map "equality" { labels = ["operator"] object { attr "exactly" { type = bool } attr "not" { type = bool } attr "type_mismatch_number" { type = bool } attr "type_mismatch_bool" { type = bool } } } block_map "inequality" { labels = ["operator"] object { attr "lt" { type = bool } attr "gt" { type = bool } attr "eq" { type = bool } } } block "arithmetic" { object { attr "add" { type = any } attr "add_big" { type = any } attr "sub" { type = any } attr "sub_neg" { type = any } attr "mul" { type = any } attr "div" { type = any } attr "mod" { type = any } attr "mod_frac" { type = any } } } block_map "logical_binary" { labels = ["operator"] object { attr "tt" { type = bool } attr "ft" { type = bool } attr "tf" { type = bool } attr "ff" { type = bool } } } block_map "logical_unary" { labels = ["operator"] object { attr "t" { type = bool } attr "f" { type = bool } } } block "conditional" { object { attr "t" { type = any } attr "f" { type = any } } } } hcl-2.14.1/specsuite/tests/expressions/operators.t000066400000000000000000000037651431334125700223160ustar00rootroot00000000000000result = { equality = { "==" = { exactly = true not = false type_mismatch_number = false type_mismatch_bool = false } "!=" = { exactly = false not = true type_mismatch_number = true type_mismatch_bool = true } } inequality = { "<" = { lt = true gt = false eq = false } "<=" = { lt = true gt = false eq = true } ">" = { lt = false gt = true eq = false } ">=" = { lt = false gt = true eq = true } } arithmetic = { add = 5.5 add_big = 4.14159265358979323846264338327950288419716939937510582097494459 sub = 1.5 sub_neg = -1.5 mul = 9 div = 0.1 mod = 1 mod_frac = 0.80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024 } logical_binary = { "&&" = { tt = true tf = false ft = false ff = false } "||" = { tt = true tf = true ft = true ff = false } } logical_unary = { "!" 
= { t = false f = true } } conditional = { t = "a" f = "b" } } result_type = object({ equality = map(object({ exactly = bool not = bool type_mismatch_number = bool type_mismatch_bool = bool })) inequality = map(object({ lt = bool gt = bool eq = bool })) arithmetic = object({ add = number add_big = number sub = number sub_neg = number mul = number div = number mod = number mod_frac = number }) logical_binary = map(object({ tt = bool tf = bool ft = bool ff = bool })) logical_unary = map(object({ t = bool f = bool })) conditional = object({ t = string f = string }) }) hcl-2.14.1/specsuite/tests/expressions/primitive_literals.hcl000066400000000000000000000007011431334125700244750ustar00rootroot00000000000000# Numbers whole_number = 5 fractional_number = 3.2 fractional_number_precision = 3.14159265358979323846264338327950288419716939937510582097494459 # Strings string_ascii = "hello" string_unicode_bmp = "ЖЖ" string_unicode_astral = "👩‍👩‍👧‍👦" string_unicode_nonnorm = "años" # This is intentionally a combining tilde followed by n # Booleans true = true false = false # Null null = null hcl-2.14.1/specsuite/tests/expressions/primitive_literals.hcldec000066400000000000000000000006231431334125700251540ustar00rootroot00000000000000object { attr "whole_number" { type = any } attr "fractional_number" { type = any } attr "fractional_number_precision" { type = any } attr "string_ascii" { type = any } attr "string_unicode_bmp" { type = any } attr "string_unicode_astral" { type = any } attr "string_unicode_nonnorm" { type = any } attr "true" { type = any } attr "false" { type = any } attr "null" { type = any } } hcl-2.14.1/specsuite/tests/expressions/primitive_literals.t000066400000000000000000000024141431334125700241750ustar00rootroot00000000000000result_type = object({ whole_number = number fractional_number = number fractional_number_precision = number string_ascii = string string_unicode_bmp = string string_unicode_astral = string string_unicode_nonnorm = string true = bool false = bool null = any }) result = { # Numbers whole_number = 5 fractional_number = 3.2 fractional_number_precision = 3.14159265358979323846264338327950288419716939937510582097494459 # Strings string_ascii = "hello" string_unicode_bmp = "ЖЖ" string_unicode_astral = "👩‍👩‍👧‍👦" string_unicode_nonnorm = "años" # now a precomposed ñ, because HCL imposes NFC normalization # FIXME: The above normalization test doesn't necessarily test what it thinks # it is testing, because this file is also HCL and thus subject to # normalization; as long as the parser normalizes consistently this could # pass even if it's using a different normalization form. # The left hand side of these are quoted to make it clear that we're expecting # to get strings here, not really true/false/null. 
"true" = true "false" = false "null" = null } hcl-2.14.1/specsuite/tests/structure/000077500000000000000000000000001431334125700175565ustar00rootroot00000000000000hcl-2.14.1/specsuite/tests/structure/attributes/000077500000000000000000000000001431334125700217445ustar00rootroot00000000000000hcl-2.14.1/specsuite/tests/structure/attributes/expected.hcl000066400000000000000000000000521431334125700242320ustar00rootroot00000000000000a = "a value" b = "b value" c = "c value" hcl-2.14.1/specsuite/tests/structure/attributes/expected.hcldec000066400000000000000000000001641431334125700247120ustar00rootroot00000000000000object { attr "a" { type = string } attr "b" { type = string } attr "c" { type = string } } hcl-2.14.1/specsuite/tests/structure/attributes/expected.t000066400000000000000000000001761431334125700237360ustar00rootroot00000000000000result_type = object({ a = string b = string c = string }) result = { a = "a value" b = "b value" c = "c value" } hcl-2.14.1/specsuite/tests/structure/attributes/singleline_bad.hcl000066400000000000000000000000351431334125700253710ustar00rootroot00000000000000a = "a value", b = "b value" hcl-2.14.1/specsuite/tests/structure/attributes/singleline_bad.hcldec000066400000000000000000000000331431334125700260430ustar00rootroot00000000000000literal { value = null } hcl-2.14.1/specsuite/tests/structure/attributes/singleline_bad.t000066400000000000000000000006541431334125700250750ustar00rootroot00000000000000# This test verifies that comma-separated attributes on the same line are # reported as an error, rather than being parsed like an object constructor # expression. diagnostics { error { # Message like "missing newline after argument" or "each argument must be on its own line" from { line = 1 column = 14 byte = 13 } to { line = 1 column = 15 byte = 14 } } } hcl-2.14.1/specsuite/tests/structure/attributes/unexpected.hcl000066400000000000000000000000701431334125700245750ustar00rootroot00000000000000a = "a value" b = "b value" c = "c value" d = "d value" hcl-2.14.1/specsuite/tests/structure/attributes/unexpected.hcldec000066400000000000000000000001641431334125700252550ustar00rootroot00000000000000object { attr "a" { type = string } attr "b" { type = string } attr "d" { type = string } } hcl-2.14.1/specsuite/tests/structure/attributes/unexpected.t000066400000000000000000000003301431334125700242710ustar00rootroot00000000000000diagnostics { error { # An argument named "c" is not expected here. 
from { line = 3 column = 1 byte = 28 } to { line = 3 column = 2 byte = 29 } } } hcl-2.14.1/specsuite/tests/structure/blocks/000077500000000000000000000000001431334125700210335ustar00rootroot00000000000000hcl-2.14.1/specsuite/tests/structure/blocks/single_empty_oneline.hcl000066400000000000000000000000051431334125700257260ustar00rootroot00000000000000a {} hcl-2.14.1/specsuite/tests/structure/blocks/single_empty_oneline.hcldec000066400000000000000000000000511431334125700264030ustar00rootroot00000000000000block { block_type = "a" object {} } hcl-2.14.1/specsuite/tests/structure/blocks/single_empty_oneline.t000066400000000000000000000000311431334125700254220ustar00rootroot00000000000000result_type = object({}) hcl-2.14.1/specsuite/tests/structure/blocks/single_expected.hcl000066400000000000000000000000071431334125700246620ustar00rootroot00000000000000a { } hcl-2.14.1/specsuite/tests/structure/blocks/single_expected.hcldec000066400000000000000000000000511431334125700253350ustar00rootroot00000000000000block { block_type = "a" object {} } hcl-2.14.1/specsuite/tests/structure/blocks/single_expected.t000066400000000000000000000000311431334125700243540ustar00rootroot00000000000000result_type = object({}) hcl-2.14.1/specsuite/tests/structure/blocks/single_oneline.hcl000066400000000000000000000000201431334125700245050ustar00rootroot00000000000000a { b = "foo" } hcl-2.14.1/specsuite/tests/structure/blocks/single_oneline.hcldec000066400000000000000000000001251431334125700251670ustar00rootroot00000000000000block { block_type = "a" object { attr "b" { type = string } } } hcl-2.14.1/specsuite/tests/structure/blocks/single_oneline.t000066400000000000000000000001001431334125700242010ustar00rootroot00000000000000result_type = object({ b = string }) result = { b = "foo" } hcl-2.14.1/specsuite/tests/structure/blocks/single_oneline_invalid.hcl000066400000000000000000000001561431334125700262250ustar00rootroot00000000000000a { b = "foo", c = "bar" } a { b = "foo" } a { b = "foo" c = "bar" } a { b = "foo" c = "bar" } a { d {} } hcl-2.14.1/specsuite/tests/structure/blocks/single_oneline_invalid.hcldec000066400000000000000000000002561431334125700267020ustar00rootroot00000000000000block_list { block_type = "a" object { attr "b" { type = string } attr "c" { type = string } block_list "d" { object {} } } } hcl-2.14.1/specsuite/tests/structure/blocks/single_oneline_invalid.t000066400000000000000000000023361431334125700257240ustar00rootroot00000000000000diagnostics { error { # Message like "Only one argument is allowed in a single-line block definition" from { line = 1 column = 14 byte = 13 } to { line = 1 column = 15 byte = 14 } } error { # Message like "The closing brace for a single-line block definition must be on the same line" from { line = 2 column = 14 byte = 40 } to { line = 3 column = 1 byte = 41 } } error { # Message like "The closing brace for a single-line block definition must be on the same line" from { line = 4 column = 14 byte = 56 } to { line = 5 column = 1 byte = 57 } } error { # Message like "The closing brace for a single-line block definition must be on the same line" from { line = 6 column = 14 byte = 84 } to { line = 7 column = 1 byte = 85 } } error { # Message like "A single-line block definition cannot contain another block definition" from { line = 9 column = 5 byte = 103 } to { line = 9 column = 8 byte = 106 } } } hcl-2.14.1/specsuite/tests/structure/blocks/single_unclosed.hcl000066400000000000000000000000041431334125700246720ustar00rootroot00000000000000a { 
hcl-2.14.1/specsuite/tests/structure/blocks/single_unclosed.hcldec000066400000000000000000000000511431334125700253500ustar00rootroot00000000000000block { block_type = "a" object {} } hcl-2.14.1/specsuite/tests/structure/blocks/single_unclosed.t000066400000000000000000000002441431334125700243750ustar00rootroot00000000000000diagnostics { error { from { line = 1 column = 3 byte = 2 } to { line = 1 column = 4 byte = 3 } } } hcl-2.14.1/static_expr.go000066400000000000000000000016731431334125700152330ustar00rootroot00000000000000package hcl import ( "github.com/zclconf/go-cty/cty" ) type staticExpr struct { val cty.Value rng Range } // StaticExpr returns an Expression that always evaluates to the given value. // // This is useful to substitute default values for expressions that are // not explicitly given in configuration and thus would otherwise have no // Expression to return. // // Since expressions are expected to have a source range, the caller must // provide one. Ideally this should be a real source range, but it can // be a synthetic one (with an empty-string filename) if no suitable range // is available. func StaticExpr(val cty.Value, rng Range) Expression { return staticExpr{val, rng} } func (e staticExpr) Value(ctx *EvalContext) (cty.Value, Diagnostics) { return e.val, nil } func (e staticExpr) Variables() []Traversal { return nil } func (e staticExpr) Range() Range { return e.rng } func (e staticExpr) StartRange() Range { return e.rng } hcl-2.14.1/structure.go000066400000000000000000000123711431334125700147430ustar00rootroot00000000000000package hcl import ( "github.com/zclconf/go-cty/cty" ) // File is the top-level node that results from parsing a HCL file. type File struct { Body Body Bytes []byte // Nav is used to integrate with the "hcled" editor integration package, // and with diagnostic information formatters. It is not for direct use // by a calling application. Nav interface{} } // Block represents a nested block within a Body. type Block struct { Type string Labels []string Body Body DefRange Range // Range that can be considered the "definition" for seeking in an editor TypeRange Range // Range for the block type declaration specifically. LabelRanges []Range // Ranges for the label values specifically. } // Blocks is a sequence of Block. type Blocks []*Block // Attributes is a set of attributes keyed by their names. type Attributes map[string]*Attribute // Body is a container for attributes and blocks. It serves as the primary // unit of hierarchical structure within configuration. // // The content of a body cannot be meaningfully interpreted without a schema, // so Body represents the raw body content and has methods that allow the // content to be extracted in terms of a given schema. type Body interface { // Content verifies that the entire body content conforms to the given // schema and then returns it, and/or returns diagnostics. The returned // body content is valid if non-nil, regardless of whether Diagnostics // are provided, but diagnostics should still be eventually shown to // the user. Content(schema *BodySchema) (*BodyContent, Diagnostics) // PartialContent is like Content except that it permits the configuration // to contain additional blocks or attributes not specified in the // schema. If any are present, the returned Body is non-nil and contains // the remaining items from the body that were not selected by the schema. 
PartialContent(schema *BodySchema) (*BodyContent, Body, Diagnostics) // JustAttributes attempts to interpret all of the contents of the body // as attributes, allowing for the contents to be accessed without a priori // knowledge of the structure. // // The behavior of this method depends on the body's source language. // Some languages, like JSON, can't distinguish between attributes and // blocks without schema hints, but for languages that _can_ error // diagnostics will be generated if any blocks are present in the body. // // Diagnostics may be produced for other reasons too, such as duplicate // declarations of the same attribute. JustAttributes() (Attributes, Diagnostics) // MissingItemRange returns a range that represents where a missing item // might hypothetically be inserted. This is used when producing // diagnostics about missing required attributes or blocks. Not all bodies // will have an obvious single insertion point, so the result here may // be rather arbitrary. MissingItemRange() Range } // BodyContent is the result of applying a BodySchema to a Body. type BodyContent struct { Attributes Attributes Blocks Blocks MissingItemRange Range } // Attribute represents an attribute from within a body. type Attribute struct { Name string Expr Expression Range Range NameRange Range } // Expression is a literal value or an expression provided in the // configuration, which can be evaluated within a scope to produce a value. type Expression interface { // Value returns the value resulting from evaluating the expression // in the given evaluation context. // // The context may be nil, in which case the expression may contain // only constants and diagnostics will be produced for any non-constant // sub-expressions. (The exact definition of this depends on the source // language.) // // The context may instead be set but have either its Variables or // Functions maps set to nil, in which case only use of these features // will return diagnostics. // // Different diagnostics are provided depending on whether the given // context maps are nil or empty. In the former case, the message // tells the user that variables/functions are not permitted at all, // while in the latter case usage will produce a "not found" error for // the specific symbol in question. Value(ctx *EvalContext) (cty.Value, Diagnostics) // Variables returns a list of variables referenced in the receiving // expression. These are expressed as absolute Traversals, so may include // additional information about how the variable is used, such as // attribute lookups, which the calling application can potentially use // to only selectively populate the scope. Variables() []Traversal Range() Range StartRange() Range } // OfType filters the receiving block sequence by block type name, // returning a new block sequence including only the blocks of the // requested type. func (els Blocks) OfType(typeName string) Blocks { ret := make(Blocks, 0) for _, el := range els { if el.Type == typeName { ret = append(ret, el) } } return ret } // ByType transforms the receiving block sequence into a map from type // name to block sequences of only that type. 
func (els Blocks) ByType() map[string]Blocks { ret := make(map[string]Blocks) for _, el := range els { ty := el.Type if ret[ty] == nil { ret[ty] = make(Blocks, 0, 1) } ret[ty] = append(ret[ty], el) } return ret } hcl-2.14.1/structure_at_pos.go000066400000000000000000000103521431334125700163050ustar00rootroot00000000000000package hcl // ----------------------------------------------------------------------------- // The methods in this file all have the general pattern of making a best-effort // to find one or more constructs that contain a given source position. // // These all operate by delegating to an optional method of the same name and // signature on the file's root body, allowing each syntax to potentially // provide its own implementations of these. For syntaxes that don't implement // them, the result is always nil. // ----------------------------------------------------------------------------- // BlocksAtPos attempts to find all of the blocks that contain the given // position, ordered so that the outermost block is first and the innermost // block is last. This is a best-effort method that may not be able to produce // a complete result for all positions or for all HCL syntaxes. // // If the returned slice is non-empty, the first element is guaranteed to // represent the same block as would be the result of OutermostBlockAtPos and // the last element the result of InnermostBlockAtPos. However, the // implementation may return two different objects describing the same block, // so comparison by pointer identity is not possible. // // The result is nil if no blocks at all contain the given position. func (f *File) BlocksAtPos(pos Pos) []*Block { // The root body of the file must implement this interface in order // to support BlocksAtPos. type Interface interface { BlocksAtPos(pos Pos) []*Block } impl, ok := f.Body.(Interface) if !ok { return nil } return impl.BlocksAtPos(pos) } // OutermostBlockAtPos attempts to find a top-level block in the receiving file // that contains the given position. This is a best-effort method that may not // be able to produce a result for all positions or for all HCL syntaxes. // // The result is nil if no single block could be selected for any reason. func (f *File) OutermostBlockAtPos(pos Pos) *Block { // The root body of the file must implement this interface in order // to support OutermostBlockAtPos. type Interface interface { OutermostBlockAtPos(pos Pos) *Block } impl, ok := f.Body.(Interface) if !ok { return nil } return impl.OutermostBlockAtPos(pos) } // InnermostBlockAtPos attempts to find the most deeply-nested block in the // receiving file that contains the given position. This is a best-effort // method that may not be able to produce a result for all positions or for // all HCL syntaxes. // // The result is nil if no single block could be selected for any reason. func (f *File) InnermostBlockAtPos(pos Pos) *Block { // The root body of the file must implement this interface in order // to support InnermostBlockAtPos. type Interface interface { InnermostBlockAtPos(pos Pos) *Block } impl, ok := f.Body.(Interface) if !ok { return nil } return impl.InnermostBlockAtPos(pos) } // OutermostExprAtPos attempts to find an expression in the receiving file // that contains the given position. This is a best-effort method that may not // be able to produce a result for all positions or for all HCL syntaxes. 
// // Since expressions are often nested inside one another, this method returns // the outermost "root" expression that is not contained by any other. // // The result is nil if no single expression could be selected for any reason. func (f *File) OutermostExprAtPos(pos Pos) Expression { // The root body of the file must implement this interface in order // to support OutermostExprAtPos. type Interface interface { OutermostExprAtPos(pos Pos) Expression } impl, ok := f.Body.(Interface) if !ok { return nil } return impl.OutermostExprAtPos(pos) } // AttributeAtPos attempts to find an attribute definition in the receiving // file that contains the given position. This is a best-effort method that may // not be able to produce a result for all positions or for all HCL syntaxes. // // The result is nil if no single attribute could be selected for any reason. func (f *File) AttributeAtPos(pos Pos) *Attribute { // The root body of the file must implement this interface in order // to support OutermostExprAtPos. type Interface interface { AttributeAtPos(pos Pos) *Attribute } impl, ok := f.Body.(Interface) if !ok { return nil } return impl.AttributeAtPos(pos) } hcl-2.14.1/traversal.go000066400000000000000000000176751431334125700147220ustar00rootroot00000000000000package hcl import ( "fmt" "github.com/zclconf/go-cty/cty" ) // A Traversal is a description of traversing through a value through a // series of operations such as attribute lookup, index lookup, etc. // // It is used to look up values in scopes, for example. // // The traversal operations are implementations of interface Traverser. // This is a closed set of implementations, so the interface cannot be // implemented from outside this package. // // A traversal can be absolute (its first value is a symbol name) or relative // (starts from an existing value). type Traversal []Traverser // TraversalJoin appends a relative traversal to an absolute traversal to // produce a new absolute traversal. func TraversalJoin(abs Traversal, rel Traversal) Traversal { if abs.IsRelative() { panic("first argument to TraversalJoin must be absolute") } if !rel.IsRelative() { panic("second argument to TraversalJoin must be relative") } ret := make(Traversal, len(abs)+len(rel)) copy(ret, abs) copy(ret[len(abs):], rel) return ret } // TraverseRel applies the receiving traversal to the given value, returning // the resulting value. This is supported only for relative traversals, // and will panic if applied to an absolute traversal. func (t Traversal) TraverseRel(val cty.Value) (cty.Value, Diagnostics) { if !t.IsRelative() { panic("can't use TraverseRel on an absolute traversal") } current := val var diags Diagnostics for _, tr := range t { var newDiags Diagnostics current, newDiags = tr.TraversalStep(current) diags = append(diags, newDiags...) if newDiags.HasErrors() { return cty.DynamicVal, diags } } return current, diags } // TraverseAbs applies the receiving traversal to the given eval context, // returning the resulting value. This is supported only for absolute // traversals, and will panic if applied to a relative traversal. 
func (t Traversal) TraverseAbs(ctx *EvalContext) (cty.Value, Diagnostics) { if t.IsRelative() { panic("can't use TraverseAbs on a relative traversal") } split := t.SimpleSplit() root := split.Abs[0].(TraverseRoot) name := root.Name thisCtx := ctx hasNonNil := false for thisCtx != nil { if thisCtx.Variables == nil { thisCtx = thisCtx.parent continue } hasNonNil = true val, exists := thisCtx.Variables[name] if exists { return split.Rel.TraverseRel(val) } thisCtx = thisCtx.parent } if !hasNonNil { return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: "Variables not allowed", Detail: "Variables may not be used here.", Subject: &root.SrcRange, }, } } suggestions := make([]string, 0, len(ctx.Variables)) thisCtx = ctx for thisCtx != nil { for k := range thisCtx.Variables { suggestions = append(suggestions, k) } thisCtx = thisCtx.parent } suggestion := nameSuggestion(name, suggestions) if suggestion != "" { suggestion = fmt.Sprintf(" Did you mean %q?", suggestion) } return cty.DynamicVal, Diagnostics{ { Severity: DiagError, Summary: "Unknown variable", Detail: fmt.Sprintf("There is no variable named %q.%s", name, suggestion), Subject: &root.SrcRange, }, } } // IsRelative returns true if the receiver is a relative traversal, or false // otherwise. func (t Traversal) IsRelative() bool { if len(t) == 0 { return true } if _, firstIsRoot := t[0].(TraverseRoot); firstIsRoot { return false } return true } // SimpleSplit returns a TraversalSplit where the name lookup is the absolute // part and the remainder is the relative part. Supported only for // absolute traversals, and will panic if applied to a relative traversal. // // This can be used by applications that have a relatively-simple variable // namespace where only the top-level is directly populated in the scope, with // everything else handled by relative lookups from those initial values. func (t Traversal) SimpleSplit() TraversalSplit { if t.IsRelative() { panic("can't use SimpleSplit on a relative traversal") } return TraversalSplit{ Abs: t[0:1], Rel: t[1:], } } // RootName returns the root name for a absolute traversal. Will panic if // called on a relative traversal. func (t Traversal) RootName() string { if t.IsRelative() { panic("can't use RootName on a relative traversal") } return t[0].(TraverseRoot).Name } // SourceRange returns the source range for the traversal. func (t Traversal) SourceRange() Range { if len(t) == 0 { // Nothing useful to return here, but we'll return something // that's correctly-typed at least. return Range{} } return RangeBetween(t[0].SourceRange(), t[len(t)-1].SourceRange()) } // TraversalSplit represents a pair of traversals, the first of which is // an absolute traversal and the second of which is relative to the first. // // This is used by calling applications that only populate prefixes of the // traversals in the scope, with Abs representing the part coming from the // scope and Rel representing the remaining steps once that part is // retrieved. type TraversalSplit struct { Abs Traversal Rel Traversal } // TraverseAbs traverses from a scope to the value resulting from the // absolute traversal. func (t TraversalSplit) TraverseAbs(ctx *EvalContext) (cty.Value, Diagnostics) { return t.Abs.TraverseAbs(ctx) } // TraverseRel traverses from a given value, assumed to be the result of // TraverseAbs on some scope, to a final result for the entire split traversal. 
func (t TraversalSplit) TraverseRel(val cty.Value) (cty.Value, Diagnostics) { return t.Rel.TraverseRel(val) } // Traverse is a convenience function to apply TraverseAbs followed by // TraverseRel. func (t TraversalSplit) Traverse(ctx *EvalContext) (cty.Value, Diagnostics) { v1, diags := t.TraverseAbs(ctx) if diags.HasErrors() { return cty.DynamicVal, diags } v2, newDiags := t.TraverseRel(v1) diags = append(diags, newDiags...) return v2, diags } // Join concatenates together the Abs and Rel parts to produce a single // absolute traversal. func (t TraversalSplit) Join() Traversal { return TraversalJoin(t.Abs, t.Rel) } // RootName returns the root name for the absolute part of the split. func (t TraversalSplit) RootName() string { return t.Abs.RootName() } // A Traverser is a step within a Traversal. type Traverser interface { TraversalStep(cty.Value) (cty.Value, Diagnostics) SourceRange() Range isTraverserSigil() isTraverser } // Embed this in a struct to declare it as a Traverser type isTraverser struct { } func (tr isTraverser) isTraverserSigil() isTraverser { return isTraverser{} } // TraverseRoot looks up a root name in a scope. It is used as the first step // of an absolute Traversal, and cannot itself be traversed directly. type TraverseRoot struct { isTraverser Name string SrcRange Range } // TraversalStep on a TraverseName immediately panics, because absolute // traversals cannot be directly traversed. func (tn TraverseRoot) TraversalStep(cty.Value) (cty.Value, Diagnostics) { panic("Cannot traverse an absolute traversal") } func (tn TraverseRoot) SourceRange() Range { return tn.SrcRange } // TraverseAttr looks up an attribute in its initial value. type TraverseAttr struct { isTraverser Name string SrcRange Range } func (tn TraverseAttr) TraversalStep(val cty.Value) (cty.Value, Diagnostics) { return GetAttr(val, tn.Name, &tn.SrcRange) } func (tn TraverseAttr) SourceRange() Range { return tn.SrcRange } // TraverseIndex applies the index operation to its initial value. type TraverseIndex struct { isTraverser Key cty.Value SrcRange Range } func (tn TraverseIndex) TraversalStep(val cty.Value) (cty.Value, Diagnostics) { return Index(val, tn.Key, &tn.SrcRange) } func (tn TraverseIndex) SourceRange() Range { return tn.SrcRange } // TraverseSplat applies the splat operation to its initial value. type TraverseSplat struct { isTraverser Each Traversal SrcRange Range } func (tn TraverseSplat) TraversalStep(val cty.Value) (cty.Value, Diagnostics) { panic("TraverseSplat not yet implemented") } func (tn TraverseSplat) SourceRange() Range { return tn.SrcRange } hcl-2.14.1/traversal_for_expr.go000066400000000000000000000112201431334125700166020ustar00rootroot00000000000000package hcl // AbsTraversalForExpr attempts to interpret the given expression as // an absolute traversal, or returns error diagnostic(s) if that is // not possible for the given expression. // // A particular Expression implementation can support this function by // offering a method called AsTraversal that takes no arguments and // returns either a valid absolute traversal or nil to indicate that // no traversal is possible. Alternatively, an implementation can support // UnwrapExpression to delegate handling of this function to a wrapped // Expression object. 
// // In most cases the calling application is interested in the value // that results from an expression, but in rarer cases the application // needs to see the the name of the variable and subsequent // attributes/indexes itself, for example to allow users to give references // to the variables themselves rather than to their values. An implementer // of this function should at least support attribute and index steps. func AbsTraversalForExpr(expr Expression) (Traversal, Diagnostics) { type asTraversal interface { AsTraversal() Traversal } physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool { _, supported := expr.(asTraversal) return supported }) if asT, supported := physExpr.(asTraversal); supported { if traversal := asT.AsTraversal(); traversal != nil { return traversal, nil } } return nil, Diagnostics{ &Diagnostic{ Severity: DiagError, Summary: "Invalid expression", Detail: "A single static variable reference is required: only attribute access and indexing with constant keys. No calculations, function calls, template expressions, etc are allowed here.", Subject: expr.Range().Ptr(), }, } } // RelTraversalForExpr is similar to AbsTraversalForExpr but it returns // a relative traversal instead. Due to the nature of HCL expressions, the // first element of the returned traversal is always a TraverseAttr, and // then it will be followed by zero or more other expressions. // // Any expression accepted by AbsTraversalForExpr is also accepted by // RelTraversalForExpr. func RelTraversalForExpr(expr Expression) (Traversal, Diagnostics) { traversal, diags := AbsTraversalForExpr(expr) if len(traversal) > 0 { ret := make(Traversal, len(traversal)) copy(ret, traversal) root := traversal[0].(TraverseRoot) ret[0] = TraverseAttr{ Name: root.Name, SrcRange: root.SrcRange, } return ret, diags } return traversal, diags } // ExprAsKeyword attempts to interpret the given expression as a static keyword, // returning the keyword string if possible, and the empty string if not. // // A static keyword, for the sake of this function, is a single identifier. // For example, the following attribute has an expression that would produce // the keyword "foo": // // example = foo // // This function is a variant of AbsTraversalForExpr, which uses the same // interface on the given expression. This helper constrains the result // further by requiring only a single root identifier. // // This function is intended to be used with the following idiom, to recognize // situations where one of a fixed set of keywords is required and arbitrary // expressions are not allowed: // // switch hcl.ExprAsKeyword(expr) { // case "allow": // // (take suitable action for keyword "allow") // case "deny": // // (take suitable action for keyword "deny") // default: // diags = append(diags, &hcl.Diagnostic{ // // ... "invalid keyword" diagnostic message ... // }) // } // // The above approach will generate the same message for both the use of an // unrecognized keyword and for not using a keyword at all, which is usually // reasonable if the message specifies that the given value must be a keyword // from that fixed list. // // Note that in the native syntax the keywords "true", "false", and "null" are // recognized as literal values during parsing and so these reserved words // cannot not be accepted as keywords by this function. // // Since interpreting an expression as a keyword bypasses usual expression // evaluation, it should be used sparingly for situations where e.g. 
one of // a fixed set of keywords is used in a structural way in a special attribute // to affect the further processing of a block. func ExprAsKeyword(expr Expression) string { type asTraversal interface { AsTraversal() Traversal } physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool { _, supported := expr.(asTraversal) return supported }) if asT, supported := physExpr.(asTraversal); supported { if traversal := asT.AsTraversal(); len(traversal) == 1 { return traversal.RootName() } } return "" } hcl-2.14.1/traversal_for_expr_test.go000066400000000000000000000100701431334125700176430ustar00rootroot00000000000000package hcl import ( "testing" ) type asTraversalSupported struct { staticExpr RootName string } type asTraversalSupportedAttr struct { staticExpr RootName string AttrName string } type asTraversalNotSupported struct { staticExpr } type asTraversalDeclined struct { staticExpr } type asTraversalWrappedDelegated struct { original Expression staticExpr } func (e asTraversalSupported) AsTraversal() Traversal { return Traversal{ TraverseRoot{ Name: e.RootName, }, } } func (e asTraversalSupportedAttr) AsTraversal() Traversal { return Traversal{ TraverseRoot{ Name: e.RootName, }, TraverseAttr{ Name: e.AttrName, }, } } func (e asTraversalDeclined) AsTraversal() Traversal { return nil } func (e asTraversalWrappedDelegated) UnwrapExpression() Expression { return e.original } func TestAbsTraversalForExpr(t *testing.T) { tests := []struct { Expr Expression WantRootName string }{ { asTraversalSupported{RootName: "foo"}, "foo", }, { asTraversalNotSupported{}, "", }, { asTraversalDeclined{}, "", }, { asTraversalWrappedDelegated{ original: asTraversalSupported{RootName: "foo"}, }, "foo", }, { asTraversalWrappedDelegated{ original: asTraversalWrappedDelegated{ original: asTraversalSupported{RootName: "foo"}, }, }, "foo", }, } for _, test := range tests { t.Run("", func(t *testing.T) { got, diags := AbsTraversalForExpr(test.Expr) switch { case got != nil: if test.WantRootName == "" { t.Fatalf("traversal was returned; want error") } if len(got) != 1 { t.Fatalf("wrong traversal length %d; want 1", len(got)) } gotRoot, ok := got[0].(TraverseRoot) if !ok { t.Fatalf("first traversal step is %T; want hcl.TraverseRoot", got[0]) } if gotRoot.Name != test.WantRootName { t.Errorf("wrong root name %q; want %q", gotRoot.Name, test.WantRootName) } default: if !diags.HasErrors() { t.Errorf("returned nil traversal without error diagnostics") } if test.WantRootName != "" { t.Errorf("traversal was not returned; want TraverseRoot(%q)", test.WantRootName) } } }) } } func TestRelTraversalForExpr(t *testing.T) { tests := []struct { Expr Expression WantFirstName string }{ { asTraversalSupported{RootName: "foo"}, "foo", }, { asTraversalNotSupported{}, "", }, { asTraversalDeclined{}, "", }, } for _, test := range tests { t.Run("", func(t *testing.T) { got, diags := RelTraversalForExpr(test.Expr) switch { case got != nil: if test.WantFirstName == "" { t.Fatalf("traversal was returned; want error") } if len(got) != 1 { t.Fatalf("wrong traversal length %d; want 1", len(got)) } gotRoot, ok := got[0].(TraverseAttr) if !ok { t.Fatalf("first traversal step is %T; want hcl.TraverseAttr", got[0]) } if gotRoot.Name != test.WantFirstName { t.Errorf("wrong root name %q; want %q", gotRoot.Name, test.WantFirstName) } default: if !diags.HasErrors() { t.Errorf("returned nil traversal without error diagnostics") } if test.WantFirstName != "" { t.Errorf("traversal was not returned; want TraverseAttr(%q)", test.WantFirstName) } } 
}) } } func TestExprAsKeyword(t *testing.T) { tests := []struct { Expr Expression Want string }{ { asTraversalSupported{RootName: "foo"}, "foo", }, { asTraversalSupportedAttr{ RootName: "foo", AttrName: "bar", }, "", }, { asTraversalNotSupported{}, "", }, { asTraversalDeclined{}, "", }, { asTraversalWrappedDelegated{ original: asTraversalSupported{RootName: "foo"}, }, "foo", }, { asTraversalWrappedDelegated{ original: asTraversalWrappedDelegated{ original: asTraversalSupported{RootName: "foo"}, }, }, "foo", }, } for _, test := range tests { t.Run("", func(t *testing.T) { got := ExprAsKeyword(test.Expr) if got != test.Want { t.Errorf("wrong result %q; want %q\ninput: %T", got, test.Want, test.Expr) } }) } }