pax_global_header00006660000000000000000000000064143145551160014517gustar00rootroot0000000000000052 comment=867ee0cca56a62f111942e51676663c8ae1ce77f atlas-0.7.2/000077500000000000000000000000001431455511600126315ustar00rootroot00000000000000atlas-0.7.2/.github/000077500000000000000000000000001431455511600141715ustar00rootroot00000000000000atlas-0.7.2/.github/workflows/000077500000000000000000000000001431455511600162265ustar00rootroot00000000000000atlas-0.7.2/.github/workflows/build-docker.yaml000066400000000000000000000017351431455511600214640ustar00rootroot00000000000000name: Build Docker on: pull_request: push: branches: - master env: CRDB_VERSIONS: v21.2.11 v22.1.0 jobs: build-services: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2.4.0 - uses: actions/setup-go@v2 with: go-version: '1.18' - name: Log in to registry run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $ --password-stdin - name: "build cockroach image" run: | VER="${{ env.CRDB_VERSIONS }}" for i in $VER do : if ! 
docker manifest inspect ghcr.io/ariga/cockroachdb-single-node:$i; then go run internal/ci/cockroach/main.go $i > internal/ci/cockroach/Dockerfile docker build -t ghcr.io/ariga/cockroachdb-single-node:$i internal/ci/cockroach/ docker push ghcr.io/ariga/cockroachdb-single-node:$i else echo image already exists fi doneatlas-0.7.2/.github/workflows/ci-website.yaml000066400000000000000000000012731431455511600211500ustar00rootroot00000000000000name: Docsite CI on: push: branches-ignore: - master jobs: docsite_ci: name: verify build doesn't crash runs-on: ubuntu-latest defaults: run: working-directory: ./doc/website steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2.1.5 with: node-version: 16 - name: Cache dependecies uses: actions/cache@v2.1.6 with: path: ~/.npm key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} restore-keys: | ${{ runner.os }}-node- - name: Install Dependencies run: npm install - name: Build website run: npm run build atlas-0.7.2/.github/workflows/ci.yml000066400000000000000000000402021431455511600173420ustar00rootroot00000000000000# # # # # # # # # # # # # # # # # CODE GENERATED - DO NOT EDIT # # # # # # # # # # # # # # # # name: Continuous Integration on: pull_request: paths-ignore: - 'doc/**' push: branches: - master paths-ignore: - 'doc/**' jobs: lint: runs-on: ubuntu-latest steps: - uses: actions/setup-go@v2 with: go-version: 1.18 - uses: actions/checkout@v2 - name: Run linters uses: golangci/golangci-lint-action@v3 with: args: --verbose generate-cmp: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2.4.0 - uses: actions/setup-go@v2 with: go-version: '1.19' - uses: actions/cache@v2.1.5 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: run "go generate ./..." run: go generate ./... - name: go generate cmd/atlas working-directory: cmd/atlas run: go generate ./... 
- name: Verify generated files are checked in properly run: | status=$(git status --porcelain | grep -v "go.\(sum\|mod\)" | cat) if [ -n "$status" ]; then echo "you need to run 'go generate ./...' and commit the changes" echo "$status" exit 1 fi unit: runs-on: ubuntu-latest strategy: matrix: go: [ '1.18', '1.19' ] steps: - uses: actions/checkout@v2 - uses: actions/setup-go@v2 with: go-version: ${{ matrix.go }} - uses: actions/cache@v2.1.5 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run sql tests run: go test -race ./... working-directory: sql - name: Run schemahcl tests run: go test -race ./... working-directory: schemahcl cli: runs-on: ubuntu-latest strategy: matrix: go: [ '1.19' ] steps: - uses: actions/checkout@v2 - uses: actions/setup-go@v2 with: go-version: ${{ matrix.go }} - uses: actions/cache@v2.1.5 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run cli tests run: go test -race ./... working-directory: cmd/atlas integration: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for HCL working-directory: internal/integration/hclsqlspec run: go test -race -count=2 -v ./... 
revisions: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2.3.4 with: fetch-depth: 0 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Checkout origin/master run: git checkout origin/master - name: Create revisions from master run: go run main.go migrate apply --dir file://internal/cmdapi/testdata/sqlite --url sqlite://db?_fk=1 working-directory: cmd/atlas - name: Checkout previous HEAD run: git checkout - - name: Migrate revisions table to HEAD run: go run main.go migrate apply --dir file://internal/cmdapi/testdata/sqlite --url sqlite://db?_fk=1 working-directory: cmd/atlas integration-mysql56: runs-on: ubuntu-latest services: mysql56: image: mysql:5.6.35 env: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass ports: - 3306:3306 options: >- --health-cmd "mysqladmin ping -ppass" --health-interval 10s --health-start-period 10s --health-timeout 5s --health-retries 10 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for mysql56 working-directory: internal/integration run: go test -race -count=2 -v -run="MySQL" -version="mysql56" ./... 
integration-mysql57: runs-on: ubuntu-latest services: mysql57: image: mysql:5.7.26 env: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass ports: - 3307:3306 options: >- --health-cmd "mysqladmin ping -ppass" --health-interval 10s --health-start-period 10s --health-timeout 5s --health-retries 10 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for mysql57 working-directory: internal/integration run: go test -race -count=2 -v -run="MySQL" -version="mysql57" ./... integration-mysql8: runs-on: ubuntu-latest services: mysql8: image: mysql:8 env: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass ports: - 3308:3306 options: >- --health-cmd "mysqladmin ping -ppass" --health-interval 10s --health-start-period 10s --health-timeout 5s --health-retries 10 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for mysql8 working-directory: internal/integration run: go test -race -count=2 -v -run="MySQL" -version="mysql8" ./... 
integration-maria107: runs-on: ubuntu-latest services: maria107: image: mariadb:10.7 env: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass ports: - 4306:3306 options: >- --health-cmd "mysqladmin ping -ppass" --health-interval 10s --health-start-period 10s --health-timeout 5s --health-retries 10 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for maria107 working-directory: internal/integration run: go test -race -count=2 -v -run="MySQL" -version="maria107" ./... integration-maria102: runs-on: ubuntu-latest services: maria102: image: mariadb:10.2.32 env: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass ports: - 4307:3306 options: >- --health-cmd "mysqladmin ping -ppass" --health-interval 10s --health-start-period 10s --health-timeout 5s --health-retries 10 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for maria102 working-directory: internal/integration run: go test -race -count=2 -v -run="MySQL" -version="maria102" ./... 
integration-maria103: runs-on: ubuntu-latest services: maria103: image: mariadb:10.3.13 env: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass ports: - 4308:3306 options: >- --health-cmd "mysqladmin ping -ppass" --health-interval 10s --health-start-period 10s --health-timeout 5s --health-retries 10 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for maria103 working-directory: internal/integration run: go test -race -count=2 -v -run="MySQL" -version="maria103" ./... integration-postgres10: runs-on: ubuntu-latest services: postgres10: image: postgres:10 env: POSTGRES_DB: test POSTGRES_PASSWORD: pass ports: - 5430:5432 options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for postgres10 working-directory: internal/integration run: go test -race -count=2 -v -run="Postgres" -version="postgres10" ./... integration-postgres11: runs-on: ubuntu-latest services: postgres11: image: postgres:11 env: POSTGRES_DB: test POSTGRES_PASSWORD: pass ports: - 5431:5432 options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for postgres11 working-directory: internal/integration run: go test -race -count=2 -v -run="Postgres" -version="postgres11" ./... 
integration-postgres12: runs-on: ubuntu-latest services: postgres12: image: postgres:12.3 env: POSTGRES_DB: test POSTGRES_PASSWORD: pass ports: - 5432:5432 options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for postgres12 working-directory: internal/integration run: go test -race -count=2 -v -run="Postgres" -version="postgres12" ./... integration-postgres13: runs-on: ubuntu-latest services: postgres13: image: postgres:13.1 env: POSTGRES_DB: test POSTGRES_PASSWORD: pass ports: - 5433:5432 options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for postgres13 working-directory: internal/integration run: go test -race -count=2 -v -run="Postgres" -version="postgres13" ./... integration-postgres14: runs-on: ubuntu-latest services: postgres14: image: postgres:14 env: POSTGRES_DB: test POSTGRES_PASSWORD: pass ports: - 5434:5432 options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for postgres14 working-directory: internal/integration run: go test -race -count=2 -v -run="Postgres" -version="postgres14" ./... 
integration-tidb5: runs-on: ubuntu-latest services: tidb5: image: pingcap/tidb:v5.4.0 ports: - 4309:4000 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for tidb5 working-directory: internal/integration run: go test -race -count=2 -v -run="TiDB" -version="tidb5" ./... integration-tidb6: runs-on: ubuntu-latest services: tidb6: image: pingcap/tidb:v6.0.0 ports: - 4310:4000 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for tidb6 working-directory: internal/integration run: go test -race -count=2 -v -run="TiDB" -version="tidb6" ./... integration-sqlite: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for sqlite working-directory: internal/integration run: go test -race -count=2 -v -run="SQLite.*" -version="sqlite" ./... integration-cockroach: runs-on: ubuntu-latest services: cockroach: image: ghcr.io/ariga/cockroachdb-single-node:v21.2.11 ports: - 26257:26257 steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - name: Run integration tests for cockroach working-directory: internal/integration run: go test -race -count=2 -v -run="Cockroach" -version="cockroach" ./... 
atlas-0.7.2/.github/workflows/website.yaml000066400000000000000000000021311431455511600205510ustar00rootroot00000000000000name: Continuous Deployment on: push: branches: - master paths: - 'doc/**' jobs: docs: name: deploy atlasgo.io runs-on: ubuntu-latest defaults: run: working-directory: ./doc/website steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2.1.5 with: node-version: 16 - name: Cache dependecies uses: actions/cache@v2.1.6 with: path: ~/.npm key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} restore-keys: | ${{ runner.os }}-node- - name: Install Dependencies run: npm install - name: Build website run: npm run build - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v1 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-region: us-east-1 - name: Deploy Website run: npm run upload - name: Invalidate Cache run: npm run invalidate-cdn atlas-0.7.2/.golangci.yml000066400000000000000000000010111431455511600152060ustar00rootroot00000000000000run: timeout: 3m issues: include: - EXC0012 exclude-rules: - path: _test\.go linters: - gosec - path: sql/migrate/dir.go linters: - gosec linters-settings: goheader: template: |- Copyright 2021-present The Atlas Authors. All rights reserved. This source code is licensed under the Apache 2.0 license found in the LICENSE file in the root directory of this source tree. linters: disable-all: true enable: - gosec - revive - goheader atlas-0.7.2/LICENSE000066400000000000000000000261351431455511600136450ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. 
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.atlas-0.7.2/README.md000066400000000000000000000051671431455511600141210ustar00rootroot00000000000000

Manage your database schemas with Atlas CLI

Explore the docs »

Discord · Twitter · Report a Bug · Request a Feature

Atlas CLI is an open source tool that helps developers manage their database schemas by applying modern DevOps principles. Contrary to existing tools, Atlas intelligently plans schema migrations for you. Atlas users can use the [Atlas DDL](https://atlasgo.io/guides/ddl#hcl) (data definition language) to describe their desired database schema and use the command-line tool to plan and apply the migrations to their systems. ## Quick Installation On macOS: ```shell brew install ariga/tap/atlas ``` Click [here](https://atlasgo.io/cli/getting-started/setting-up) to read instructions for other platforms. ## Getting Started Get started with Atlas by following the [Getting Started](https://atlasgo.io/cli/getting-started/setting-up) docs. This tutorial teaches you how to inspect a database, generate a migration plan and apply the migration to your database. ## Features - **Inspecting a database**: easily inspect your database schema by providing a database URL. ```shell atlas schema inspect -u "mysql://root:pass@localhost:3306/example" > atlas.hcl ``` - **Applying a migration**: generate a migration plan to apply on the database by providing an HCL file with the desired Atlas schema. ```shell atlas schema apply -u "mysql://root:pass@localhost:3306/example" -f atlas.hcl ``` - **Declarative Migrations vs. Versioned Migrations**: Atlas offers two workflows. Declarative migrations allow the user to provide a desired state and Atlas gets the schema there instantly (simply using inspect and apply commands). Alternatively, versioned migrations are explicitly defined and assigned a version. Atlas can then bring a schema to the desired version by following the migrations between the current version and the specified one. ### About the Project Read more about the motivation of the project [here](https://atlasgo.io/blog/2021/11/25/meet-atlas). 
### Supported Databases MySQL, MariaDB, PostgresSQL, SQLite, TiDB, CockroachDB atlas-0.7.2/cmd/000077500000000000000000000000001431455511600133745ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/000077500000000000000000000000001431455511600145005ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/doc.go000066400000000000000000000025201431455511600155730ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. //go:build ignore // +build ignore package main import ( "log" "os" "strings" "text/template" "ariga.io/atlas/cmd/atlas/internal/cmdapi" "github.com/spf13/cobra" ) func main() { f, err := os.Create("../../doc/md/reference.md") if err != nil { log.Fatal(err) } t, err := template.New(""). Funcs(template.FuncMap{ "header": func(depth int) string { return strings.Repeat("#", depth+1) }, "subheader": func(depth int) string { return strings.Repeat("#", depth+2) }, }). 
ParseFiles("doc.tmpl") if err != nil { log.Fatal(err) } if err := t.ExecuteTemplate(f, "header", nil); err != nil { log.Fatal(err) } blocks := prepare(cmdapi.Root, make([]*block, 0), 0) if err := t.ExecuteTemplate(f, "body", struct { Blocks []*block }{Blocks: blocks}); err != nil { log.Fatal(err) } } type block struct { Depth int *cobra.Command } func prepare(cmd *cobra.Command, existing []*block, depth int) []*block { if depth > 0 { existing = append(existing, &block{ Depth: depth, Command: cmd, }) } for _, child := range cmd.Commands() { existing = prepare(child, existing, depth+1) } return existing } atlas-0.7.2/cmd/atlas/doc.tmpl000066400000000000000000000024041431455511600161430ustar00rootroot00000000000000{{- define "header" -}} --- title: CLI Reference id: cli-reference slug: cli-reference --- {{- end }} {{ define "body" }} ## Introduction This document serves as reference documentation for all available commands in the Atlas CLI. Similar information can be obtained by running any atlas command with the `-h` or `--help` flags. For a more detailed introduction to the CLI capabilities, head over to the [Getting Started](/getting-started/) page. ## Distributed Binaries The binaries distributed in official releases are released under the [Ariga End User License](https://ariga.io/legal/atlas/eula). If you would like to build Atlas from source follow the instructions [here](https://atlasgo.io/cli-reference#building-from-source). 
### Building from Source If you would like to build Atlas from source without the UI code run: ```shell go get ariga.io/atlas/cmd/atlas ``` {{ range .Blocks }} {{ header .Depth }} {{ .CommandPath }} {{ .Short }} {{- if .UseLine }} #### Usage ``` {{ .UseLine }} ``` {{- end }} {{- if .Long }} #### Details {{ .Long }} {{- end }} {{ if .Example }} #### Example ``` {{ .Example }} ``` {{- end }} {{- $flags := .NonInheritedFlags }} {{- if $flags.HasAvailableFlags }} #### Flags ``` {{ $flags.FlagUsages }} ``` {{ end }} {{ end }} {{ end }} atlas-0.7.2/cmd/atlas/generate.go000066400000000000000000000003621431455511600166220ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package main //go:generate go run doc.go atlas-0.7.2/cmd/atlas/go.mod000066400000000000000000000062221431455511600156100ustar00rootroot00000000000000module ariga.io/atlas/cmd/atlas go 1.18 require ( ariga.io/atlas v0.7.1-0.20220925103247-db83ea0cd178 entgo.io/ent v0.11.1 github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220816024939-bc8df83d7b9d github.com/auxten/postgresql-parser v1.0.1 github.com/fatih/color v1.13.0 github.com/go-sql-driver/mysql v1.6.0 github.com/hashicorp/hcl/v2 v2.13.0 github.com/lib/pq v1.10.6 github.com/manifoldco/promptui v0.9.0 github.com/mattn/go-sqlite3 v1.14.14 github.com/mitchellh/go-homedir v1.1.0 github.com/pingcap/tidb/parser v0.0.0-20220817134052-9709249e523a github.com/spf13/cobra v1.5.0 github.com/spf13/pflag v1.0.5 github.com/stretchr/testify v1.8.0 golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57 ) require ( github.com/agext/levenshtein v1.2.1 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054 // indirect github.com/chzyer/readline v1.5.1 // indirect github.com/cockroachdb/apd 
v1.1.1-0.20181017181144-bced77f817b4 // indirect github.com/cockroachdb/errors v1.8.2 // indirect github.com/cockroachdb/logtags v0.0.0-20190617123548-eb05cc24525f // indirect github.com/cockroachdb/redact v1.0.8 // indirect github.com/cockroachdb/sentry-go v0.6.1-cockroachdb.2 // indirect github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dustin/go-humanize v1.0.0 // indirect github.com/getsentry/raven-go v0.2.0 // indirect github.com/go-openapi/inflect v0.19.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/protobuf v1.4.3 // indirect github.com/google/go-cmp v0.5.6 // indirect github.com/google/uuid v1.3.0 // indirect github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/konsorten/go-windows-terminal-sequences v1.0.3 // indirect github.com/kr/pretty v0.2.0 // indirect github.com/kr/text v0.2.0 // indirect github.com/mattn/go-colorable v0.1.9 // indirect github.com/mattn/go-isatty v0.0.14 // indirect github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect github.com/pingcap/errors v0.11.5-0.20210425183316-da1aaba5fb63 // indirect github.com/pingcap/log v0.0.0-20210625125904-98ed8e2eb1c7 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 // indirect github.com/sirupsen/logrus v1.6.0 // indirect github.com/zclconf/go-cty v1.8.0 // indirect go.uber.org/atomic v1.7.0 // indirect go.uber.org/multierr v1.6.0 // indirect go.uber.org/zap v1.18.1 // indirect golang.org/x/exp v0.0.0-20220428152302-39d4317da171 // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect golang.org/x/sys v0.0.0-20220829200755-d48e67d00261 // indirect golang.org/x/text v0.3.7 // indirect google.golang.org/genproto v0.0.0-20200911024640-645f7a48b24f // indirect 
google.golang.org/grpc v1.33.1 // indirect google.golang.org/protobuf v1.25.0 // indirect gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) atlas-0.7.2/cmd/atlas/go.sum000066400000000000000000001321251431455511600156370ustar00rootroot00000000000000ariga.io/atlas v0.6.4-0.20220830111134-f2d233c99aa1 h1:/8U7e/qiVREcJHKltZptJoa10gltLcUBVlcqU8gk1V8= ariga.io/atlas v0.6.4-0.20220830111134-f2d233c99aa1/go.mod h1:ft47uSh5hWGDCmQC9DsztZg6Xk+KagM5Ts/mZYKb9JE= ariga.io/atlas v0.6.4-0.20220831192411-1e893f70cbe8 h1:BmgIAqhnFoem+8/jhVAwm+YDRdfeMgcrbOxf/6+3Teg= ariga.io/atlas v0.6.4-0.20220831192411-1e893f70cbe8/go.mod h1:ft47uSh5hWGDCmQC9DsztZg6Xk+KagM5Ts/mZYKb9JE= ariga.io/atlas v0.6.5-0.20220907135005-d80f5e6b79cf h1:sqRKqa3gdunWKmv4G7FC0j4ROKJ8rG+6YoMBPTVGlxo= ariga.io/atlas v0.6.5-0.20220907135005-d80f5e6b79cf/go.mod h1:ft47uSh5hWGDCmQC9DsztZg6Xk+KagM5Ts/mZYKb9JE= ariga.io/atlas v0.7.0 h1:daEFdUsyNm7EHyzcMfjWwq/fVv48fCfad+dIGyobY1k= ariga.io/atlas v0.7.0/go.mod h1:ft47uSh5hWGDCmQC9DsztZg6Xk+KagM5Ts/mZYKb9JE= ariga.io/atlas v0.7.1-0.20220925103247-db83ea0cd178 h1:/fhqvnyyi4h5epqhIsDQW9NLkklDnQ0GRgQqfUhl9Pw= ariga.io/atlas v0.7.1-0.20220925103247-db83ea0cd178/go.mod h1:ft47uSh5hWGDCmQC9DsztZg6Xk+KagM5Ts/mZYKb9JE= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= entgo.io/ent v0.11.1 h1:im67R+2W3Nee2bNS2YnoYz8oAF0Qz4AOlIvKRIAEISY= entgo.io/ent v0.11.1/go.mod h1:X5b1YfMayrRTgKGO//8IqpL7XJx0uqdeReEkxNpXROA= github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/CloudyKit/fastprinter v0.0.0-20170127035650-74b38d55f37a/go.mod h1:EFZQ978U7x8IRnstaskI3IysnWY5Ao3QgZUKOXlsAdw= 
github.com/CloudyKit/jet v2.1.3-0.20180809161101-62edd43e4f88+incompatible/go.mod h1:HPYO+50pSWkPoj9Q/eq0aRGByCL6ScRlUmiEX5Zgm+w= github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY= github.com/Joker/jade v1.0.1-0.20190614124447-d475f43051e7/go.mod h1:6E6s8o2AE4KhCrqr6GRJjdC/gNfTdxkIXvuGZZda2VM= github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398/go.mod h1:a1uqRtAwp2Xwc6WNPJEufxJ7fx3npB4UV/JOLmbu5I0= github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220816024939-bc8df83d7b9d h1:0xIrH2lJbraclvJT3pvTf3u2oCAL60cAqiv4qRpz4EI= github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220816024939-bc8df83d7b9d/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/auxten/postgresql-parser v1.0.1 h1:x+qiEHAe2cH55Kly64dWh4tGvUKEQwMmJgma7a1kbj4= github.com/auxten/postgresql-parser v1.0.1/go.mod h1:Nf27dtv8EU1C+xNkoLD3zEwfgJfDDVi8Zl86gznxPvI= github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= 
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054 h1:uH66TXeswKn5PW5zdZ39xEwfS9an067BirqA+P4QaLI= github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA= github.com/chzyer/logex v1.1.10 h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/logex v1.2.1 h1:XHDu3E6q+gdHgsdTPH6ImJMIp436vR6MPtH8gP05QzM= github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI= github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1 h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04= github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cockroachdb/apd v1.1.1-0.20181017181144-bced77f817b4 h1:XWEdfNxDkZI3DXXlpo0hZJ1xdaH/f3CKuZpk93pS/Y0= github.com/cockroachdb/apd v1.1.1-0.20181017181144-bced77f817b4/go.mod h1:mdGz2CnkJrefFtlLevmE7JpL2zB9tKofya/6w7wWzNA= github.com/cockroachdb/datadriven v1.0.0/go.mod h1:5Ib8Meh+jk1RlHIXej6Pzevx/NLlNvQB9pmSBZErGA4= 
github.com/cockroachdb/errors v1.6.1/go.mod h1:tm6FTP5G81vwJ5lC0SizQo374JNCOPrHyXGitRJoDqM= github.com/cockroachdb/errors v1.8.2 h1:rnnWK9Nn5kEMOGz9531HuDx/FOleL4NVH20VsDexVC8= github.com/cockroachdb/errors v1.8.2/go.mod h1:qGwQn6JmZ+oMjuLwjWzUNqblqk0xl4CVV3SQbGwK7Ac= github.com/cockroachdb/logtags v0.0.0-20190617123548-eb05cc24525f h1:o/kfcElHqOiXqcou5a3rIlMc7oJbMQkeLk0VQJ7zgqY= github.com/cockroachdb/logtags v0.0.0-20190617123548-eb05cc24525f/go.mod h1:i/u985jwjWRlyHXQbwatDASoW0RMlZ/3i9yJHE2xLkI= github.com/cockroachdb/redact v1.0.8 h1:8QG/764wK+vmEYoOlfobpe12EQcS81ukx/a4hdVMxNw= github.com/cockroachdb/redact v1.0.8/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= github.com/cockroachdb/sentry-go v0.6.1-cockroachdb.2 h1:IKgmqgMQlVJIZj19CdocBeSfSaiCbEBZGKODaixqtHM= github.com/cockroachdb/sentry-go v0.6.1-cockroachdb.2/go.mod h1:8BT+cPK6xvFOcRlk0R8eg+OTkcqI6baNH4xAkpiYVvQ= github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 h1:iwZdTE0PVqJCos1vaoKsclOGD3ADKpshg3SRtYBbwso= github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548/go.mod h1:e6NPNENfs9mPDVNRekM7lKScauxd5kXTr1Mfyig6TDM= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew 
v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgraph-io/badger v1.6.0/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhYy6psNgSztDR4= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHjkjCrw= github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4/go.mod h1:T9YF2M40nIgbVgp3rreNmTged+9HrbNTIQf1PsaIiTA= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/gavv/httpexpect v2.0.0+incompatible/go.mod h1:x+9tiU1YnrOvnB725RkpoLv1M62hOWzwo5OXotisrKc= github.com/getsentry/raven-go 
v0.2.0 h1:no+xWJRb5ZI7eE8TWgIq1jLulQiIoLG0IfYxv5JYMGs= github.com/getsentry/raven-go v0.2.0/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s= github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM= github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w= github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4= github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4= github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= github.com/gogo/googleapis v0.0.0-20180223154316-0cd9801be74a/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod 
h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/gogo/status v1.1.0/go.mod h1:BFv9nrluPLmrS0EmGVvLaPNmRosr9KapBYd5/hpY1WM= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3 h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/gomodule/redigo v1.7.1-0.20190724094224-574c33c3df38/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod 
h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/hcl/v2 v2.13.0 h1:0Apadu1w6M11dyGFxWnmhhcMjkbAiKCv7G1r/2QgCNc= github.com/hashicorp/hcl/v2 v2.13.0/go.mod h1:e4z5nxYlWNPdDSNYX+ph14EvWYMFm3eP0zIUqPc2jr0= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/hydrogen18/memlistener v0.0.0-20141126152155-54553eb933fb/go.mod h1:qEIFzExnS6016fRpRfxrExeVn2gbClQA99gQhnIcdhE= github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= 
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/iris-contrib/blackfriday v2.0.0+incompatible/go.mod h1:UzZ2bDEoaSGPbkg6SAB4att1aAwTmVIx/5gCVqeyUdI= github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/+fafWORmlnuysV2EMP8MW+qe0= github.com/iris-contrib/i18n v0.0.0-20171121225848-987a633949d0/go.mod h1:pMCz62A0xJL6I+umB2YTlFRwWXaDFA0jy+5HzGiJjqI= github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrOcOqfqxa4hXw= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q= github.com/juju/loggo v0.0.0-20180524022052-584905176618/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U= github.com/juju/testing v0.0.0-20180920084828-472a3e8b2073/go.mod h1:63prj8cnj0tU0S9OHjGJn+b1h0ZghCndfnbQolrYTwA= github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= github.com/kataras/golog v0.0.9/go.mod h1:12HJgwBIZFNGL0EJnMRhmvGA0PQGx8VFwrZtM4CqbAk= github.com/kataras/iris/v12 v12.0.1/go.mod h1:udK4vLQKkdDqMGJJVd/msuMtN6hpYJhg/lSzuxjhO+U= github.com/kataras/neffos v0.0.10/go.mod h1:ZYmJC07hQPW67eKuzlfY7SO3bC0mw83A3j6im82hfqw= github.com/kataras/pio v0.0.0-20190103105442-ea782b38602d/go.mod h1:NV88laa9UiiDuX9AhMbDPkGYSPugBOV6yTZB1l2K9Z0= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.9.0/go.mod 
h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g= github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.6 h1:jbk+ZieJ0D7EVGJYpL9QTz7/YW6UHbmdnZWYyK5cdBs= github.com/lib/pq v1.10.6/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.9 h1:sqDoxXbdeALODt0DAeJCVp38ps9ZogZEAXjus69YV3U= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= 
github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-sqlite3 v1.14.14 h1:qZgc/Rwetq+MtyE18WhzjokPD93dNqLGNT3QJuLvBGw= github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= github.com/mediocregopher/mediocre-go-lib v0.0.0-20181029021733-cb65787f37ed/go.mod h1:dSsfyI2zABAdhcbvkXqgxOxrCsbYeHCPgrZkku60dSg= github.com/mediocregopher/radix/v3 v3.3.0/go.mod h1:EmfVyvspXz1uZEyPBMyGK+kjWiKQGvsUt6O3Pj+LDCQ= github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM= github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= github.com/nats-io/nats.go v1.8.1/go.mod 
h1:BrFz9vVn0fU3AcH9Vn4Kd7W0NpJ651tD5omQ3M8LwxM= github.com/nats-io/nkeys v0.0.2/go.mod h1:dab7URMsZm6Z/jp9Z5UGa87Uutgc2mVpXLC4B7TDb/4= github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.13.0/go.mod h1:+REjRxOmWfHCjfv9TTWB1jD1Frx4XydAD3zm1lskyM0= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 h1:q2e307iGHPdTGp0hoxKjt1H5pDo6utceo3dQVK3I5XQ= github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5/go.mod h1:jvVRKCrJTQWu0XVbaOlby/2lO20uSCHEMzzplHXte1o= github.com/pingcap/errors v0.11.0/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= github.com/pingcap/errors v0.11.5-0.20210425183316-da1aaba5fb63 h1:+FZIDR/D97YOPik4N4lPDaUcLDF/EQPogxtlHB2ZZRM= github.com/pingcap/errors v0.11.5-0.20210425183316-da1aaba5fb63/go.mod h1:X2r9ueLEUZgtx2cIogM0v4Zj5uvvzhuuiu7Pn8HzMPg= github.com/pingcap/log v0.0.0-20210625125904-98ed8e2eb1c7 h1:k2BbABz9+TNpYRwsCCFS8pEEnFVOdbgEjL/kTlLuzZQ= github.com/pingcap/log v0.0.0-20210625125904-98ed8e2eb1c7/go.mod h1:8AanEdAHATuRurdGxZXBz0At+9avep+ub7U1AGYLIMM= github.com/pingcap/tidb/parser v0.0.0-20220817134052-9709249e523a 
h1:KhEhzJLyVSFT13yk/M2BQ7eE6Ofs7jBnALjc0ICZbsQ= github.com/pingcap/tidb/parser v0.0.0-20220817134052-9709249e523a/go.mod h1:wjvp+T3/T9XYt0nKqGX3Kc1AKuyUcfno6LTc6b2A6ew= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk= github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/sclevine/agouti v3.0.0+incompatible/go.mod h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.6.0 h1:UBcNElsrwanuuMsnGSlYmtmgbb23qDR5dG+6X6Oo89I= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= 
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= github.com/spf13/cobra v1.5.0 h1:X+jTBEBqF0bHN+9cSMgmfuvv2VHJ9ezmFNf9Y/XstYU= github.com/spf13/cobra v1.5.0/go.mod h1:dWXEIy2H428czQCjInthrTRUg7yKbok+2Qi/yBIJoUM= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= github.com/ugorji/go/codec 
v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasthttp v1.6.0/go.mod h1:FstJa9V+Pj9vQ7OJie2qMHdwemEDaDiSdBnvPM1Su9w= github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI= github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/zclconf/go-cty v1.8.0 h1:s4AvqaeQzJIu3ndv4gVIhplVD0krU+bgrcLSVUnaWuA= github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= 
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/goleak v1.1.10 h1:z+mqJhf6ss6BSfSM671tgKyZBFPTTJM+HLxnhPC3wu0= go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.18.1 h1:CSUJ2mjFszzEWt4CdKISEuChVIXGBn3lAPwkRGyVrc4= go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20220428152302-39d4317da171 h1:TfdoLivD44QwvssI9Sv1xwa5DcL5XQr4au4sZ2F2NV4= golang.org/x/exp v0.0.0-20220428152302-39d4317da171/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod 
h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190930215403-16217165b5de h1:5hukYrvBGR8/eNkX5mdUezrA6JiaEZDtJb9Ei+1LlBs= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57 h1:LQmS1nU0twXLA96Kt7U9qtHJEbBk3z6Q0V4UXjZkpr4= golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190327091125-710a502c58a2/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b h1:uwuIcX0g4Yl1NC5XAz37xsr2lTtcqevgzYNVt49waME= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= golang.org/x/sync 
v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201214210602-f9fddec55a1e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e h1:fLOSk5Q00efkSvAm+4xcoXD+RRmLmmulPn5I3Y9F2EM= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220829200755-d48e67d00261 h1:v6hYoSR9T5oet+pMXwUWkbiVqx/63mlHjefrHmxwfeY= golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= 
golang.org/x/tools v0.0.0-20190327201419-c70d86f8b7cf/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.9-0.20211216111533-8d383106f7e7 h1:M1gcVrIb2lSn2FIL19DG0+/b8nNVKJ7W7b4WcAGZAYM= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180518175338-11a468237815/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod 
h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200911024640-645f7a48b24f h1:Yv4xsIx7HZOoyUGSJ2ksDyWE2qIBXROsZKt2ny3hCGM= google.golang.org/genproto v0.0.0-20200911024640-645f7a48b24f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/grpc v1.12.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.33.1 h1:DGeFlSan2f+WEtCERJ4J9GJWk15TxUi8QGagfI87Xyc= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod 
h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= gopkg.in/go-playground/validator.v8 v8.18.2/go.mod h1:RX2a/7Ha8BgOhfk7j780h4/u/RRjR0eouCJSH80/M2Y= gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA= gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8= gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod 
h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= atlas-0.7.2/cmd/atlas/go.work000066400000000000000000000000671431455511600160140ustar00rootroot00000000000000go 1.18 use ( ./ ) replace ariga.io/atlas => ../../ atlas-0.7.2/cmd/atlas/go.work.sum000066400000000000000000000335471431455511600166300ustar00rootroot00000000000000cloud.google.com/go v0.34.0 h1:eOI3/cP2VTU6uZLDYAoic+eyzzB9YyGmJ7eIjl8rOPg= github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9 h1:HD8gA2tkByhMAwYaFAX9w2l7vxvBQ5NMoxDrkhqhtn4= github.com/CloudyKit/fastprinter v0.0.0-20170127035650-74b38d55f37a h1:3SgJcK9l5uPdBC/X17wanyJAMxM33+4ZhEIV96MIH8U= github.com/CloudyKit/jet v2.1.3-0.20180809161101-62edd43e4f88+incompatible h1:rZgFj+Gtf3NMi/U5FvCvhzaxzW/TaPYgUYx3bAPz9DE= github.com/Joker/hpp v1.0.0 h1:65+iuJYdRXv/XyN62C1uEmmOx3432rNG/rKlX6V7Kkc= github.com/Joker/jade v1.0.1-0.20190614124447-d475f43051e7 h1:mreN1m/5VJ/Zc3b4pzj9qU6D9SRQ6Vm+3KfI328t3S8= github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398 h1:WDC6ySpJzbxGWFh4aMxFFC28wwGp5pEuoTtvA4q/qQ4= github.com/ajg/form v1.5.1 h1:t9c7v8JUKu/XxOGBU0yjNpaMloxGEJhUkqFRq0ibGeU= github.com/antihax/optional v1.0.0 
h1:xK2lYat7ZLaVVcIuj82J8kIro4V6kDe0AUDFboUCwcg= github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3 h1:ZSTrOEhiM5J5RFxEaFvMZVEAM1KvT1YzbEOwB2EAGjA= github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2wFoYVvnCs0= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6 h1:G1bPvciwNyF7IUmKXNt9Ak3m6u9DE1rF+RmtIkBpVdA= github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible h1:Ppm0npCCsmuR9oQaBtRuZcmILVE74aXE+AmrJj8L2ns= github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk= github.com/client9/misspell v0.3.4 h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f h1:WBZRG4aNOuI15bLRrCgN8fCq8E5Xuty6jGbmSNEvSsU= github.com/cockroachdb/datadriven v1.0.0 h1:uhZrAfEayBecH2w2tZmhe20HJ7hDvrrA4x2Bg9YdZKM= github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0 h1:sDMmm+q/3+BukdIpxwO365v/Rbspp2Nt5XntgQRXq8Q= github.com/coreos/etcd v3.3.10+incompatible h1:jFneRYjIvLMLhDLCzuTuU4rSJUjRplcJQ7pD7MnhC04= github.com/coreos/go-etcd v2.0.0+incompatible h1:bXhRBIXoTm9BYHS3gE0TtQuyNZyeEMux2sDi4oo5YOo= github.com/coreos/go-semver v0.2.0 h1:3Jm3tLmsgAYcjC+4Up7hJrFBPr+n7rAqYeSw/SZazuY= github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk= github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= github.com/creack/pty v1.1.9 h1:uDmaGzcdjhF4i/plgjmEsriH11Y0o7RKapEf/LDaM3w= github.com/cznic/sortutil v0.0.0-20181122101858-f5f958428db8 h1:LpMLYGyy67BoAFGda1NeOBQwqlv7nUXpm+rIVHGxZZ4= github.com/cznic/strutil v0.0.0-20171016134553-529a34b1c186 h1:0rkFMAbn5KBKNpJyHQ6Prb95vIKanmAe62KxsrN+sqA= github.com/dgraph-io/badger v1.6.0 h1:DshxFxZWXUcO0xX476VJC07Xsr6ZCBVRHKZ93Oh7Evo= github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 
h1:tdlZCpZ/P9DhczCTSixgIKmwPv6+wP5DGjqLYw5SUiA= github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385 h1:clC1lXBpe2kTj2VHdaIu9ajZQe4kcEY9j0NsnDDBZ3o= github.com/envoyproxy/go-control-plane v0.9.4 h1:rEvIZUSZ3fx39WIi3JkQqQBitGwpELBIYWeBVh6wn+E= github.com/envoyproxy/protoc-gen-validate v0.1.0 h1:EQciDnbrYxy13PgWoY8AqoxGiPrpgBZ1R8UNe3ddc+A= github.com/etcd-io/bbolt v1.3.3 h1:gSJmxrs37LgTqR/oyJBWok6k6SvXEUerFTbltIhXkBM= github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072 h1:DddqAaWDpywytcG8w/qoQ5sAN8X12d3Z3koB0C3Rxsc= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4 h1:GY1+t5Dr9OKADM64SYnQjw/w99HMYvQ0A8/JoUkxVmc= github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= github.com/gavv/httpexpect v2.0.0+incompatible h1:1X9kcRshkSKEjNJJxX9Y9mQ5BRfbxU5kORdjhlA1yX8= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3 h1:t8FVkw33L+wilf2QiWkw0UV77qRpcH/JHPKGpKa2E8g= github.com/gin-gonic/gin v1.4.0 h1:3tMoCCfM7ppqsR0ptz/wi1impNpT7/9wQtMZ8lr1mCQ= github.com/go-check/check v0.0.0-20180628173108-788fd7840127 h1:0gkP6mzaMqkmpcJYCFOLkIBwI7xFExG03bbkOkCvUPI= github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab h1:xveKWz2iaueeTaUgdetzel+U7exyigDYBryyVfV/rZk= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee h1:s+21KNqlpePfkah2I+gwHF8xmJWRjooY+5248k6m4A0= github.com/gobwas/pool v0.2.0 h1:QEmUOlnSjWtnpRGHF3SauEiOsy82Cup83Vf2LcMlnc8= github.com/gobwas/ws v1.0.2 h1:CoAavW/wd/kulfZmSIBt6p24n4j7tHgNVCjsfHVNUbo= github.com/gogo/googleapis v0.0.0-20180223154316-0cd9801be74a h1:dR8+Q0uO5S2ZBcs2IH6VBKYwSxPo2vYCYq0ot0mu7xA= github.com/gogo/status v1.1.0 h1:+eIkrewn5q6b30y+g/BJINVVdi2xH7je5MPJ3ZPK3JA= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= github.com/golang/groupcache 
v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/mock v1.1.1 h1:G5FRp8JnTd7RQH5kemVNlMeyXQAztQ3mOWV95KxsXH8= github.com/gomodule/redigo v1.7.1-0.20190724094224-574c33c3df38 h1:y0Wmhvml7cGnzPa9nocn/fMraMH/lMDdeG+rkx4VgYY= github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= github.com/hashicorp/go-version v1.2.0 h1:3vNe/fWF5CBgRIguda1meWhsZHy3m8gCJ5wx+dIzX/E= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= github.com/hydrogen18/memlistener v0.0.0-20141126152155-54553eb933fb h1:EPRgaDqXpLFUJLXZdGLnBTy1l6CLiNAPnvn2l+kHit0= github.com/imkira/go-interpol v1.1.0 h1:KIiKr0VSG2CUW1hl1jpiyuzuJeKUUpC8iM1AIE7N1Vk= github.com/iris-contrib/blackfriday v2.0.0+incompatible h1:o5sHQHHm0ToHUlAJSTjW9UWicjJSDDauOOQ2AHuIVp4= github.com/iris-contrib/go.uuid v2.0.0+incompatible h1:XZubAYg61/JwnJNbZilGjf3b3pB80+OQg2qf6c8BfWE= github.com/iris-contrib/i18n v0.0.0-20171121225848-987a633949d0 h1:Kyp9KiXwsyZRTeoNjgVCrWks7D8ht9+kg6yCjh8K97o= github.com/iris-contrib/schema v0.0.1 h1:10g/WnoRR+U+XXHWKBHeNy/+tZmM2kcAVGLOsz+yaDA= github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5 h1:rhqTjzJlm7EbkELJDKMTU7udov+Se0xZkWmugr6zGok= github.com/juju/loggo v0.0.0-20180524022052-584905176618 h1:MK144iBQF9hTSwBW/9eJm034bVoG30IshVm688T2hi8= github.com/juju/testing v0.0.0-20180920084828-472a3e8b2073 h1:WQM1NildKThwdP7qWrNAFGzp4ijNLw8RlgENkaI4MJs= 
github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88 h1:uC1QfSlInpQF+M0ao65imhwqKnz3Q2z/d8PWZRMQvDM= github.com/kataras/golog v0.0.9 h1:J7Dl82843nbKQDrQM/abbNJZvQjS6PfmkkffhOTXEpM= github.com/kataras/iris/v12 v12.0.1 h1:Wo5S7GMWv5OAzJmvFTvss/C4TS1W0uo6LkDlSymT4rM= github.com/kataras/neffos v0.0.10 h1:O06dvQlxjdWvzWbm2Bq+Si6psUhvSmEctAMk9Xujqms= github.com/kataras/pio v0.0.0-20190103105442-ea782b38602d h1:V5Rs9ztEWdp58oayPq/ulmlqJJZeJP6pP79uP3qjcao= github.com/kisielk/errcheck v1.5.0 h1:e8esj/e4R+SAOwFwN+n3zr0nYeCyeweozKfO23MvHzY= github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= github.com/klauspost/compress v1.9.0 h1:GhthINjveNZAdFUD8QoQYfjxnOONZgztK/Yr6M23UTY= github.com/klauspost/cpuid v1.2.1 h1:vJi+O/nMdFt0vqm8NZBI6wzALWdA2X+egi0ogNyrC/w= github.com/kr/pty v1.1.1 h1:VkoXIwSboBpnk99O/KFauAEILuNHv5DVFKZMBN/gUgw= github.com/labstack/echo/v4 v4.1.11 h1:z0BZoArY4FqdpUEl+wlHp4hnr/oSR6MTmQmv8OHSoww= github.com/labstack/gommon v0.3.0 h1:JEeO0bvc78PKdyHxloTKiF8BD5iGrH8T6MSeGvSgob0= github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= github.com/mattn/goveralls v0.0.2 h1:7eJB6EqsPhRVxvwEXGnqdO2sJI0PTsrWoTMXEk9/OQc= github.com/mediocregopher/mediocre-go-lib v0.0.0-20181029021733-cb65787f37ed h1:3dQJqqDouawQgl3gBE1PNHKFkJYGEuFb1DbSlaxdosE= github.com/mediocregopher/radix/v3 v3.3.0 h1:oacPXPKHJg0hcngVVrdtTnfGJiS+PtwoQwTBZGFlV4k= github.com/microcosm-cc/bluemonday v1.0.2 h1:5lPfLTTAvAbtS0VqT+94yOtFnGfUWYyx0+iToC3Os3s= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/moul/http2curl v1.0.0 h1:dRMWoAtb+ePxMlLkrCbAqh4TlPHXvoGUSQ323/9Zahs= github.com/nats-io/nats.go v1.8.1 
h1:6lF/f1/NN6kzUDBz6pyvQDEXO39jqXcWRLu/tKjtOUQ= github.com/nats-io/nkeys v0.0.2 h1:+qM7QpgXnvDDixitZtQUBDY9w/s9mu1ghS+JIbsrx6M= github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw= github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78= github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= github.com/onsi/ginkgo v1.13.0 h1:M76yO2HkZASFjXL0HSoZJ1AYEmQxNJmY41Jx1zNUq1Y= github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE= github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 h1:gQz4mCbXsO+nc9n1hCxHcGA3Zx3Eo+UHZoInFGUIXNM= github.com/rogpeppe/fastuuid v1.2.0 h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s= github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/ryanuber/columnize v2.1.0+incompatible h1:j1Wcmh8OrK4Q7GXY+V7SVSY8nUWQxHW5TkBe7YUl+2s= github.com/sclevine/agouti v3.0.0+incompatible h1:8IBJS6PWz3uTlMP3YBIR5f+KAldcGuOeFkFbUWfBgK4= github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8= github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M= github.com/stretchr/objx v0.4.0 h1:M2gUjqZET1qApGOWNSnZ49BAIMX4F/1plDv3+l31EJ4= github.com/ugorji/go v1.1.4 h1:j4s+tAvLfL3bZyefP2SEWmhBzmuIlH/eqNuPdFPgngw= github.com/ugorji/go/codec 
v0.0.0-20181204163529-d75b2dcb6bc8 h1:3SVOIvH7Ae1KRYyQWRjXWJEA9sS/c/pjvH++55Gr648= github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/fasthttp v1.6.0 h1:uWF8lgKmeaIewWVPwi4GRq2P6+R46IgYZdxWtM+GtEY= github.com/valyala/fasttemplate v1.0.1 h1:tY9CJiPnMXf1ERmG2EyK7gNUd+c6RKGD0IfU8WdUSz8= github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a h1:0R4NLDRDZX6JcmhJgXi5E4b8Wg84ihbmUKp/GvSPEzc= github.com/vmihailenco/msgpack/v4 v4.3.12 h1:07s4sz9IReOgdikxLTKNbBdqDMLsjPKXwvCazn8G65U= github.com/vmihailenco/tagparser v0.1.1 h1:quXMXlA39OCbd2wAdTsGDlK9RkOk6Wuw+x37wVyIuWY= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77 h1:ESFSdwYZvkeru3RtdrYueztKhOBCSAAzS4Gf+k0tEow= github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0 h1:6fRhSjgLCkTD3JnJxvaJ4Sj+TYblw757bqYgZaOq5ZY= github.com/yudai/gojsondiff v1.0.0 h1:27cbfqXLVEJ1o8I6v3y9lg8Ydm53EKqHXAOMxEGlCOA= github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82 h1:BHyfKlQyqbsFN5p3IfnEUduWvb9is428/nNb5L3U01M= github.com/yudai/pp v2.0.1+incompatible h1:Q4//iY4pNF6yPLZIigmvcl7k/bPgrcTPIFIcmawg5bI= github.com/yuin/goldmark v1.2.1 h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM= github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b h1:FosyBZYxY34Wul7O/MSKey3txpPYyCqVO5ZyceuQJEI= go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= golang.org/x/crypto v0.0.0-20220517005047-85d78b3ac167 h1:O8uGbHCqlTp2P6QJSLmCojM4mN6UemYv8K+dCnmHmu0= golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57 
h1:LQmS1nU0twXLA96Kt7U9qtHJEbBk3z6Q0V4UXjZkpr4= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d h1:TzXSXBo42m9gQenoE3b9BGiEpg5IG2JkU5FkPIawgtw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= golang.org/x/tools v0.1.9-0.20211216111533-8d383106f7e7 h1:M1gcVrIb2lSn2FIL19DG0+/b8nNVKJ7W7b4WcAGZAYM= google.golang.org/appengine v1.6.5 h1:tycE03LOZYQNhDpS27tcQdAzLCVMaj7QT2SXxebnpCM= gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= gopkg.in/go-playground/assert.v1 v1.2.1 h1:xoYuJVE7KT85PYWrN730RguIQO0ePzVRfFMXadIrXTM= gopkg.in/go-playground/validator.v8 v8.18.2 h1:lFB4DoMU6B626w8ny76MV7VX6W2VHct2GVOI3xgiMrQ= gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce h1:xcEWjVhvbDy+nHP67nPDDpbYrY+ILlfndk4bRioVHaU= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc h1:/hemPrYIhOhy8zYrNj+069zDB68us2sMGsfkFJO0iZs= modernc.org/golex v1.0.1 h1:EYKY1a3wStt0RzHaH8mdSRNg78Ub0OHxYfCRWw35YtM= modernc.org/mathutil v1.4.1 h1:ij3fYGe8zBF4Vu+g0oT7mB06r8sqGWKuJu1yXeR4by8= modernc.org/parser v1.0.2 h1:/qHLDn1ezrcRk9/XbErYp84bPPM4+w0kIDuvMdRk6Vc= modernc.org/sortutil v1.0.0 h1:SUTM1sCR0Ldpv7dbB/KCPC2zHHsZ1KrSkhmGmmV22CQ= modernc.org/strutil v1.1.0 h1:+1/yCzZxY2pZwwrsbH+4T7BQMoLQ9QiBshRC9eicYsc= modernc.org/y v1.0.1 h1:+QT+MtLkwkvLkh3fYQq+YD5vw2s5paVE73jdl5R/Py8= atlas-0.7.2/cmd/atlas/internal/000077500000000000000000000000001431455511600163145ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/000077500000000000000000000000001431455511600175515ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/cmdapi.go000066400000000000000000000106341431455511600213410ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
// Package cmdapi holds the atlas commands used to build // an atlas distribution. package cmdapi import ( "fmt" "os" "strings" "ariga.io/atlas/cmd/atlas/internal/update" "github.com/spf13/cobra" "golang.org/x/mod/semver" ) var ( // Root represents the root command when called without any subcommands. Root = &cobra.Command{ Use: "atlas", Short: "A database toolkit.", SilenceUsage: true, } // GlobalFlags contains flags common to many Atlas sub-commands. GlobalFlags struct { // SelectedEnv contains the environment selected from the active // project via the --env flag. SelectedEnv string // Vars contains the input variables passed from the CLI to // Atlas DDL or project files. Vars map[string]string } // version holds Atlas version. When built with cloud packages // should be set by build flag. "-X 'ariga.io/atlas/cmd/atlas/internal/cmdapi.version=${version}'" version string // schemaCmd represents the subcommand 'atlas version'. versionCmd = &cobra.Command{ Use: "version", Short: "Prints this Atlas CLI version information.", Run: func(cmd *cobra.Command, args []string) { v, u := parse(version) Root.Printf("atlas version %s\n%s\n", v, u) }, } // EnvCmd represents the subcommand 'atlas env'. EnvCmd = &cobra.Command{ Use: "env", Short: "Print atlas environment variables.", Long: `'atlas env' prints atlas environment information. Every set environment param will be printed in the form of NAME=VALUE. List of supported environment parameters: * ATLAS_NO_UPDATE_NOTIFIER: On any command, the CLI will check for new releases using the GitHub API. This check will happen at most once every 24 hours. To cancel this behavior, set the environment variable "ATLAS_NO_UPDATE_NOTIFIER".`, Run: func(cmd *cobra.Command, args []string) { keys := []string{update.AtlasNoUpdateNotifier} for _, k := range keys { if v, ok := os.LookupEnv(k); ok { cmd.Println(fmt.Sprintf("%s=%s", k, v)) } } }, } // license holds Atlas license. When built with cloud packages // should be set by build flag. 
"-X 'ariga.io/atlas/cmd/atlas/internal/cmdapi.license=${license}'" license = `LICENSE Atlas is licensed under Apache 2.0 as found in https://github.com/ariga/atlas/blob/master/LICENSE.` licenseCmd = &cobra.Command{ Use: "license", Short: "Display license information", Run: func(cmd *cobra.Command, _ []string) { cmd.Println(license) }, } ) // CheckForUpdate exposes internal update logic to CLI. func CheckForUpdate() { update.Check(version, Root.PrintErrln) } func init() { Root.AddCommand(EnvCmd) Root.AddCommand(schemaCmd) Root.AddCommand(versionCmd) Root.AddCommand(licenseCmd) } // receivesEnv configures cmd to receive the common '--env' flag. func receivesEnv(cmd *cobra.Command) { cmd.PersistentFlags().StringVarP(&GlobalFlags.SelectedEnv, "env", "", "", "set which env from the project file to use") cmd.PersistentFlags().StringToStringVarP(&GlobalFlags.Vars, varFlag, "", nil, "input variables") } // inputValsFromEnv populates GlobalFlags.Vars from the active environment. If we are working // inside a project, the "var" flag is not propagated to the schema definition. Instead, it // is used to evaluate the project file which can pass input values via the "values" block // to the schema. 
func inputValsFromEnv(cmd *cobra.Command) error { activeEnv, err := selectEnv(GlobalFlags.SelectedEnv) if err != nil { return err } if fl := cmd.Flag(varFlag); fl == nil { return nil } values, err := activeEnv.asMap() if err != nil { return err } if len(values) == 0 { return nil } pairs := make([]string, 0, len(values)) for k, v := range values { pairs = append(pairs, fmt.Sprintf("%s=%s", k, v)) } vars := strings.Join(pairs, ",") if err := cmd.Flags().Set(varFlag, vars); err != nil { return err } return nil } // parse returns a user facing version and release notes url func parse(version string) (string, string) { u := "https://github.com/ariga/atlas/releases/latest" if ok := semver.IsValid(version); !ok { return "- development", u } s := strings.Split(version, "-") if len(s) != 0 && s[len(s)-1] != "canary" { u = fmt.Sprintf("https://github.com/ariga/atlas/releases/tag/%s", version) } return version, u } atlas-0.7.2/cmd/atlas/internal/cmdapi/cmdapi_test.go000066400000000000000000000036641431455511600224050ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package cmdapi import ( "bytes" "os" "os/exec" "testing" "ariga.io/atlas/cmd/atlas/internal/update" "github.com/stretchr/testify/require" ) func TestEnv(t *testing.T) { out, err := runCmd(Root, "env") require.NoError(t, err) require.Empty(t, out) } func TestEnv_Set(t *testing.T) { err := os.Setenv(update.AtlasNoUpdateNotifier, "test") require.NoError(t, err) out, err := runCmd(Root, "env") require.NoError(t, err) require.Equal(t, "ATLAS_NO_UPDATE_NOTIFIER=test\n", out) } func TestCLI_Version(t *testing.T) { // Required to have a clean "stderr" while running first time. 
tests := []struct { name string cmd *exec.Cmd expected string }{ { name: "dev mode", cmd: exec.Command("go", "run", "ariga.io/atlas/cmd/atlas", "version", ), expected: "atlas version - development\nhttps://github.com/ariga/atlas/releases/latest\n", }, { name: "release", cmd: exec.Command("go", "run", "-ldflags", "-X ariga.io/atlas/cmd/atlas/internal/cmdapi.version=v1.2.3", "ariga.io/atlas/cmd/atlas", "version", ), expected: "atlas version v1.2.3\nhttps://github.com/ariga/atlas/releases/tag/v1.2.3\n", }, { name: "canary", cmd: exec.Command("go", "run", "-ldflags", "-X ariga.io/atlas/cmd/atlas/internal/cmdapi.version=v0.3.0-6539f2704b5d-canary", "ariga.io/atlas/cmd/atlas", "version", ), expected: "atlas version v0.3.0-6539f2704b5d-canary\nhttps://github.com/ariga/atlas/releases/latest\n", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { t.Setenv("ATLAS_NO_UPDATE_NOTIFIER", "true") stdout := bytes.NewBuffer(nil) tt.cmd.Stdout = stdout require.NoError(t, tt.cmd.Run()) require.Equal(t, tt.expected, stdout.String()) }) } } atlas-0.7.2/cmd/atlas/internal/cmdapi/diff.go000066400000000000000000000062671431455511600210230ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package cmdapi import ( "fmt" "strings" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlclient" "github.com/spf13/cobra" ) type diffCmdOpts struct { fromURL string toURL string } // newDiffCmd returns a new *cobra.Command that runs cmdDiffRun with the given flags and mux. 
func newDiffCmd() *cobra.Command { var opts diffCmdOpts cmd := &cobra.Command{ Use: "diff", Short: "Calculate and print the diff between two schemas.", Long: `'atlas schema diff' connects to two given databases, inspects them, calculates the difference in their schemas, and prints a plan of SQL statements to migrate the "from" database to the schema of the "to" database.`, Run: func(cmd *cobra.Command, args []string) { cmdDiffRun(cmd, &opts) }, } cmd.Flags().StringVarP(&opts.fromURL, "from", "", "", "[driver://username:password@protocol(address)/dbname?param=value] select a database using the URL format") cmd.Flags().StringVarP(&opts.toURL, "to", "", "", "[driver://username:password@protocol(address)/dbname?param=value] select a database using the URL format") cobra.CheckErr(cmd.MarkFlagRequired("from")) cobra.CheckErr(cmd.MarkFlagRequired("to")) return cmd } func init() { diffCmd := newDiffCmd() schemaCmd.AddCommand(diffCmd) } // cmdDiffRun connects to the given databases, and prints an SQL plan to get from // the "from" schema to the "to" schema. func cmdDiffRun(cmd *cobra.Command, flags *diffCmdOpts) { ctx := cmd.Context() fromC, err := sqlclient.Open(cmd.Context(), flags.fromURL) cobra.CheckErr(err) defer fromC.Close() toC, err := sqlclient.Open(cmd.Context(), flags.toURL) cobra.CheckErr(err) defer toC.Close() fromS := fromC.URL.Schema toS := toC.URL.Schema var diff []schema.Change switch { case fromS == "" && toS == "": // compare realm. fromRealm, err := fromC.InspectRealm(ctx, nil) cobra.CheckErr(err) toRealm, err := toC.InspectRealm(ctx, nil) cobra.CheckErr(err) diff, err = toC.RealmDiff(fromRealm, toRealm) cobra.CheckErr(err) case fromS == "": cobra.CheckErr(fmt.Errorf("cannot diff schema %q with a database connection", fromS)) case toS == "": cobra.CheckErr(fmt.Errorf("cannot diff database connection with a schema %q", toS)) default: // compare schemas. 
fromSchema, err := fromC.InspectSchema(ctx, fromS, nil) cobra.CheckErr(err) toSchema, err := toC.InspectSchema(ctx, toS, nil) cobra.CheckErr(err) // SchemaDiff checks for name equality which is irrelevant in the case // the user wants to compare their contents, if the names are different // we reset them to allow the comparison. if fromS != toS { toSchema.Name = "" fromSchema.Name = "" } diff, err = toC.SchemaDiff(fromSchema, toSchema) cobra.CheckErr(err) } p, err := toC.PlanChanges(ctx, "plan", diff) cobra.CheckErr(err) if len(p.Changes) == 0 { cmd.Println("Schemas are synced, no changes to be made.") return } for _, c := range p.Changes { if c.Comment != "" { cmd.Println("--", strings.ToUpper(c.Comment[:1])+c.Comment[1:]) } cmd.Println(c.Cmd) } } atlas-0.7.2/cmd/atlas/internal/cmdapi/diff_test.go000066400000000000000000000034531431455511600220540ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package cmdapi import ( "bytes" "context" "database/sql" "fmt" "os" "testing" "ariga.io/atlas/sql/sqlite" _ "github.com/mattn/go-sqlite3" "github.com/spf13/cobra" "github.com/stretchr/testify/require" ) func TestDiffCmd_Diff(t *testing.T) { from := openSQLite(t, "") to := openSQLite(t, "create table t1 (id int);") cmd := newDiffCmd() s, err := runCmd(cmd, "schema", "diff", "--from", from, "--to", to) require.NoError(t, err) require.EqualValues(t, "-- Create \"t1\" table\nCREATE TABLE `t1` (`id` int NULL)\n", s) } func TestDiffCmd_Synced(t *testing.T) { from := openSQLite(t, "") to := openSQLite(t, "") cmd := newDiffCmd() s, err := runCmd(cmd, "schema", "diff", "--from", from, "--to", to) require.NoError(t, err) require.EqualValues(t, "Schemas are synced, no changes to be made.\n", s) } // openSQLite creates a sqlite db, seeds it with the seed query and returns the url to it. 
func openSQLite(t *testing.T, seed string) string { f, err := os.CreateTemp("", "sqlite.db") require.NoError(t, err) t.Cleanup(func() { os.Remove(f.Name()) }) dsn := fmt.Sprintf("file:%s?cache=shared&_fk=1", f.Name()) db, err := sql.Open("sqlite3", dsn) require.NoError(t, err) t.Cleanup(func() { db.Close() }) drv, err := sqlite.Open(db) require.NoError(t, err) if len(seed) > 0 { _, err := drv.ExecContext(context.Background(), seed) require.NoError(t, err) } return fmt.Sprintf("sqlite://%s", dsn) } func runCmd(cmd *cobra.Command, args ...string) (string, error) { var out bytes.Buffer cmd.SetOut(&out) cmd.SetErr(&out) cmd.SetArgs(args) err := cmd.Execute() return out.String(), err } atlas-0.7.2/cmd/atlas/internal/cmdapi/migrate.go000066400000000000000000001451261431455511600215410ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package cmdapi import ( "context" "database/sql" "errors" "fmt" "io" "io/fs" "os" "path/filepath" "strconv" "strings" "text/template" "time" "ariga.io/atlas/cmd/atlas/internal/lint" entmigrate "ariga.io/atlas/cmd/atlas/internal/migrate" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/revision" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlcheck" "ariga.io/atlas/sql/sqlclient" "ariga.io/atlas/sql/sqltool" "github.com/fatih/color" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclparse" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/spf13/cobra" "github.com/spf13/pflag" ) const ( migrateFlagURL = "url" migrateFlagDevURL = "dev-url" migrateFlagDir = "dir" migrateFlagDirFormat = "dir-format" migrateFlagLog = "log" migrateFlagRevisionsSchema = "revisions-schema" migrateFlagDryRun = "dry-run" migrateFlagTo = "to" migrateFlagFrom = "from" migrateFlagSchema = "schema" migrateLintLatest = "latest" migrateLintGitDir = "git-dir" migrateLintGitBase = "git-base" migrateDiffQualifier = "qualifier" migrateApplyAllowDirty = "allow-dirty" migrateApplyBaselineVersion = "baseline" migrateApplyTxMode = "tx-mode" ) var ( // MigrateFlags are the flags used in MigrateCmd (and sub-commands). MigrateFlags struct { URL string DevURL string ToURLs []string Schemas []string DirURL string DirFormat string RevisionSchema string Apply struct { DryRun bool LogFormat string AllowDirty bool FromVersion string BaselineVersion string TxMode string } Diff struct { Qualifier string // optional table qualifier } Import struct { FromURL string ToURL string } Lint struct { Format string // log formatting Latest uint // latest N migration files GitDir string // repository working dir GitBase string // branch name to compare with } } // MigrateCmd represents the migrate command. It wraps several other sub-commands. 
MigrateCmd = &cobra.Command{ Use: "migrate", Short: "Manage versioned migration files", Long: "'atlas migrate' wraps several sub-commands for migration management.", PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { if err := migrateFlagsFromEnv(cmd, nil); err != nil { return err } dir, err := dir(false) if err != nil { return err } if err := migrate.Validate(dir); err != nil { printChecksumErr(cmd.OutOrStderr()) cmd.SilenceUsage = true return err } return nil }, } // MigrateApplyCmd represents the 'atlas migrate apply' subcommand. MigrateApplyCmd = &cobra.Command{ Use: "apply [flags] [count]", Short: "Applies pending migration files on the connected database.", Long: `'atlas migrate apply' reads the migration state of the connected database and computes what migrations are pending. It then attempts to apply the pending migration files in the correct order onto the database. The first argument denotes the maximum number of migration files to apply. As a safety measure 'atlas migrate apply' will abort with an error, if: - the migration directory is not in sync with the 'atlas.sum' file - the migration and database history do not match each other If run with the "--dry-run" flag, atlas will not execute any SQL.`, Example: ` atlas migrate apply -u mysql://user:pass@localhost:3306/dbname atlas migrate apply --dir file:///path/to/migration/directory --url mysql://user:pass@localhost:3306/dbname 1 atlas migrate apply --env dev 1 atlas migrate apply --dry-run --env dev 1`, Args: cobra.MaximumNArgs(1), RunE: CmdMigrateApplyRun, } // MigrateDiffCmd represents the 'atlas migrate diff' subcommand. 
MigrateDiffCmd = &cobra.Command{ Use: "diff [flags] [name]", Short: "Compute the diff between the migration directory and a desired state and create a new migration file.", Long: `'atlas migrate diff' uses the dev-database to re-run all migration files in the migration directory, compares it to a given desired state and create a new migration file containing SQL statements to migrate the migration directory state to the desired schema. The desired state can be another connected database or an HCL file.`, Example: ` atlas migrate diff --dev-url mysql://user:pass@localhost:3306/dev --to file://atlas.hcl atlas migrate diff --dev-url mysql://user:pass@localhost:3306/dev --to file://atlas.hcl add_users_table atlas migrate diff --dev-url mysql://user:pass@localhost:3306/dev --to mysql://user:pass@localhost:3306/dbname atlas migrate diff --env dev`, Args: cobra.MaximumNArgs(1), // If the migration directory does not exist on the validation attempt, this command will create it and // consider the new migration directory "valid". PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { if err := migrateFlagsFromEnv(cmd, nil); err != nil { return err } dir, err := dir(true) if err != nil { return err } if err := migrate.Validate(dir); err != nil { printChecksumErr(cmd.OutOrStderr()) cmd.SilenceUsage = true return err } return nil }, RunE: CmdMigrateDiffRun, } // MigrateHashCmd represents the 'atlas migrate hash' command. MigrateHashCmd = &cobra.Command{ Use: "hash", Short: "Hash (re-)creates an integrity hash file for the migration directory.", Long: `'atlas migrate hash' computes the integrity hash sum of the migration directory and stores it in the atlas.sum file. This command should be used whenever a manual change in the migration directory was made.`, Example: ` atlas migrate hash`, PersistentPreRunE: migrateFlagsFromEnv, RunE: CmdMigrateHashRun, } // MigrateImportCmd represents the 'atlas migrate import' command. 
MigrateImportCmd = &cobra.Command{ Use: "import", Short: "Import a migration directory from another migration management tool to the Atlas format.", Example: ` atlas migrate import --dir-format liquibase --from file:///path/to/source/directory --to file:///path/to/migration/directory`, // Validate the source directory. Consider a directory with no sum file valid, since it might be an import // from an existing project. PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { if err := migrateFlagsFromEnv(cmd, nil); err != nil { return err } MigrateFlags.DirURL = MigrateFlags.Import.FromURL dir, err := dir(false) if err != nil { return err } if err := migrate.Validate(dir); err != nil && !errors.Is(err, migrate.ErrChecksumNotFound) { printChecksumErr(cmd.OutOrStderr()) cmd.SilenceUsage = true return err } return nil }, RunE: CmdMigrateImportRun, } // MigrateNewCmd represents the 'atlas migrate new' command. MigrateNewCmd = &cobra.Command{ Use: "new [name]", Short: "Creates a new empty migration file in the migration directory.", Long: `'atlas migrate new' creates a new migration according to the configured formatter without any statements in it.`, Example: ` atlas migrate new my-new-migration`, Args: cobra.MaximumNArgs(1), RunE: CmdMigrateNewRun, } // MigrateSetCmd represents the 'atlas migrate set' command. MigrateSetCmd = &cobra.Command{ Use: "set [flags]", Short: "Set the current version of the migration history table.", Long: `'atlas migrate set' edits the revision table to consider all migrations up to and including the given version to be applied. 
This command is usually used after manually making changes to the managed database.`, Example: ` atlas migrate set-revision 3 --url mysql://user:pass@localhost:3306/ atlas migrate set-revision 4 --env local atlas migrate set-revision 1.2.4 --url mysql://user:pass@localhost:3306/my_db --revision-schema my_revisions`, Args: cobra.ExactArgs(1), RunE: CmdMigrateSetRun, } // MigrateStatusCmd represents the 'atlas migrate status' command. MigrateStatusCmd = &cobra.Command{ Use: "status [flags]", Short: "Get information about the current migration status.", Long: `'atlas migrate status' reports information about the current status of a connected database compared to the migration directory.`, Example: ` atlas migrate status --url mysql://user:pass@localhost:3306/ atlas migrate status --url mysql://user:pass@localhost:3306/ --dir file:///path/to/migration/directory`, RunE: CmdMigrateStatusRun, } // MigrateValidateCmd represents the 'atlas migrate validate' command. MigrateValidateCmd = &cobra.Command{ Use: "validate [flags]", Short: "Validates the migration directories checksum and SQL statements.", Long: `'atlas migrate validate' computes the integrity hash sum of the migration directory and compares it to the atlas.sum file. If there is a mismatch it will be reported. If the --dev-url flag is given, the migration files are executed on the connected database in order to validate SQL semantics.`, Example: ` atlas migrate validate atlas migrate validate --dir file:///path/to/migration/directory atlas migrate validate --dir file:///path/to/migration/directory --dev-url mysql://user:pass@localhost:3306/dev atlas migrate validate --env dev`, RunE: CmdMigrateValidateRun, } // MigrateLintCmd represents the 'atlas migrate lint' command. 
MigrateLintCmd = &cobra.Command{ Use: "lint", Short: "Run analysis on the migration directory", Example: ` atlas migrate lint --env dev atlas migrate lint --dir file:///path/to/migration/directory --dev-url mysql://root:pass@localhost:3306 --latest 1 atlas migrate lint --dir file:///path/to/migration/directory --dev-url mysql://root:pass@localhost:3306 --git-base master atlas migrate lint --dir file:///path/to/migration/directory --dev-url mysql://root:pass@localhost:3306 --log '{{ json .Files }}'`, // Override the parent 'migrate' pre-run function to allow executing // 'migrate lint' on directories that are not maintained by Atlas. PersistentPreRunE: migrateFlagsFromEnv, RunE: CmdMigrateLintRun, } ) func init() { // Add sub-commands. Root.AddCommand(MigrateCmd) MigrateCmd.AddCommand(MigrateApplyCmd) MigrateCmd.AddCommand(MigrateDiffCmd) MigrateCmd.AddCommand(MigrateHashCmd) MigrateCmd.AddCommand(MigrateNewCmd) MigrateCmd.AddCommand(MigrateValidateCmd) MigrateCmd.AddCommand(MigrateStatusCmd) MigrateCmd.AddCommand(MigrateLintCmd) MigrateCmd.AddCommand(MigrateImportCmd) MigrateCmd.AddCommand(MigrateSetCmd) // Reusable flags. urlFlag := func(f *string, name, short string, set *pflag.FlagSet) { set.StringVarP(f, name, short, "", "[driver://username:password@address/dbname?param=value] select a database using the URL format") } revisionsFlag := func(set *pflag.FlagSet) { set.StringVarP(&MigrateFlags.RevisionSchema, migrateFlagRevisionsSchema, "", "", "schema name where the revisions table resides") } dirURLFlag := func(f *string, name, short string, set *pflag.FlagSet) { set.StringVarP(f, name, short, "file://migrations", "select migration directory using URL format") } // Global flags. 
dirURLFlag(&MigrateFlags.DirURL, migrateFlagDir, "", MigrateCmd.PersistentFlags()) MigrateCmd.PersistentFlags().StringSliceVarP(&MigrateFlags.Schemas, migrateFlagSchema, "", nil, "set schema names") MigrateCmd.PersistentFlags().StringVarP(&MigrateFlags.DirFormat, migrateFlagDirFormat, "", formatAtlas, "set migration file format") MigrateCmd.PersistentFlags().SortFlags = false // Apply flags. MigrateApplyCmd.Flags().StringVarP(&MigrateFlags.Apply.LogFormat, migrateFlagLog, "", logFormatTTY, "log format to use") revisionsFlag(MigrateApplyCmd.Flags()) MigrateApplyCmd.Flags().BoolVarP(&MigrateFlags.Apply.DryRun, migrateFlagDryRun, "", false, "do not actually execute any SQL but show it on screen") MigrateApplyCmd.Flags().StringVarP(&MigrateFlags.Apply.FromVersion, migrateFlagFrom, "", "", "calculate pending files from the given version (including it)") MigrateApplyCmd.Flags().StringVarP(&MigrateFlags.Apply.BaselineVersion, migrateApplyBaselineVersion, "", "", "start the first migration after the given baseline version") MigrateApplyCmd.Flags().StringVarP(&MigrateFlags.Apply.TxMode, migrateApplyTxMode, "", txModeFile, "set transaction mode [none, file, all]") MigrateApplyCmd.Flags().BoolVarP(&MigrateFlags.Apply.AllowDirty, migrateApplyAllowDirty, "", false, "allow start working on a non-clean database") urlFlag(&MigrateFlags.URL, migrateFlagURL, "u", MigrateApplyCmd.Flags()) MigrateApplyCmd.Flags().SortFlags = false cobra.CheckErr(MigrateApplyCmd.MarkFlagRequired(migrateFlagURL)) MigrateApplyCmd.MarkFlagsMutuallyExclusive(migrateFlagFrom, migrateApplyBaselineVersion) cobra.CheckErr(MigrateApplyCmd.Flags().MarkHidden(migrateFlagDirFormat)) cobra.CheckErr(MigrateApplyCmd.Flags().MarkHidden(migrateFlagSchema)) // Diff flags. urlFlag(&MigrateFlags.DevURL, migrateFlagDevURL, "", MigrateDiffCmd.Flags()) MigrateDiffCmd.Flags().StringSliceVarP(&MigrateFlags.ToURLs, migrateFlagTo, "", nil, "[driver://username:password@address/dbname?param=value ...] 
select a desired state using the URL format") MigrateDiffCmd.Flags().StringVarP(&MigrateFlags.Diff.Qualifier, migrateDiffQualifier, "", "", "qualify tables with custom qualifier when working on a single schema") MigrateDiffCmd.Flags().SortFlags = false cobra.CheckErr(MigrateDiffCmd.MarkFlagRequired(migrateFlagDevURL)) cobra.CheckErr(MigrateDiffCmd.MarkFlagRequired(migrateFlagTo)) // Import flags. dirURLFlag(&MigrateFlags.Import.FromURL, migrateFlagFrom, "", MigrateImportCmd.Flags()) dirURLFlag(&MigrateFlags.Import.ToURL, migrateFlagTo, "", MigrateImportCmd.Flags()) MigrateImportCmd.Flags().SortFlags = false // Validate flags. urlFlag(&MigrateFlags.DevURL, migrateFlagDevURL, "", MigrateValidateCmd.Flags()) // Status flags. urlFlag(&MigrateFlags.URL, migrateFlagURL, "u", MigrateStatusCmd.Flags()) revisionsFlag(MigrateStatusCmd.Flags()) // Set flags. urlFlag(&MigrateFlags.URL, migrateFlagURL, "u", MigrateSetCmd.Flags()) // Hash flags. MigrateHashCmd.Flags().Bool("force", false, "") cobra.CheckErr(MigrateHashCmd.Flags().MarkDeprecated("force", "you can safely omit it.")) // Lint flags. urlFlag(&MigrateFlags.DevURL, migrateFlagDevURL, "", MigrateLintCmd.Flags()) MigrateLintCmd.PersistentFlags().StringVarP(&MigrateFlags.Lint.Format, migrateFlagLog, "", "", "custom logging using a Go template") MigrateLintCmd.PersistentFlags().UintVarP(&MigrateFlags.Lint.Latest, migrateLintLatest, "", 0, "run analysis on the latest N migration files") MigrateLintCmd.PersistentFlags().StringVarP(&MigrateFlags.Lint.GitBase, migrateLintGitBase, "", "", "run analysis against the base Git branch") MigrateLintCmd.PersistentFlags().StringVarP(&MigrateFlags.Lint.GitDir, migrateLintGitDir, "", ".", "path to the repository working directory") cobra.CheckErr(MigrateLintCmd.MarkFlagRequired(migrateFlagDevURL)) receivesEnv(MigrateCmd) } const applyLockValue = "atlas_migrate_execute" // CmdMigrateApplyRun is the command executed when running the CLI with 'migrate apply' args. 
func CmdMigrateApplyRun(cmd *cobra.Command, args []string) error { var ( n int err error ) if len(args) > 0 { n, err = strconv.Atoi(args[0]) if err != nil { return err } if n < 1 { return fmt.Errorf("cannot apply '%d' migration files", n) } } // Open the migration directory. dir, err := dir(false) if err != nil { return err } // Open a client to the database. c, err := sqlclient.Open(cmd.Context(), MigrateFlags.URL) if err != nil { return err } defer c.Close() // Acquire a lock. if l, ok := c.Driver.(schema.Locker); ok { unlock, err := l.Lock(cmd.Context(), applyLockValue, 0) if err != nil { return fmt.Errorf("acquiring database lock: %w", err) } // If unlocking fails notify the user about it. defer cobra.CheckErr(unlock()) } if err := checkRevisionSchemaClarity(cmd, c); err != nil { return err } // Get the correct log format and destination. Currently, only os.Stdout is supported. l, err := logFormat(cmd.OutOrStdout()) if err != nil { return err } var rrw migrate.RevisionReadWriter rrw, err = entRevisions(cmd.Context(), c) if err != nil { return err } if err := rrw.(*entmigrate.EntRevisions).Migrate(cmd.Context()); err != nil { return err } // Determine pending files and lock the database while working. ex, err := migrate.NewExecutor(c.Driver, dir, rrw, executorOptions(l)...) if err != nil { return err } pending, err := ex.Pending(cmd.Context()) if err != nil && !errors.Is(err, migrate.ErrNoPendingFiles) { return err } if errors.Is(err, migrate.ErrNoPendingFiles) { cmd.Println("No migration files to execute") return nil } if n > 0 { // Cannot apply more than len(pending) files. 
if n >= len(pending) { n = len(pending) } pending = pending[:n] } revs, err := rrw.ReadRevisions(cmd.Context()) if err != nil { return err } if err := migrate.LogIntro(l, revs, pending); err != nil { return err } var ( mux = tx{c: c, rrw: rrw} drv migrate.Driver ) for _, f := range pending { drv, rrw, err = mux.driver(cmd.Context()) if err != nil { return err } ex, err := migrate.NewExecutor(drv, dir, rrw, executorOptions(l)...) if err != nil { return err } if err := mux.mayRollback(ex.Execute(cmd.Context(), f)); err != nil { return err } if err := mux.mayCommit(); err != nil { return err } } if err := mux.commit(); err != nil { return err } l.Log(migrate.LogDone{}) return mux.commit() } func checkRevisionSchemaClarity(cmd *cobra.Command, c *sqlclient.Client) error { // The "old" default behavior for the revision schema location was to store the revision table in its own schema. // Now, the table is saved in the connected schema, if any. To keep the backwards compatability, we now require // for schema bound connections to have the schema-revision flag present if there is no revision table in the schema // but the old default schema does have one. if c.URL.Schema != "" && MigrateFlags.RevisionSchema == "" { // If the schema does not contain a revision table, but we can find a table in the previous default schema, // abort and tell the user to specify the intention. opts := &schema.InspectOptions{Tables: []string{revision.Table}} s, err := c.InspectSchema(cmd.Context(), "", opts) var ok bool switch { case schema.IsNotExistError(err): // If the schema does not exist, the table does not as well. case err != nil: return err default: // Connected schema does exist, check if the table does. _, ok = s.Table(revision.Table) } if !ok { // Either schema or table does not exist. // Check for the old default schema. If it does not exist, we have no problem. 
s, err := c.InspectSchema(cmd.Context(), defaultRevisionSchema, opts) switch { case schema.IsNotExistError(err): // Schema does not exist, we can proceed. case err != nil: return err default: if _, ok := s.Table(revision.Table); ok { fmt.Fprintf(cmd.OutOrStderr(), `We couldn't find a revision table in the connected schema but found one in the schema 'atlas_schema_revisions' and cannot determine the desired behavior. As a safety guard, we require you to specify whether to use the existing table in 'atlas_schema_revisions' or create a new one in the connected schema by providing the '--revisions-schema' flag or deleting the 'atlas_schema_revisions' schema if it is unused. `) cmd.SilenceUsage = true cmd.SilenceErrors = true return errors.New("ambiguous revision table") } } } } return nil } func entRevisions(ctx context.Context, c *sqlclient.Client) (*entmigrate.EntRevisions, error) { return entmigrate.NewEntRevisions(ctx, c, entmigrate.WithSchema(revisionSchemaName(c))) } // defaultRevisionSchema is the default schema for storing revisions table. const defaultRevisionSchema = "atlas_schema_revisions" func revisionSchemaName(c *sqlclient.Client) string { switch { case MigrateFlags.RevisionSchema != "": return MigrateFlags.RevisionSchema case c.URL.Schema != "": return c.URL.Schema default: return defaultRevisionSchema } } // tx handles wrapping migration execution in transactions. type tx struct { c *sqlclient.Client tx *sqlclient.TxClient rrw migrate.RevisionReadWriter } // driver returns the migrate.Driver to use to execute migration statements. func (tx *tx) driver(ctx context.Context) (migrate.Driver, migrate.RevisionReadWriter, error) { if MigrateFlags.Apply.DryRun { // If the --dry-run flag is given we don't want to execute any statements on the database. 
return &dryRunDriver{tx.c.Driver}, &dryRunRevisions{tx.rrw}, nil } switch MigrateFlags.Apply.TxMode { case txModeNone: return tx.c.Driver, tx.rrw, nil case txModeFile: // In file-mode, this function is called each time a new file is executed. Open a transaction. if tx.tx != nil { return nil, nil, errors.New("unexpected active transaction") } var err error tx.tx, err = tx.c.Tx(ctx, nil) if err != nil { return nil, nil, err } tx.rrw, err = entRevisions(ctx, tx.tx.Client) if err != nil { return nil, nil, err } return tx.tx.Driver, tx.rrw, nil case txModeAll: // In file-mode, this function is called each time a new file is executed. Since we wrap all files into one // huge transaction, if there already is an opened one, use that. if tx.tx == nil { var err error tx.tx, err = tx.c.Tx(ctx, nil) if err != nil { return nil, nil, err } tx.rrw, err = entRevisions(ctx, tx.tx.Client) if err != nil { return nil, nil, err } } return tx.tx.Driver, tx.rrw, nil default: return nil, nil, fmt.Errorf("unknown tx-mode %q", MigrateFlags.Apply.TxMode) } } // mayRollback may roll back a transaction depending on the given transaction mode. func (tx *tx) mayRollback(err error) error { if tx.tx != nil && err != nil { if err2 := tx.tx.Rollback(); err2 != nil { err = fmt.Errorf("%v: %w", err2, err) } } return err } // mayCommit may commit a transaction depending on the given transaction mode. func (tx *tx) mayCommit() error { // Only commit if each file is wrapped in a transaction. if !MigrateFlags.Apply.DryRun && MigrateFlags.Apply.TxMode == txModeFile { return tx.commit() } return nil } // commit the transaction, if one is active. 
func (tx *tx) commit() error { if tx.tx == nil { return nil } defer func() { tx.tx = nil }() return tx.tx.Commit() } func executorOptions(l migrate.Logger) []migrate.ExecutorOption { opts := []migrate.ExecutorOption{ migrate.WithLogger(l), migrate.WithOperatorVersion(operatorVersion()), } if MigrateFlags.Apply.AllowDirty { opts = append(opts, migrate.WithAllowDirty(true)) } if v := MigrateFlags.Apply.BaselineVersion; v != "" { opts = append(opts, migrate.WithBaselineVersion(v)) } if v := MigrateFlags.Apply.FromVersion; v != "" { opts = append(opts, migrate.WithFromVersion(v)) } return opts } func operatorVersion() string { v, _ := parse(version) return "Atlas CLI - " + v } // CmdMigrateDiffRun is the command executed when running the CLI with 'migrate diff' args. func CmdMigrateDiffRun(cmd *cobra.Command, args []string) error { // Open a dev driver. dev, err := sqlclient.Open(cmd.Context(), MigrateFlags.DevURL) if err != nil { return err } defer dev.Close() // Acquire a lock. if l, ok := dev.Driver.(schema.Locker); ok { unlock, err := l.Lock(cmd.Context(), "atlas_migrate_diff", 0) if err != nil { return fmt.Errorf("acquiring database lock: %w", err) } // If unlocking fails notify the user about it. defer cobra.CheckErr(unlock()) } // Open the migration directory. dir, err := dir(false) if err != nil { return err } // Get a state reader for the desired state. desired, err := to(cmd.Context(), dev) if err != nil { return err } defer desired.Close() f, err := formatter() if err != nil { return err } opts := []migrate.PlannerOption{migrate.PlanFormat(f)} if dev.URL.Schema != "" { // Disable tables qualifier in schema-mode. opts = append(opts, migrate.PlanWithSchemaQualifier(MigrateFlags.Diff.Qualifier)) } // Plan the changes and create a new migration file. pl := migrate.NewPlanner(dev.Driver, dir, opts...) 
var name string if len(args) > 0 { name = args[0] } plan, err := func() (*migrate.Plan, error) { if dev.URL.Schema != "" { return pl.PlanSchema(cmd.Context(), name, desired.StateReader) } return pl.Plan(cmd.Context(), name, desired.StateReader) }() var cerr migrate.NotCleanError switch { case errors.Is(err, migrate.ErrNoPlan): cmd.Println("The migration directory is synced with the desired state, no changes to be made") return nil case errors.As(err, &cerr) && dev.URL.Schema == "" && desired.Schema != "": return fmt.Errorf("dev database is not clean (%s). Add a schema to the URL to limit the scope of the connection", cerr.Reason) case err != nil: return err default: // Write the plan to a new file. return pl.WritePlan(plan) } } // CmdMigrateHashRun is the command executed when running the CLI with 'migrate hash' args. func CmdMigrateHashRun(*cobra.Command, []string) error { dir, err := dir(false) if err != nil { return err } sum, err := dir.Checksum() if err != nil { return err } return migrate.WriteSumFile(dir, sum) } // CmdMigrateImportRun is the command executed when running the CLI with 'migrate import' args. func CmdMigrateImportRun(cmd *cobra.Command, _ []string) error { if MigrateFlags.DirFormat == formatAtlas { return fmt.Errorf("cannot import a migration directory already in %q format", formatAtlas) } MigrateFlags.DirURL = MigrateFlags.Import.FromURL src, err := dir(false) if err != nil { return err } MigrateFlags.DirFormat = formatAtlas MigrateFlags.DirURL = MigrateFlags.Import.ToURL trgt, err := dir(true) if err != nil { return err } // Target must be empty. ff, err := trgt.Files() switch { case err != nil: return err case len(ff) != 0: return errors.New("target migration directory must be empty") } ff, err = src.Files() switch { case err != nil: return err case len(ff) == 0: fmt.Fprint(cmd.OutOrStderr(), "nothing to import") cmd.SilenceUsage = true return nil } // Fix version numbers for Flyway repeatable migrations. 
if _, ok := src.(*sqltool.FlywayDir); ok { sqltool.SetRepeatableVersion(ff) } // Extract the statements for each of the migration files, add them to a plan to format with the // migrate.DefaultFormatter. for _, f := range ff { stmts, err := f.StmtDecls() if err != nil { return err } plan := &migrate.Plan{ Version: f.Version(), Name: f.Desc(), Changes: make([]*migrate.Change, len(stmts)), } var buf strings.Builder for i, s := range stmts { for _, c := range s.Comments { buf.WriteString(c) if !strings.HasSuffix(c, "\n") { buf.WriteString("\n") } } buf.WriteString(strings.TrimSuffix(s.Text, ";")) plan.Changes[i] = &migrate.Change{Cmd: buf.String()} buf.Reset() } files, err := migrate.DefaultFormatter.Format(plan) if err != nil { return err } for _, f := range files { if err := trgt.WriteFile(f.Name(), f.Bytes()); err != nil { return err } } } sum, err := trgt.Checksum() if err != nil { return err } return migrate.WriteSumFile(trgt, sum) } // CmdMigrateNewRun is the command executed when running the CLI with 'migrate new' args. func CmdMigrateNewRun(_ *cobra.Command, args []string) error { dir, err := dir(true) if err != nil { return err } f, err := formatter() if err != nil { return err } var name string if len(args) > 0 { name = args[0] } return migrate.NewPlanner(nil, dir, migrate.PlanFormat(f)).WritePlan(&migrate.Plan{Name: name}) } // CmdMigrateSetRun is the command executed when running the CLI with 'migrate set' args. func CmdMigrateSetRun(cmd *cobra.Command, args []string) error { dir, err := dir(false) if err != nil { return err } avail, err := dir.Files() if err != nil { return err } // Check if the target version does exist in the migration directory. 
if idx := migrate.FilesLastIndex(avail, func(f migrate.File) bool {
		return f.Version() == args[0]
	}); idx == -1 {
		return fmt.Errorf("migration with version %q not found", args[0])
	}
	client, err := sqlclient.Open(cmd.Context(), MigrateFlags.URL)
	if err != nil {
		return err
	}
	defer client.Close()
	// Acquire a lock.
	if l, ok := client.Driver.(schema.Locker); ok {
		unlock, err := l.Lock(cmd.Context(), applyLockValue, 0)
		if err != nil {
			return fmt.Errorf("acquiring database lock: %w", err)
		}
		// If unlocking fails notify the user about it.
		// NOTE: the closure is required; `defer cobra.CheckErr(unlock())` would
		// evaluate unlock() immediately and release the lock before any work is done.
		defer func() { cobra.CheckErr(unlock()) }()
	}
	if err := checkRevisionSchemaClarity(cmd, client); err != nil {
		return err
	}
	// Ensure revision table exists.
	rrw, err := entRevisions(cmd.Context(), client)
	if err != nil {
		return err
	}
	if err := rrw.Migrate(cmd.Context()); err != nil {
		return err
	}
	// Wrap manipulation in a transaction.
	tx, err := client.Tx(cmd.Context(), nil)
	if err != nil {
		return err
	}
	rrw, err = entRevisions(cmd.Context(), tx.Client)
	if err != nil {
		return err
	}
	revs, err := rrw.ReadRevisions(cmd.Context())
	if err != nil {
		return err
	}
	if err := func() error {
		for _, r := range revs {
			// Check all existing revisions and ensure they precede the given version. If we encounter a partially
			// applied revision, or one with errors, mark them "fixed".
			switch {
			// remove revision to keep linear history
			case r.Version > args[0]:
				if err := rrw.DeleteRevision(cmd.Context(), r.Version); err != nil {
					return err
				}
			// keep, but if with error mark "fixed"
			case r.Version == args[0] && (r.Error != "" || r.Total != r.Applied):
				r.Type = migrate.RevisionTypeExecute | migrate.RevisionTypeResolved
				if err := rrw.WriteRevision(cmd.Context(), r); err != nil {
					return err
				}
			}
		}
		revs, err = rrw.ReadRevisions(cmd.Context())
		if err != nil {
			return err
		}
		// If the target version succeeds the last revision, mark
		// migrations applied, until we reach the target version.
var pending []migrate.File switch { case len(revs) == 0: // Take every file until we reach target version. for _, f := range avail { if f.Version() > args[0] { break } pending = append(pending, f) } case args[0] > revs[len(revs)-1].Version: loop: // Take every file succeeding the last revision until we reach target version. for _, f := range avail { switch { case f.Version() <= revs[len(revs)-1].Version: // Migration precedes last revision. case f.Version() > args[0]: // Migration succeeds target revision. break loop default: // between last revision and target pending = append(pending, f) } } } // Mark every pending file as applied. sum, err := dir.Checksum() if err != nil { return err } for _, f := range pending { h, err := sum.SumByName(f.Name()) if err != nil { return err } if err := rrw.WriteRevision(cmd.Context(), &migrate.Revision{ Version: f.Version(), Description: f.Desc(), Type: migrate.RevisionTypeResolved, ExecutedAt: time.Now(), Hash: h, OperatorVersion: operatorVersion(), }); err != nil { return err } } return nil }(); err != nil { if err2 := tx.Rollback(); err2 != nil { err = fmt.Errorf("%v: %w", err2, err) } return err } return tx.Commit() } // CmdMigrateStatusRun is the command executed when running the CLI with 'migrate status' args. func CmdMigrateStatusRun(cmd *cobra.Command, _ []string) error { // Open the migration directory. dir, err := dir(false) if err != nil { return err } avail, err := dir.Files() if err != nil { return err } // Open a client to the database. client, err := sqlclient.Open(cmd.Context(), MigrateFlags.URL) if err != nil { return err } defer client.Close() if err := checkRevisionSchemaClarity(cmd, client); err != nil { return err } // Inspect schema and check if the table does already exist. 
s, err := client.InspectSchema( cmd.Context(), revisionSchemaName(client), &schema.InspectOptions{Tables: []string{revision.Table}}, ) switch { case err != nil && !schema.IsNotExistError(err): return err case schema.IsNotExistError(err): return statusPrint(cmd.OutOrStdout(), avail, avail, nil) } if _, ok := s.Table(revision.Table); !ok { // Table does not exist. return statusPrint(cmd.OutOrStdout(), avail, avail, nil) } // Currently, only in DB revisions are supported. rrw, err := entRevisions(cmd.Context(), client) if err != nil { return err } // Executor can give us insights on the revision state. ex, err := migrate.NewExecutor(client.Driver, dir, rrw) if err != nil { return err } pending, err := ex.Pending(cmd.Context()) if err != nil && !errors.Is(err, migrate.ErrNoPendingFiles) { return err } revs, err := rrw.ReadRevisions(cmd.Context()) if err != nil { return err } return statusPrint(cmd.OutOrStdout(), avail, pending, revs) } func statusPrint(out io.Writer, avail, pending []migrate.File, revs []*migrate.Revision) (err error) { var ( cur, next, state string applied = avail[: len(avail)-len(pending) : len(avail)-len(pending)] partial = len(revs) != 0 && revs[len(revs)-1].Applied < revs[len(revs)-1].Total ) switch len(pending) { case len(avail): cur = "No version applied yet" default: cur = cyan(applied[len(applied)-1].Version()) // If the last pending version is partially applied, tell so. 
if partial { cur += fmt.Sprintf(" (%d statements applied)", revs[len(revs)-1].Applied) } } if len(pending) == 0 { state = green("OK") next = "Already at latest version" } else { state = yellow("PENDING") next = cyan(pending[0].Version()) if partial { next += fmt.Sprintf(" (%d statements left)", revs[len(revs)-1].Total-revs[len(revs)-1].Applied) } } exec := cyan(strconv.Itoa(len(applied))) if partial { exec += " + 1 partially" } c := cyan if len(pending) == 0 { c = green } fmt.Fprintf(out, "Migration Status: %s\n", state) fmt.Fprintf(out, "%s%s Current Version: %s\n", indent2, dash, cur) fmt.Fprintf(out, "%s%s Next Version: %s\n", indent2, dash, next) fmt.Fprintf(out, "%s%s Executed Files: %s\n", indent2, dash, exec) fmt.Fprintf(out, "%s%s Pending Files: %s", indent2, dash, c(strconv.Itoa(len(pending)))) if partial { fmt.Fprintf(out, " (partially)") } fmt.Fprintf(out, "\n") return nil } // CmdMigrateValidateRun is the command executed when running the CLI with 'migrate validate' args. func CmdMigrateValidateRun(cmd *cobra.Command, _ []string) error { // Validating the integrity is done by the PersistentPreRun already. if MigrateFlags.DevURL == "" { // If there is no --dev-url given do not attempt to replay the migration directory. return nil } // Open a client for the dev-db. dev, err := sqlclient.Open(cmd.Context(), MigrateFlags.DevURL) if err != nil { return err } defer dev.Close() // Currently, only our own migration file format is supported. 
dir, err := dir(false) if err != nil { return err } ex, err := migrate.NewExecutor(dev.Driver, dir, migrate.NopRevisionReadWriter{}) if err != nil { return err } if _, err := ex.Replay(cmd.Context(), func() migrate.StateReader { if dev.URL.Schema != "" { return migrate.SchemaConn(dev, "", nil) } return migrate.RealmConn(dev, nil) }()); err != nil && !errors.Is(err, migrate.ErrNoPendingFiles) { return fmt.Errorf("replaying the migration directory: %w", err) } return nil } // CmdMigrateLintRun is the command executed when running the CLI with 'migrate lint' args. func CmdMigrateLintRun(cmd *cobra.Command, _ []string) error { dev, err := sqlclient.Open(cmd.Context(), MigrateFlags.DevURL) if err != nil { return err } defer dev.Close() dir, err := dir(false) if err != nil { return err } var detect lint.ChangeDetector switch { case MigrateFlags.Lint.Latest == 0 && MigrateFlags.Lint.GitBase == "": return fmt.Errorf("--%s or --%s is required", migrateLintLatest, migrateLintGitBase) case MigrateFlags.Lint.Latest > 0 && MigrateFlags.Lint.GitBase != "": return fmt.Errorf("--%s and --%s are mutually exclusive", migrateLintLatest, migrateLintGitBase) case MigrateFlags.Lint.Latest > 0: detect = lint.LatestChanges(dir, int(MigrateFlags.Lint.Latest)) case MigrateFlags.Lint.GitBase != "": detect, err = lint.NewGitChangeDetector( dir, lint.WithWorkDir(MigrateFlags.Lint.GitDir), lint.WithBase(MigrateFlags.Lint.GitBase), lint.WithMigrationsPath(dir.(interface{ Path() string }).Path()), ) if err != nil { return err } } format := lint.DefaultTemplate if f := MigrateFlags.Lint.Format; f != "" { format, err = template.New("format").Funcs(lint.TemplateFuncs).Parse(f) if err != nil { return fmt.Errorf("parse log format: %w", err) } } env, err := selectEnv(GlobalFlags.SelectedEnv) if err != nil { return err } az, err := sqlcheck.AnalyzerFor(dev.Name, env.Lint.Remain()) if err != nil { return err } r := &lint.Runner{ Dev: dev, Dir: dir, ChangeDetector: detect, ReportWriter: 
&lint.TemplateWriter{ T: format, W: cmd.OutOrStdout(), }, Analyzers: az, } err = r.Run(cmd.Context()) // Print the error in case it was not printed before. cmd.SilenceErrors = errors.As(err, &lint.SilentError{}) return err } const ( txModeNone = "none" txModeAll = "all" txModeFile = "file" ) func printChecksumErr(out io.Writer) { fmt.Fprintf(out, `You have a checksum error in your migration directory. This happens if you manually create or edit a migration file. Please check your migration files and run 'atlas migrate hash' to re-hash the contents and resolve the error `) } // dir returns a migrate.Dir to use as migration directory. For now only local directories are supported. func dir(create bool) (migrate.Dir, error) { parts := strings.SplitN(MigrateFlags.DirURL, "://", 2) if len(parts) != 2 { return nil, fmt.Errorf("invalid dir url %q", MigrateFlags.DirURL) } if parts[0] != "file" { return nil, fmt.Errorf("unsupported driver %q", parts[0]) } f := func() (migrate.Dir, error) { return migrate.NewLocalDir(parts[1]) } switch MigrateFlags.DirFormat { case formatAtlas: case formatGolangMigrate: f = func() (migrate.Dir, error) { return sqltool.NewGolangMigrateDir(parts[1]) } case formatGoose: f = func() (migrate.Dir, error) { return sqltool.NewGooseDir(parts[1]) } case formatFlyway: f = func() (migrate.Dir, error) { return sqltool.NewFlywayDir(parts[1]) } case formatLiquibase: f = func() (migrate.Dir, error) { return sqltool.NewLiquibaseDir(parts[1]) } case formatDBMate: f = func() (migrate.Dir, error) { return sqltool.NewDBMateDir(parts[1]) } default: return nil, fmt.Errorf("unknown dir format %q", MigrateFlags.DirFormat) } d, err := f() if create && errors.Is(err, fs.ErrNotExist) { if err := os.MkdirAll(parts[1], 0755); err != nil { return nil, err } d, err = f() } return d, err } type target struct { migrate.StateReader // desired state. io.Closer // optional close function. Schema string // in case we work on a single schema. 
} // to returns a migrate.StateReader for the given to flag. func to(ctx context.Context, dev *sqlclient.Client) (*target, error) { scheme, err := selectScheme(MigrateFlags.ToURLs) if err != nil { return nil, err } schemas := MigrateFlags.Schemas switch scheme { case "file": // hcl file realm := &schema.Realm{} paths := make([]string, 0, len(MigrateFlags.ToURLs)) for _, u := range MigrateFlags.ToURLs { paths = append(paths, strings.TrimPrefix(u, "file://")) } parsed, err := parseHCLPaths(paths...) if err != nil { return nil, err } if err := dev.Eval(parsed, realm, nil); err != nil { return nil, err } if len(schemas) > 0 { // Validate all schemas in file were selected by user. sm := make(map[string]bool, len(schemas)) for _, s := range schemas { sm[s] = true } for _, s := range realm.Schemas { if !sm[s.Name] { return nil, fmt.Errorf("schema %q from paths %q is not requested (all schemas in HCL must be requested)", s.Name, paths) } } } // In case the dev connection is bound to a specific schema, we require the // desired schema to contain only one schema. Thus, executing diff will be // done on the content of these two schema and not the whole realm. if dev.URL.Schema != "" && len(realm.Schemas) > 1 { return nil, fmt.Errorf("cannot use HCL with more than 1 schema when dev-url is limited to schema %q", dev.URL.Schema) } if norm, ok := dev.Driver.(schema.Normalizer); ok && len(realm.Schemas) > 0 { realm, err = norm.NormalizeRealm(ctx, realm) if err != nil { return nil, err } } t := &target{StateReader: migrate.Realm(realm), Closer: io.NopCloser(nil)} if len(realm.Schemas) == 1 { t.Schema = realm.Schemas[0].Name } return t, nil default: // database connection client, err := sqlclient.Open(ctx, MigrateFlags.ToURLs[0]) if err != nil { return nil, err } t := &target{Closer: client} switch s := client.URL.Schema; { // Connection to a specific schema. 
case s != "": if len(schemas) > 1 || len(schemas) == 1 && schemas[0] != s { return nil, fmt.Errorf("cannot specify schemas with a schema connection to %q", s) } t.Schema = s t.StateReader = migrate.SchemaConn(client, s, &schema.InspectOptions{}) // A single schema is selected. case len(schemas) == 1: t.Schema = schemas[0] t.StateReader = migrate.SchemaConn(client, schemas[0], &schema.InspectOptions{}) // Multiple or all schemas. default: // In case the dev connection is limited to a single schema, // but we compare it to entire database. if dev.URL.Schema != "" { return nil, fmt.Errorf("cannot use database-url without a schema when dev-url is limited to %q", dev.URL.Schema) } t.StateReader = migrate.RealmConn(client, &schema.InspectRealmOption{Schemas: schemas}) } return t, nil } } // selectScheme validates the scheme of the provided to urls and returns the selected // url scheme. Currently, all URLs must be of the same scheme, and only multiple // "file://" URLs are allowed. func selectScheme(urls []string) (string, error) { var scheme string if len(urls) == 0 { return "", errors.New("at least one --to url is required") } for _, url := range urls { parts := strings.SplitN(url, "://", 2) switch current := parts[0]; { case scheme == "": scheme = current case scheme != current: return "", fmt.Errorf("got mixed --to url schemes: %q and %q, the desired state must be provided from a single kind of source", scheme, current) case current != "file": return "", fmt.Errorf("got multiple --to urls of scheme %q, only multiple 'file://' urls are supported", current) } } return scheme, nil } // parseHCLPaths parses the HCL files in the given paths. If a path represents a directory, // its direct descendants will be considered, skipping any subdirectories. If a project file // is present in the input paths, an error is returned. 
func parseHCLPaths(paths ...string) (*hclparse.Parser, error) {
	p := hclparse.NewParser()
	for _, path := range paths {
		switch stat, err := os.Stat(path); {
		case err != nil:
			return nil, err
		case stat.IsDir():
			dir, err := os.ReadDir(path)
			if err != nil {
				return nil, err
			}
			for _, f := range dir {
				// Skip nested dirs.
				if f.IsDir() {
					continue
				}
				if err := mayParse(p, filepath.Join(path, f.Name())); err != nil {
					return nil, err
				}
			}
		default:
			if err := mayParse(p, path); err != nil {
				return nil, err
			}
		}
	}
	// At least one HCL file must have been parsed; otherwise the paths were empty
	// or contained only non-".hcl" entries.
	if len(p.Files()) == 0 {
		return nil, fmt.Errorf("no schema files found in: %s", paths)
	}
	return p, nil
}

// mayParse will parse the file in path if it is an HCL file. If the file is an Atlas
// project file an error is returned. Files without a ".hcl" extension are silently skipped.
func mayParse(p *hclparse.Parser, path string) error {
	if n := filepath.Base(path); filepath.Ext(n) != ".hcl" {
		return nil
	}
	switch f, diag := p.ParseHCLFile(path); {
	case diag.HasErrors():
		return diag
	case isProjectFile(f):
		return fmt.Errorf("cannot parse project file %q as a schema file", path)
	default:
		return nil
	}
}

// isProjectFile reports whether f looks like an Atlas project file, i.e.
// contains a top-level "env" block rather than schema definitions.
func isProjectFile(f *hcl.File) bool {
	for _, blk := range f.Body.(*hclsyntax.Body).Blocks {
		if blk.Type == "env" {
			return true
		}
	}
	return false
}

// Supported migration directory formats for --dir-format.
const (
	formatAtlas         = "atlas"
	formatGolangMigrate = "golang-migrate"
	formatGoose         = "goose"
	formatFlyway        = "flyway"
	formatLiquibase     = "liquibase"
	formatDBMate        = "dbmate"
)

// formatter returns the migrate.Formatter matching the configured --dir-format.
func formatter() (migrate.Formatter, error) {
	switch MigrateFlags.DirFormat {
	case formatAtlas:
		return migrate.DefaultFormatter, nil
	case formatGolangMigrate:
		return sqltool.GolangMigrateFormatter, nil
	case formatGoose:
		return sqltool.GooseFormatter, nil
	case formatFlyway:
		return sqltool.FlywayFormatter, nil
	case formatLiquibase:
		return sqltool.LiquibaseFormatter, nil
	case formatDBMate:
		return sqltool.DBMateFormatter, nil
	default:
		return nil, fmt.Errorf("unknown format %q", MigrateFlags.DirFormat)
	}
}

const logFormatTTY = "tty"

// LogTTY is a migrate.Logger that pretty prints execution progress.
// If the connected out is not a tty, it will fall back to a non-colorful output. type LogTTY struct { out io.Writer start time.Time fileStart time.Time fileCounter int stmtCounter int } var ( cyan = color.CyanString green = color.HiGreenString red = color.HiRedString redBgWhiteFg = color.New(color.FgHiWhite, color.BgHiRed).SprintFunc() yellow = color.YellowString dash = yellow("--") arr = cyan("->") indent2 = " " indent4 = indent2 + indent2 ) // Log implements the migrate.Logger interface. func (l *LogTTY) Log(e migrate.LogEntry) { switch e := e.(type) { case migrate.LogExecution: l.start = time.Now() fmt.Fprintf(l.out, "Migrating to version %v", cyan(e.To)) if e.From != "" { fmt.Fprintf(l.out, " from %v", cyan(e.From)) } fmt.Fprintf(l.out, " (%d migrations in total):\n", len(e.Files)) case migrate.LogFile: l.fileCounter++ if !l.fileStart.IsZero() { l.reportFileEnd() } l.fileStart = time.Now() fmt.Fprintf(l.out, "\n%s%v migrating version %v", indent2, dash, cyan(e.Version)) if e.Skip > 0 { fmt.Fprintf(l.out, " (partially applied - skipping %s statements)", yellow("%d", e.Skip)) } fmt.Fprint(l.out, "\n") case migrate.LogStmt: l.stmtCounter++ fmt.Fprintf(l.out, "%s%v %s\n", indent4, arr, e.SQL) case migrate.LogDone: l.reportFileEnd() fmt.Fprintf(l.out, "\n%s%v\n", indent2, cyan(strings.Repeat("-", 25))) fmt.Fprintf(l.out, "%s%v %v\n", indent2, dash, time.Since(l.start)) fmt.Fprintf(l.out, "%s%v %v migrations\n", indent2, dash, l.fileCounter) fmt.Fprintf(l.out, "%s%v %v sql statements\n", indent2, dash, l.stmtCounter) case migrate.LogError: fmt.Fprintf(l.out, "%s %s\n", indent4, redBgWhiteFg(e.Error.Error())) fmt.Fprintf(l.out, "\n%s%v\n", indent2, cyan(strings.Repeat("-", 25))) fmt.Fprintf(l.out, "%s%v %v\n", indent2, dash, time.Since(l.start)) fmt.Fprintf(l.out, "%s%v %v migrations ok (%s)\n", indent2, dash, zero(l.fileCounter-1), red("1 with errors")) fmt.Fprintf(l.out, "%s%v %v sql statements ok (%s)\n", indent2, dash, zero(l.stmtCounter-1), red("1 with errors")) 
fmt.Fprintf(l.out, "\n%s\n%v\n\n", red("Error: Execution had errors:"), redBgWhiteFg(e.Error.Error()))
	default:
		fmt.Fprintf(l.out, "%v", e)
	}
}

// reportFileEnd prints the elapsed time for the migration file that just finished.
func (l *LogTTY) reportFileEnd() {
	fmt.Fprintf(l.out, "%s%v ok (%v)\n", indent2, dash, yellow("%s", time.Since(l.fileStart)))
}

// zero clamps negative values to 0.
func zero(v int) int {
	if v < 0 {
		return 0
	}
	return v
}

// logFormat returns the migrate.Logger configured by the --log flag.
func logFormat(out io.Writer) (migrate.Logger, error) {
	switch l := MigrateFlags.Apply.LogFormat; l {
	case logFormatTTY:
		return &LogTTY{out: out}, nil
	default:
		return nil, fmt.Errorf("unknown log-format %q", l)
	}
}

// migrateFlagsFromEnv fills unset command-line flags from the selected
// project-file environment. Explicitly provided flags take precedence
// (maySetFlag only sets a flag that was not changed by the user).
func migrateFlagsFromEnv(cmd *cobra.Command, _ []string) error {
	activeEnv, err := selectEnv(GlobalFlags.SelectedEnv)
	if err != nil {
		return err
	}
	if err := inputValsFromEnv(cmd); err != nil {
		return err
	}
	if err := maySetFlag(cmd, migrateFlagDevURL, activeEnv.DevURL); err != nil {
		return err
	}
	if err := maySetFlag(cmd, migrateFlagDirFormat, activeEnv.Migration.Format); err != nil {
		return err
	}
	// Lint has its own flag set; every other migrate subcommand shares url/revisions-schema.
	switch cmd.Name() {
	case "lint":
		if err := maySetFlag(cmd, migrateFlagLog, activeEnv.Lint.Log); err != nil {
			return err
		}
		if err := maySetFlag(cmd, migrateLintLatest, strconv.Itoa(activeEnv.Lint.Latest)); err != nil {
			return err
		}
		if err := maySetFlag(cmd, migrateLintGitDir, activeEnv.Lint.Git.Dir); err != nil {
			return err
		}
		if err := maySetFlag(cmd, migrateLintGitBase, activeEnv.Lint.Git.Base); err != nil {
			return err
		}
	default:
		if err := maySetFlag(cmd, migrateFlagURL, activeEnv.URL); err != nil {
			return err
		}
		if err := maySetFlag(cmd, migrateFlagRevisionsSchema, activeEnv.Migration.RevisionsSchema); err != nil {
			return err
		}
	}
	// Transform "src" to a URL.
srcs, err := activeEnv.Sources()
	if err != nil {
		return err
	}
	for i, s := range srcs {
		if s, err = filepath.Abs(s); err != nil {
			return fmt.Errorf("finding abs path to source: %q: %w", s, err)
		}
		srcs[i] = "file://" + s
	}
	if err := maySetFlag(cmd, migrateFlagTo, strings.Join(srcs, ",")); err != nil {
		return err
	}
	// Join with "," so pflag can split the slice value back into its elements;
	// joining with "" would fuse multiple schema names into one.
	if s := "[" + strings.Join(activeEnv.Schemas, ",") + "]"; len(activeEnv.Schemas) > 0 {
		if err := maySetFlag(cmd, migrateFlagSchema, s); err != nil {
			return err
		}
	}
	return nil
}

type (
	// dryRunDriver wraps a migrate.Driver without executing any SQL statements.
	dryRunDriver struct{ migrate.Driver }

	// dryRunRevisions wraps a migrate.RevisionReadWriter without executing any SQL statements.
	dryRunRevisions struct{ migrate.RevisionReadWriter }
)

// QueryContext overrides the wrapped schema.ExecQuerier to not execute any SQL.
func (dryRunDriver) QueryContext(context.Context, string, ...any) (*sql.Rows, error) {
	return nil, nil
}

// ExecContext overrides the wrapped schema.ExecQuerier to not execute any SQL.
func (dryRunDriver) ExecContext(context.Context, string, ...any) (sql.Result, error) {
	return nil, nil
}

// Lock implements the schema.Locker interface.
func (dryRunDriver) Lock(context.Context, string, time.Duration) (schema.UnlockFunc, error) {
	// We dry-run, we don't execute anything. Locking is not required.
	return func() error { return nil }, nil
}

// CheckClean implements the migrate.CleanChecker interface.
func (dryRunDriver) CheckClean(context.Context, *migrate.TableIdent) error {
	return nil
}

// Snapshot implements the migrate.Snapshoter interface.
func (dryRunDriver) Snapshot(context.Context) (migrate.RestoreFunc, error) {
	// We dry-run, we don't execute anything. Snapshotting not required.
	return func(context.Context) error { return nil }, nil
}

// WriteRevision overrides the wrapped migrate.RevisionReadWriter to not save any changes to revisions.
func (dryRunRevisions) WriteRevision(context.Context, *migrate.Revision) error { return nil } atlas-0.7.2/cmd/atlas/internal/cmdapi/migrate_test.go000066400000000000000000000722701431455511600225770ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package cmdapi import ( "context" "database/sql" "errors" "fmt" "io" "net/url" "os" "os/exec" "path/filepath" "runtime" "strings" "testing" "time" migrate2 "ariga.io/atlas/cmd/atlas/internal/migrate" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlclient" "ariga.io/atlas/sql/sqlite" _ "ariga.io/atlas/sql/sqlite" _ "ariga.io/atlas/sql/sqlite/sqlitecheck" "github.com/fatih/color" _ "github.com/mattn/go-sqlite3" "github.com/stretchr/testify/require" ) func TestMigrate(t *testing.T) { _, err := runCmd(Root, "migrate") require.NoError(t, err) } func TestMigrate_Import(t *testing.T) { for _, tool := range []string{"dbmate", "flyway", "golang-migrate", "goose", "liquibase"} { p := t.TempDir() t.Run(tool, func(t *testing.T) { path := filepath.FromSlash("testdata/import/" + tool) out, err := runCmd( Root, "migrate", "import", "--from", "file://"+path, "--to", "file://"+p, "--dir-format", tool, ) require.NoError(t, err) require.Zero(t, out) path += "_gold" ex, err := os.ReadDir(path) require.NoError(t, err) ac, err := os.ReadDir(p) require.NoError(t, err) require.Equal(t, len(ex)+1, len(ac)) // sum file for i := range ex { e, err := os.ReadFile(filepath.Join(path, ex[i].Name())) require.NoError(t, err) a, err := os.ReadFile(filepath.Join(p, ex[i].Name())) require.NoError(t, err) require.Equal(t, string(e), string(a)) } }) } } func TestMigrate_Apply(t *testing.T) { var ( p = t.TempDir() ctx = context.Background() ) // Disable text coloring in testing // to assert on string matching. 
color.NoColor = true // Fails on empty directory. s, err := runCmd( Root, "migrate", "apply", "--dir", "file://"+p, "-u", openSQLite(t, ""), ) require.NoError(t, err) require.Equal(t, "No migration files to execute\n", s) // Fails on directory without sum file. require.NoError(t, os.Rename( filepath.FromSlash("testdata/sqlite/atlas.sum"), filepath.FromSlash("testdata/sqlite/atlas.sum.bak"), )) t.Cleanup(func() { os.Rename(filepath.FromSlash("testdata/sqlite/atlas.sum.bak"), filepath.FromSlash("testdata/sqlite/atlas.sum")) }) _, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite", "--url", openSQLite(t, ""), ) require.ErrorIs(t, err, migrate.ErrChecksumNotFound) require.NoError(t, os.Rename( filepath.FromSlash("testdata/sqlite/atlas.sum.bak"), filepath.FromSlash("testdata/sqlite/atlas.sum"), )) // A lock will prevent execution. sqlclient.Register( "sqlitelockapply", sqlclient.OpenerFunc(func(ctx context.Context, u *url.URL) (*sqlclient.Client, error) { client, err := sqlclient.Open(ctx, strings.Replace(u.String(), u.Scheme, "sqlite", 1)) if err != nil { return nil, err } client.Driver = &sqliteLockerDriver{client.Driver} return client, nil }), sqlclient.RegisterDriverOpener(func(db schema.ExecQuerier) (migrate.Driver, error) { drv, err := sqlite.Open(db) if err != nil { return nil, err } return &sqliteLockerDriver{drv}, nil }), ) f, err := os.Create(filepath.Join(p, "test.db")) require.NoError(t, err) require.NoError(t, f.Close()) s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite", "--url", fmt.Sprintf("sqlitelockapply://file:%s?cache=shared&_fk=1", filepath.Join(p, "test.db")), ) require.ErrorIs(t, err, errLock) require.True(t, strings.HasPrefix(s, "Error: acquiring database lock: "+errLock.Error())) // Apply zero throws error. 
for _, n := range []string{"-1", "0"} { _, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test.db")), "--", n, ) require.EqualError(t, err, fmt.Sprintf("cannot apply '%s' migration files", n)) } // Will work and print stuff to the console. s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test.db")), "1", ) require.NoError(t, err) require.Contains(t, s, "20220318104614") // log to version require.Contains(t, s, "CREATE TABLE tbl (`col` int NOT NULL);") // logs statement require.NotContains(t, s, "ALTER TABLE `tbl` ADD `col_2` bigint;") // does not execute second file require.Contains(t, s, "1 migrations") // logs amount of migrations require.Contains(t, s, "1 sql statements") // Transactions will be wrapped per file. If the second file has an error, first still is applied. s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite2", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test2.db")), ) require.Error(t, err) require.Contains(t, s, "20220318104614") // log to version require.Contains(t, s, "CREATE TABLE tbl (`col` int NOT NULL);") // logs statement require.Contains(t, s, "ALTER TABLE `tbl` ADD `col_2` bigint;") // does execute first stmt first second file require.Contains(t, s, "ALTER TABLE `tbl` ADD `col_3` bigint;") // does execute second stmt first second file require.NotContains(t, s, "ALTER TABLE `tbl` ADD `col_4` bigint;") // but not third require.Contains(t, s, "1 migrations ok (1 with errors)") // logs amount of migrations require.Contains(t, s, "2 sql statements ok (1 with errors)") // logs amount of statement require.Contains(t, s, "Error: Execution had errors:") // logs error summary require.Contains(t, s, "near \"asdasd\": syntax error") // logs error summary c, err := sqlclient.Open(ctx, 
fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test2.db"))) require.NoError(t, err) t.Cleanup(func() { require.NoError(t, c.Close()) }) sch, err := c.InspectSchema(ctx, "", nil) tbl, ok := sch.Table("tbl") require.True(t, ok) _, ok = tbl.Column("col_2") require.False(t, ok) _, ok = tbl.Column("col_3") require.False(t, ok) rrw, err := migrate2.NewEntRevisions(ctx, c) require.NoError(t, err) revs, err := rrw.ReadRevisions(ctx) require.NoError(t, err) require.Len(t, revs, 1) // Running again will pick up the failed statement and try it again. s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite2", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test2.db")), ) require.Error(t, err) require.Contains(t, s, "20220318104614") // currently applied version require.Contains(t, s, "20220318104615") // retry second (partially applied) require.NotContains(t, s, "CREATE TABLE tbl (`col` int NOT NULL);") // will not attempt stmts from first file require.Contains(t, s, "ALTER TABLE `tbl` ADD `col_2` bigint;") // picks up first statement require.Contains(t, s, "ALTER TABLE `tbl` ADD `col_3` bigint;") // does execute second stmt first second file require.NotContains(t, s, "ALTER TABLE `tbl` ADD `col_4` bigint;") // but not third require.Contains(t, s, "0 migrations ok (1 with errors)") // logs amount of migrations require.Contains(t, s, "1 sql statements ok (1 with errors)") // logs amount of statement require.Contains(t, s, "Error: Execution had errors:") // logs error summary require.Contains(t, s, "near \"asdasd\": syntax error") // logs error summary // Editing an applied line will raise error. 
s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite2", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test2.db")), "--tx-mode", "none", ) t.Cleanup(func() { _ = os.RemoveAll("testdata/sqlite3") }) require.NoError(t, exec.Command("cp", "-r", "testdata/sqlite2", "testdata/sqlite3").Run()) sed(t, "s/col_2/col_5/g", "testdata/sqlite3/20220318104615_second.sql") _, err = runCmd(Root, "migrate", "hash", "--dir", "file://testdata/sqlite3") require.NoError(t, err) s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite3", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test2.db")), ) require.ErrorAs(t, err, &migrate.HistoryChangedError{}) // Fixing the migration file will finish without errors. sed(t, "s/col_5/col_2/g", "testdata/sqlite3/20220318104615_second.sql") sed(t, "s/asdasd //g", "testdata/sqlite3/20220318104615_second.sql") _, err = runCmd(Root, "migrate", "hash", "--dir", "file://testdata/sqlite3") require.NoError(t, err) s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite3", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test2.db")), ) require.NoError(t, err) require.Contains(t, s, "20220318104615") // retry second (partially applied) require.Contains(t, s, "ALTER TABLE `tbl` ADD `col_3` bigint;") // does execute second stmt first second file require.Contains(t, s, "ALTER TABLE `tbl` ADD `col_4` bigint;") // does execute second stmt first second file require.Contains(t, s, "1 migrations") // logs amount of migrations require.Contains(t, s, "2") // logs amount of statement require.NotContains(t, s, "Error: Execution had errors:") // logs error summary require.NotContains(t, s, "near \"asdasd\": syntax error") // logs error summary // Running again will report database being in clean state. 
s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite3", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test2.db")), ) require.NoError(t, err) require.Equal(t, "No migration files to execute\n", s) // Dry run will print the statements in second migration file without executing them. // No changes to the revisions will be done. s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test.db")), "--dry-run", "1", ) require.NoError(t, err) require.Contains(t, s, "20220318104615") // log to version require.Contains(t, s, "ALTER TABLE `tbl` ADD `col_2` bigint;") // logs statement c1, err := sqlclient.Open(ctx, fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test.db"))) require.NoError(t, err) t.Cleanup(func() { require.NoError(t, c1.Close()) }) sch, err = c1.InspectSchema(ctx, "", nil) tbl, ok = sch.Table("tbl") require.True(t, ok) _, ok = tbl.Column("col_2") require.False(t, ok) rrw, err = migrate2.NewEntRevisions(ctx, c1) require.NoError(t, err) revs, err = rrw.ReadRevisions(ctx) require.NoError(t, err) require.Len(t, revs, 1) MigrateFlags.Apply.DryRun = false // global flag, undo for rest of tests // Prerequisites for testing missing migration behavior. c1, err = sqlclient.Open(ctx, fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test3.db"))) require.NoError(t, err) t.Cleanup(func() { require.NoError(t, c1.Close()) }) require.NoError(t, os.Rename( "testdata/sqlite3/20220318104615_second.sql", "testdata/sqlite3/20220318104616_second.sql", )) _, err = runCmd(Root, "migrate", "hash", "--dir", "file://testdata/sqlite3") require.NoError(t, err) rrw, err = migrate2.NewEntRevisions(ctx, c1) require.NoError(t, err) require.NoError(t, rrw.Migrate(ctx)) // No changes if the last revision has a greater version than the last migration. 
require.NoError(t, rrw.WriteRevision(ctx, &migrate.Revision{Version: "zzz"})) s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite3", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test3.db")), ) require.NoError(t, err) require.Equal(t, "No migration files to execute\n", s) // If the revision is before the last but after the first migration, only the last one is pending. _, err = c1.ExecContext(ctx, "DROP table `atlas_schema_revisions`") require.NoError(t, err) s, err = runCmd( Root, "migrate", "apply", "1", "--dir", "file://testdata/sqlite3", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test3.db")), ) require.NoError(t, rrw.WriteRevision(ctx, &migrate.Revision{Version: "20220318104615"})) require.NoError(t, err) s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite3", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test3.db")), ) require.NoError(t, err) require.NotContains(t, s, "20220318104614") // log to version require.Contains(t, s, "20220318104616") // log to version require.Contains(t, s, "ALTER TABLE `tbl` ADD `col_2` bigint;") // logs statement // If the revision is before every migration file, every file is pending. 
_, err = c1.ExecContext(ctx, "DROP table `atlas_schema_revisions`; DROP table `tbl`;") require.NoError(t, err) require.NoError(t, rrw.Migrate(ctx)) require.NoError(t, rrw.WriteRevision(ctx, &migrate.Revision{Version: "1"})) require.NoError(t, err) s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite3", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test3.db")), ) require.NoError(t, err) require.Contains(t, s, "20220318104614") // log to version require.Contains(t, s, "20220318104616") // log to version require.Contains(t, s, "CREATE TABLE tbl (`col` int NOT NULL);") // logs statement require.Contains(t, s, "ALTER TABLE `tbl` ADD `col_2` bigint;") // logs statement // If the revision is partially applied, error out. require.NoError(t, rrw.WriteRevision(ctx, &migrate.Revision{Version: "z", Description: "z", Total: 1})) require.NoError(t, err) _, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlite3", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test3.db")), ) require.EqualError(t, err, migrate.MissingMigrationError{Version: "z", Description: "z"}.Error()) } func TestMigrate_ApplyTxMode(t *testing.T) { for _, mode := range []string{"none", "file", "all"} { t.Run(mode, func(t *testing.T) { p := t.TempDir() // Apply the first 2 migrations. s, err := runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlitetx", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test.db")), "--tx-mode", mode, "2", ) require.NoError(t, err) require.NotEmpty(t, s) db, err := sql.Open("sqlite3", fmt.Sprintf("file:%s?cache=shared&_fk=1", filepath.Join(p, "test.db"))) require.NoError(t, err) var n int require.NoError(t, db.QueryRow("SELECT COUNT(*) FROM `friendships`").Scan(&n)) require.Equal(t, 2, n) // Apply the rest. 
s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlitetx", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test.db")), "--tx-mode", mode, ) require.NoError(t, err) require.NoError(t, db.QueryRow("SELECT COUNT(*) FROM `friendships`").Scan(&n)) require.Equal(t, 2, n) // For transactions check that the foreign keys are checked before the transaction is committed. if mode != "none" { // Apply the first 2 migrations for the faulty one. s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlitetx_2", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test_2.db")), "--tx-mode", mode, "2", ) require.NoError(t, err) require.NotEmpty(t, s) db, err = sql.Open("sqlite3", fmt.Sprintf("file:%s?cache=shared&_fk=1", filepath.Join(p, "test_2.db"))) require.NoError(t, err) require.NoError(t, db.QueryRow("SELECT COUNT(*) FROM `friendships`").Scan(&n)) require.Equal(t, 2, n) // Add an existing constraint. c, err := sqlclient.Open(context.Background(), fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test_2.db"))) require.NoError(t, err) _, err = c.ExecContext(context.Background(), "PRAGMA foreign_keys = off; INSERT INTO `friendships` (`user_id`, `friend_id`) VALUES (3,3);PRAGMA foreign_keys = on;") require.NoError(t, err) require.NoError(t, db.QueryRow("SELECT COUNT(*) FROM `friendships`").Scan(&n)) require.Equal(t, 3, n) // Apply the rest, expect it to fail due to constraint error, but only the new one is reported. 
s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/sqlitetx_2", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test_2.db")), "--tx-mode", mode, ) require.EqualError(t, err, "sql/sqlite: foreign key mismatch: [{tbl:friendships ref:users row:4 index:1}]") require.NoError(t, db.QueryRow("SELECT COUNT(*) FROM `friendships`").Scan(&n)) require.Equal(t, 3, n) // was rolled back } }) } } func TestMigrate_ApplyBaseline(t *testing.T) { p := t.TempDir() // Run migration with baseline should store this revision in the database. s, err := runCmd( Root, "migrate", "apply", "--dir", "file://testdata/baseline1", "--baseline", "1", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test.db")), ) require.NoError(t, err) require.Contains(t, s, "No migration files to execute") // Next run without baseline should run the migration from the baseline. s, err = runCmd( Root, "migrate", "apply", "--dir", "file://testdata/baseline2", "--url", fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(p, "test.db")), ) require.NoError(t, err) require.Contains(t, s, "Migrating to version 20220318104615 from 1 (2 migrations in total)") } func TestMigrate_Diff(t *testing.T) { p := t.TempDir() to := hclURL(t) // Will create migration directory if not existing. _, err := runCmd( Root, "migrate", "diff", "name", "--dir", "file://"+filepath.Join(p, "migrations"), "--dev-url", openSQLite(t, ""), "--to", to, ) require.NoError(t, err) require.FileExists(t, filepath.Join(p, "migrations", fmt.Sprintf("%s_name.sql", time.Now().UTC().Format("20060102150405")))) // Expect no clean dev error. p = t.TempDir() s, err := runCmd( Root, "migrate", "diff", "name", "--dir", "file://"+p, "--dev-url", openSQLite(t, "create table t (c int);"), "--to", to, ) require.ErrorAs(t, err, &migrate.NotCleanError{}) require.ErrorContains(t, err, "found table \"t\"") // Works (on empty directory). 
s, err = runCmd( Root, "migrate", "diff", "name", "--dir", "file://"+p, "--dev-url", openSQLite(t, ""), "--to", to, ) require.NoError(t, err) require.Zero(t, s) require.FileExists(t, filepath.Join(p, fmt.Sprintf("%s_name.sql", time.Now().UTC().Format("20060102150405")))) require.FileExists(t, filepath.Join(p, "atlas.sum")) // A lock will prevent diffing. sqlclient.Register("sqlitelockdiff", sqlclient.OpenerFunc(func(ctx context.Context, u *url.URL) (*sqlclient.Client, error) { client, err := sqlclient.Open(ctx, strings.Replace(u.String(), u.Scheme, "sqlite", 1)) if err != nil { return nil, err } client.Driver = &sqliteLockerDriver{Driver: client.Driver} return client, nil })) f, err := os.Create(filepath.Join(p, "test.db")) require.NoError(t, err) require.NoError(t, f.Close()) s, err = runCmd( Root, "migrate", "diff", "name", "--dir", "file://"+t.TempDir(), "--dev-url", fmt.Sprintf("sqlitelockdiff://file:%s?cache=shared&_fk=1", filepath.Join(p, "test.db")), "--to", to, ) require.True(t, strings.HasPrefix(s, "Error: acquiring database lock: "+errLock.Error())) require.ErrorIs(t, err, errLock) } func TestMigrate_New(t *testing.T) { var ( p = t.TempDir() v = time.Now().UTC().Format("20060102150405") ) s, err := runCmd(Root, "migrate", "new", "--dir", "file://"+p) require.Zero(t, s) require.NoError(t, err) require.FileExists(t, filepath.Join(p, v+".sql")) require.FileExists(t, filepath.Join(p, "atlas.sum")) require.Equal(t, 2, countFiles(t, p)) s, err = runCmd(Root, "migrate", "new", "my-migration-file", "--dir", "file://"+p) require.Zero(t, s) require.NoError(t, err) require.FileExists(t, filepath.Join(p, v+"_my-migration-file.sql")) require.FileExists(t, filepath.Join(p, "atlas.sum")) require.Equal(t, 3, countFiles(t, p)) p = t.TempDir() s, err = runCmd(Root, "migrate", "new", "golang-migrate", "--dir", "file://"+p, "--dir-format", formatGolangMigrate) require.Zero(t, s) require.NoError(t, err) require.FileExists(t, filepath.Join(p, v+"_golang-migrate.up.sql")) 
require.FileExists(t, filepath.Join(p, v+"_golang-migrate.down.sql")) require.Equal(t, 3, countFiles(t, p)) p = t.TempDir() s, err = runCmd(Root, "migrate", "new", "goose", "--dir", "file://"+p, "--dir-format", formatGoose) require.Zero(t, s) require.NoError(t, err) require.FileExists(t, filepath.Join(p, v+"_goose.sql")) require.Equal(t, 2, countFiles(t, p)) p = t.TempDir() s, err = runCmd(Root, "migrate", "new", "flyway", "--dir", "file://"+p, "--dir-format", formatFlyway) require.Zero(t, s) require.NoError(t, err) require.FileExists(t, filepath.Join(p, fmt.Sprintf("V%s__%s.sql", v, formatFlyway))) require.FileExists(t, filepath.Join(p, fmt.Sprintf("U%s__%s.sql", v, formatFlyway))) require.Equal(t, 3, countFiles(t, p)) p = t.TempDir() s, err = runCmd(Root, "migrate", "new", "liquibase", "--dir", "file://"+p, "--dir-format", formatLiquibase) require.Zero(t, s) require.NoError(t, err) require.FileExists(t, filepath.Join(p, v+"_liquibase.sql")) require.Equal(t, 2, countFiles(t, p)) p = t.TempDir() s, err = runCmd(Root, "migrate", "new", "dbmate", "--dir", "file://"+p, "--dir-format", formatDBMate) require.Zero(t, s) require.NoError(t, err) require.FileExists(t, filepath.Join(p, v+"_dbmate.sql")) require.Equal(t, 2, countFiles(t, p)) f := filepath.Join("testdata", "mysql", "new.sql") require.NoError(t, os.WriteFile(f, []byte("contents"), 0600)) t.Cleanup(func() { os.Remove(f) }) s, err = runCmd(Root, "migrate", "new", "--dir", "file://testdata/mysql") require.NotZero(t, s) require.Error(t, err) } func TestMigrate_Validate(t *testing.T) { // Without re-playing. MigrateFlags.DevURL = "" // global flags are set from other tests ... MigrateFlags.DirFormat = "atlas" // global flags are set from other tests ... 
s, err := runCmd(Root, "migrate", "validate", "--dir", "file://testdata/mysql") require.Zero(t, s) require.NoError(t, err) f := filepath.Join("testdata", "mysql", "new.sql") require.NoError(t, os.WriteFile(f, []byte("contents"), 0600)) t.Cleanup(func() { os.Remove(f) }) s, err = runCmd(Root, "migrate", "validate", "--dir", "file://testdata/mysql") require.NotZero(t, s) require.Error(t, err) require.NoError(t, os.Remove(f)) // Replay migration files if a dev-url is given. p := t.TempDir() require.NoError(t, os.WriteFile(filepath.Join(p, "1_initial.sql"), []byte("create table t1 (c1 int)"), 0644)) require.NoError(t, os.WriteFile(filepath.Join(p, "2_second.sql"), []byte("create table t2 (c2 int)"), 0644)) _, err = runCmd(Root, "migrate", "hash", "--dir", "file://"+p) require.NoError(t, err) s, err = runCmd( Root, "migrate", "validate", "--dir", "file://"+p, "--dev-url", openSQLite(t, ""), ) require.Zero(t, s) require.NoError(t, err) // Should fail since the files are not compatible with SQLite. _, err = runCmd(Root, "migrate", "validate", "--dir", "file://testdata/mysql", "--dev-url", openSQLite(t, "")) require.Error(t, err) } func TestMigrate_Hash(t *testing.T) { s, err := runCmd(Root, "migrate", "hash", "--dir", "file://testdata/mysql") require.Zero(t, s) require.NoError(t, err) // Prints a warning if --force flag is still used. 
s, err = runCmd(Root, "migrate", "hash", "--dir", "file://testdata/mysql", "--force") require.NoError(t, err) require.Equal(t, "Flag --force has been deprecated, you can safely omit it.\n", s) p := t.TempDir() err = copyFile(filepath.Join("testdata", "mysql", "20220318104614_initial.sql"), filepath.Join(p, "20220318104614_initial.sql")) require.NoError(t, err) s, err = runCmd(Root, "migrate", "hash", "--dir", "file://"+p) require.Zero(t, s) require.NoError(t, err) require.FileExists(t, filepath.Join(p, "atlas.sum")) d, err := os.ReadFile(filepath.Join(p, "atlas.sum")) require.NoError(t, err) dir, err := migrate.NewLocalDir(p) require.NoError(t, err) sum, err := dir.Checksum() require.NoError(t, err) b, err := sum.MarshalText() require.NoError(t, err) require.Equal(t, d, b) p = t.TempDir() require.NoError(t, copyFile( filepath.Join("testdata", "mysql", "20220318104614_initial.sql"), filepath.Join(p, "20220318104614_initial.sql"), )) s, err = runCmd(Root, "migrate", "hash", "--dir", "file://"+os.Getenv("MIGRATION_DIR")) require.NotZero(t, s) require.Error(t, err) } func TestMigrate_Lint(t *testing.T) { p := t.TempDir() s, err := runCmd( Root, "migrate", "lint", "--dir", "file://"+p, "--dev-url", openSQLite(t, ""), "--latest", "1", ) require.NoError(t, err) require.Empty(t, s) err = os.WriteFile(filepath.Join(p, "1.sql"), []byte("CREATE TABLE t(c int);"), 0600) require.NoError(t, err) err = os.WriteFile(filepath.Join(p, "2.sql"), []byte("DROP TABLE t;"), 0600) require.NoError(t, err) s, err = runCmd( Root, "migrate", "lint", "--dir", "file://"+p, "--dev-url", openSQLite(t, ""), "--latest", "1", ) require.Error(t, err) require.Equal(t, "2.sql: destructive changes detected:\n\n\tL1: Dropping table \"t\"\n\n", s) s, err = runCmd( Root, "migrate", "lint", "--dir", "file://"+p, "--dev-url", openSQLite(t, ""), "--latest", "1", "--log", "{{ range .Files }}{{ .Name }}{{ end }}", ) require.Error(t, err) require.Equal(t, "2.sql", s) // Change files to golang-migrate format. 
require.NoError(t, os.Rename(filepath.Join(p, "1.sql"), filepath.Join(p, "1.up.sql"))) require.NoError(t, os.Rename(filepath.Join(p, "2.sql"), filepath.Join(p, "1.down.sql"))) s, err = runCmd( Root, "migrate", "lint", "--dir", "file://"+p, "--dir-format", "golang-migrate", "--dev-url", openSQLite(t, ""), "--latest", "2", "--log", "{{ range .Files }}{{ .Name }}:{{ len .Reports }}{{ end }}", ) require.NoError(t, err) require.Equal(t, "1.up.sql:0", s) // Invalid files. MigrateFlags.Lint.Format = "" err = os.WriteFile(filepath.Join(p, "2.up.sql"), []byte("BORING"), 0600) require.NoError(t, err) s, err = runCmd( Root, "migrate", "lint", "--dir", "file://"+p, "--dir-format", "golang-migrate", "--dev-url", openSQLite(t, ""), "--latest", "1", ) require.Error(t, err) require.Equal(t, "2.up.sql: executing statement: near \"BORING\": syntax error\n", s) } const testSchema = ` schema "main" { } table "table" { schema = schema.main column "col" { type = int comment = "column comment" } column "age" { type = int } column "price1" { type = int } column "price2" { type = int } column "account_name" { type = varchar(32) null = true } column "created_at" { type = datetime default = sql("current_timestamp") } primary_key { columns = [table.table.column.col] } index "index" { unique = true columns = [ table.table.column.col, table.table.column.age, ] comment = "index comment" } foreign_key "accounts" { columns = [ table.table.column.account_name, ] ref_columns = [ table.accounts.column.name, ] on_delete = SET_NULL on_update = "NO_ACTION" } check "positive price" { expr = "price1 > 0" } check { expr = "price1 <> price2" enforced = true } check { expr = "price2 <> price1" enforced = false } comment = "table comment" } table "accounts" { schema = schema.main column "name" { type = varchar(32) } column "unsigned_float" { type = float(10) unsigned = true } column "unsigned_decimal" { type = decimal(10, 2) unsigned = true } primary_key { columns = [table.accounts.column.name] } }` func 
hclURL(t *testing.T) string { p := t.TempDir() require.NoError(t, os.WriteFile(filepath.Join(p, "schema.hcl"), []byte(testSchema), 0600)) return "file://" + filepath.Join(p, "schema.hcl") } func copyFile(src, dst string) error { sf, err := os.Open(src) if err != nil { return err } defer sf.Close() df, err := os.Create(dst) if err != nil { return err } defer df.Close() _, err = io.Copy(df, sf) return err } type sqliteLockerDriver struct{ migrate.Driver } var errLock = errors.New("lockErr") func (d *sqliteLockerDriver) Lock(context.Context, string, time.Duration) (schema.UnlockFunc, error) { return func() error { return nil }, errLock } func (d *sqliteLockerDriver) Snapshot(ctx context.Context) (migrate.RestoreFunc, error) { return d.Driver.(migrate.Snapshoter).Snapshot(ctx) } func (d *sqliteLockerDriver) CheckClean(ctx context.Context, revT *migrate.TableIdent) error { return d.Driver.(migrate.CleanChecker).CheckClean(ctx, revT) } func countFiles(t *testing.T, p string) int { files, err := os.ReadDir(p) require.NoError(t, err) return len(files) } func sed(t *testing.T, r, p string) { args := []string{"-i"} if runtime.GOOS == "darwin" { args = append(args, ".bk") } buf, err := exec.Command("sed", append(args, r, p)...).CombinedOutput() require.NoError(t, err, string(buf)) } atlas-0.7.2/cmd/atlas/internal/cmdapi/project.go000066400000000000000000000122451431455511600215520ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package cmdapi import ( "errors" "fmt" "os" "ariga.io/atlas/schemahcl" ) const projectFileName = "atlas.hcl" type loadConfig struct { inputVals map[string]string } // LoadOption configures the LoadEnv function. type LoadOption func(*loadConfig) // WithInput is a LoadOption that sets the input values for the LoadEnv function. 
func WithInput(vals map[string]string) LoadOption { return func(config *loadConfig) { config.inputVals = vals } } type ( // Project represents an atlas.hcl project file. Project struct { Envs []*Env `spec:"env"` // List of environments Lint *Lint `spec:"lint"` // Optional global lint config } // Env represents an Atlas environment. Env struct { // Name for this environment. Name string `spec:"name,name"` // URL of the database. URL string `spec:"url"` // URL of the dev-database for this environment. // See: https://atlasgo.io/dev-database DevURL string `spec:"dev"` // List of schemas in this database that are managed by Atlas. Schemas []string `spec:"schemas"` // Exclude defines a list of glob patterns used to filter // resources on inspection. Exclude []string `spec:"exclude"` // Migration containing the migration configuration of the env. Migration *Migration `spec:"migration"` // Lint of the environment. Lint *Lint `spec:"lint"` schemahcl.DefaultExtension } // Migration represents the migration directory for the Env. Migration struct { Dir string `spec:"dir"` Format string `spec:"format"` RevisionsSchema string `spec:"revisions_schema"` } // Lint represents the configuration of migration linting. Lint struct { // Log configures the --log option. Log string `spec:"log"` // Latest configures the --latest option. Latest int `spec:"latest"` Git struct { // Dir configures the --git-dir option. Dir string `spec:"dir"` // Base configures the --git-base option. Base string `spec:"base"` } `spec:"git"` schemahcl.DefaultExtension } ) // Extend allows extending environment blocks with // a global one. For example: // // lint { // log = < schema.hcl This file can then be edited and used with the` + " `atlas schema apply` " + `command to plan and execute schema migrations against the given database. 
In cases where users wish to inspect all multiple schemas in a given database (for instance a MySQL server may contain multiple named databases), omit the relevant part from the url, e.g. "mysql://user:pass@localhost:3306/". To select specific schemas from the databases, users may use the "--schema" (or "-s" shorthand) flag. `, PreRunE: schemaFlagsFromEnv, RunE: CmdInspectRun, Example: ` atlas schema inspect -u "mysql://user:pass@localhost:3306/dbname" atlas schema inspect -u "mariadb://user:pass@localhost:3306/" --schema=schemaA,schemaB -s schemaC atlas schema inspect --url "postgres://user:pass@host:port/dbname?sslmode=disable" atlas schema inspect -u "sqlite://file:ex1.db?_fk=1"`, } // SchemaFmt represents the 'atlas schema fmt' subcommand. SchemaFmt = &cobra.Command{ Use: "fmt [path ...]", Short: "Formats Atlas HCL files", Long: `'atlas schema fmt' formats all ".hcl" files under the given path using canonical HCL layout style as defined by the github.com/hashicorp/hcl/v2/hclwrite package. Unless stated otherwise, the fmt command will use the current directory. After running, the command will print the names of the files it has formatted. If all files in the directory are formatted, no input will be printed out. `, Run: CmdFmtRun, } ) const ( answerApply = "Apply" answerAbort = "Abort" ) func init() { // Common flags. receivesEnv(schemaCmd) // Schema apply flags. schemaCmd.AddCommand(SchemaApply) SchemaApply.Flags().SortFlags = false SchemaApply.Flags().StringSliceVarP(&ApplyFlags.Paths, fileFlag, "f", nil, "[paths...] 
file or directory containing the HCL files") SchemaApply.Flags().StringVarP(&SchemaFlags.URL, urlFlag, "u", "", "URL to the database using the format:\n[driver://username:password@address/dbname?param=value]") SchemaApply.Flags().StringSliceVarP(&SchemaFlags.Exclude, excludeFlag, "", nil, "List of glob patterns used to filter resources from applying.") SchemaApply.Flags().StringSliceVarP(&SchemaFlags.Schemas, schemaFlag, "s", nil, "Set schema names.") SchemaApply.Flags().StringVarP(&ApplyFlags.DevURL, devURLFlag, "", "", "URL for the dev database. Used to validate schemas and calculate diffs\nbefore running migration.") SchemaApply.Flags().BoolVarP(&ApplyFlags.DryRun, "dry-run", "", false, "Dry-run. Print SQL plan without prompting for execution.") SchemaApply.Flags().BoolVarP(&ApplyFlags.AutoApprove, autoApproveFlag, "", false, "Auto approve. Apply the schema changes without prompting for approval.") SchemaApply.Flags().StringVarP(&SchemaFlags.DSN, dsnFlag, "d", "", "") cobra.CheckErr(SchemaApply.Flags().MarkHidden(dsnFlag)) cobra.CheckErr(SchemaApply.MarkFlagRequired(urlFlag)) cobra.CheckErr(SchemaApply.MarkFlagRequired(fileFlag)) // Schema clean flags. SchemaClean.Flags().StringVarP(&CleanFlags.URL, urlFlag, "u", "", "URL to the database using the format:\n[driver://username:password@address/dbname?param=value]") SchemaClean.Flags().BoolVarP(&CleanFlags.AutoApprove, autoApproveFlag, "", false, "Auto approve. Apply the schema changes without prompting for approval.") cobra.CheckErr(SchemaClean.MarkFlagRequired(urlFlag)) // Schema inspect flags. 
schemaCmd.AddCommand(SchemaInspect) SchemaInspect.Flags().StringVarP(&SchemaFlags.URL, urlFlag, "u", "", "[driver://username:password@protocol(address)/dbname?param=value] select a database using the URL format") SchemaInspect.Flags().StringSliceVarP(&SchemaFlags.Schemas, schemaFlag, "s", nil, "Set schema name") SchemaInspect.Flags().StringSliceVarP(&SchemaFlags.Exclude, excludeFlag, "", nil, "List of glob patterns used to filter resources from inspection") SchemaInspect.Flags().StringVarP(&SchemaFlags.DSN, dsnFlag, "d", "", "") cobra.CheckErr(SchemaInspect.Flags().MarkHidden(dsnFlag)) cobra.CheckErr(SchemaInspect.MarkFlagRequired(urlFlag)) // Schema fmt. schemaCmd.AddCommand(SchemaFmt) schemaCmd.AddCommand(SchemaClean) } // selectEnv returns the Env from the current project file based on the selected // argument. If selected is "", or no project file exists in the current directory // a zero-value Env is returned. func selectEnv(selected string) (*Env, error) { env := &Env{ Lint: &Lint{}, Migration: &Migration{}, } if selected == "" { return env, nil } if _, err := os.Stat(projectFileName); os.IsNotExist(err) { return nil, fmt.Errorf("project file %q was not found", projectFileName) } return LoadEnv(projectFileName, selected, WithInput(GlobalFlags.Vars)) } func schemaFlagsFromEnv(cmd *cobra.Command, _ []string) error { activeEnv, err := selectEnv(GlobalFlags.SelectedEnv) if err != nil { return err } if err := inputValsFromEnv(cmd); err != nil { return err } if err := dsn2url(cmd); err != nil { return err } if err := maySetFlag(cmd, urlFlag, activeEnv.URL); err != nil { return err } if err := maySetFlag(cmd, devURLFlag, activeEnv.DevURL); err != nil { return err } srcs, err := activeEnv.Sources() if err != nil { return err } if err := maySetFlag(cmd, fileFlag, strings.Join(srcs, "")); err != nil { return err } if s := strings.Join(activeEnv.Schemas, ","); s != "" { if err := maySetFlag(cmd, schemaFlag, s); err != nil { return err } } if s := 
strings.Join(activeEnv.Exclude, ","); s != "" { if err := maySetFlag(cmd, excludeFlag, s); err != nil { return err } } return nil } // maySetFlag sets the flag with the provided name to envVal if such a flag exists // on the cmd, it was not set by the user via the command line and if envVal is not // an empty string. func maySetFlag(cmd *cobra.Command, name, envVal string) error { fl := cmd.Flag(name) if fl == nil { return nil } if fl.Changed { return nil } if envVal == "" { return nil } return cmd.Flags().Set(name, envVal) } func dsn2url(cmd *cobra.Command) error { dsnF, urlF := cmd.Flag(dsnFlag), cmd.Flag(urlFlag) switch { case dsnF == nil: case dsnF.Changed && urlF.Changed: return errors.New(`both flags "url" and "dsn" were set`) case dsnF.Changed && !urlF.Changed: return cmd.Flags().Set(urlFlag, dsnF.Value.String()) } return nil } // CmdInspectRun is the command used when running CLI. func CmdInspectRun(cmd *cobra.Command, _ []string) error { // Create the client. client, err := sqlclient.Open(cmd.Context(), SchemaFlags.URL) if err != nil { return err } defer client.Close() schemas := SchemaFlags.Schemas if client.URL.Schema != "" { schemas = append(schemas, client.URL.Schema) } s, err := client.InspectRealm(cmd.Context(), &schema.InspectRealmOption{ Schemas: schemas, Exclude: SchemaFlags.Exclude, }) if err != nil { return err } ddl, err := client.MarshalSpec(s) if err != nil { return err } cmd.Print(string(ddl)) return nil } // CmdApplyRun is the command used when running CLI. func CmdApplyRun(cmd *cobra.Command, _ []string) error { c, err := sqlclient.Open(cmd.Context(), SchemaFlags.URL) if err != nil { return err } defer c.Close() return applyRun(cmd, c, ApplyFlags.DevURL, ApplyFlags.Paths, ApplyFlags.DryRun, ApplyFlags.AutoApprove, GlobalFlags.Vars) } // CmdCleanRun is the command executed when running the CLI with 'schema clean' args. func CmdCleanRun(cmd *cobra.Command, _ []string) error { // Open a client to the database. 
c, err := sqlclient.Open(cmd.Context(), CleanFlags.URL) if err != nil { return err } defer c.Close() var drop []schema.Change // If the connection is bound to a schema, only drop the resources inside the schema. switch c.URL.Schema { case "": r, err := c.InspectRealm(cmd.Context(), nil) if err != nil { return err } drop, err = c.RealmDiff(r, nil) if err != nil { return err } default: s, err := c.InspectSchema(cmd.Context(), c.URL.Schema, nil) if err != nil { return err } drop, err = c.SchemaDiff(s, schema.New(s.Name)) if err != nil { return err } } if len(drop) == 0 { cmd.Println("Nothing to drop") return nil } if err := summary(cmd, c, drop); err != nil { return err } if CleanFlags.AutoApprove || promptUser() { if err := c.ApplyChanges(cmd.Context(), drop); err != nil { return err } } return nil } func summary(cmd *cobra.Command, drv migrate.Driver, changes []schema.Change) error { p, err := drv.PlanChanges(cmd.Context(), "", changes) if err != nil { return err } cmd.Println("-- Planned Changes:") for _, c := range p.Changes { if c.Comment != "" { cmd.Println("--", strings.ToUpper(c.Comment[:1])+c.Comment[1:]) } cmd.Println(c.Cmd) } return nil } // CmdFmtRun formats all HCL files in a given directory using canonical HCL formatting // rules. func CmdFmtRun(cmd *cobra.Command, args []string) { if len(args) == 0 { args = append(args, "./") } for _, path := range args { handlePath(cmd, path) } } func applyRun(cmd *cobra.Command, client *sqlclient.Client, devURL string, paths []string, dryRun, autoApprove bool, input map[string]string) error { schemas, ctx := SchemaFlags.Schemas, cmd.Context() if client.URL.Schema != "" { schemas = append(schemas, client.URL.Schema) } realm, err := client.InspectRealm(ctx, &schema.InspectRealmOption{ Schemas: schemas, Exclude: SchemaFlags.Exclude, }) if err != nil { return err } desired := &schema.Realm{} parsed, err := parseHCLPaths(paths...) 
if err != nil { return err } if err := client.Eval(parsed, desired, input); err != nil { return err } if len(schemas) > 0 { // Validate all schemas in file were selected by user. sm := make(map[string]bool, len(schemas)) for _, s := range schemas { sm[s] = true } for _, s := range desired.Schemas { if !sm[s.Name] { return fmt.Errorf("schema %q was not selected %q, all schemas defined in file must be selected", s.Name, schemas) } } } if _, ok := client.Driver.(schema.Normalizer); ok && devURL != "" { dev, err := sqlclient.Open(ctx, ApplyFlags.DevURL) if err != nil { return err } defer dev.Close() desired, err = dev.Driver.(schema.Normalizer).NormalizeRealm(ctx, desired) if err != nil { return err } } changes, err := client.RealmDiff(realm, desired) if err != nil { return err } if len(changes) == 0 { cmd.Println("Schema is synced, no changes to be made") return nil } if err := summary(cmd, client, changes); err != nil { return err } if !dryRun && (autoApprove || promptUser()) { if err := client.ApplyChanges(ctx, changes); err != nil { return err } } return nil } func promptUser() bool { prompt := promptui.Select{ Label: "Are you sure?", Items: []string{answerApply, answerAbort}, } _, result, err := prompt.Run() cobra.CheckErr(err) return result == answerApply } func handlePath(cmd *cobra.Command, path string) { tasks, err := tasks(path) cobra.CheckErr(err) for _, task := range tasks { changed, err := fmtFile(task) cobra.CheckErr(err) if changed { cmd.Println(task.path) } } } func tasks(path string) ([]fmttask, error) { var tasks []fmttask stat, err := os.Stat(path) if err != nil { return nil, err } if !stat.IsDir() { if strings.HasSuffix(path, ".hcl") { tasks = append(tasks, fmttask{ path: path, info: stat, }) } return tasks, nil } all, err := os.ReadDir(path) if err != nil { return nil, err } for _, f := range all { if f.IsDir() { continue } if strings.HasSuffix(f.Name(), ".hcl") { i, err := f.Info() if err != nil { return nil, err } tasks = append(tasks, fmttask{ 
path: filepath.Join(path, f.Name()), info: i, }) } } return tasks, nil } type fmttask struct { path string info fs.FileInfo } // fmtFile tries to format a file and reports if formatting occurred. func fmtFile(task fmttask) (bool, error) { orig, err := os.ReadFile(task.path) if err != nil { return false, err } formatted := hclwrite.Format(orig) if !bytes.Equal(formatted, orig) { return true, os.WriteFile(task.path, formatted, task.info.Mode()) } return false, nil } atlas-0.7.2/cmd/atlas/internal/cmdapi/schema_test.go000066400000000000000000000100201431455511600223700ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package cmdapi import ( "bytes" "context" "fmt" "io/ioutil" "os" "path" "path/filepath" "testing" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/sqlclient" "github.com/stretchr/testify/require" ) const ( unformatted = `block "x" { x = 1 y = 2 } ` formatted = `block "x" { x = 1 y = 2 } ` ) func TestFmt(t *testing.T) { for _, tt := range []struct { name string inputDir map[string]string expectedDir map[string]string expectedFile string expectedOut string args []string expectedPrint bool }{ { name: "specific file", inputDir: map[string]string{ "test.hcl": unformatted, }, expectedDir: map[string]string{ "test.hcl": formatted, }, args: []string{"test.hcl"}, expectedOut: "test.hcl\n", }, { name: "current dir", inputDir: map[string]string{ "test.hcl": unformatted, }, expectedDir: map[string]string{ "test.hcl": formatted, }, expectedOut: "test.hcl\n", }, { name: "multi path implicit", inputDir: map[string]string{ "test.hcl": unformatted, "test2.hcl": unformatted, }, expectedDir: map[string]string{ "test.hcl": formatted, "test2.hcl": formatted, }, expectedOut: "test.hcl\ntest2.hcl\n", }, { name: "multi path explicit", inputDir: map[string]string{ "test.hcl": unformatted, "test2.hcl": 
unformatted, }, expectedDir: map[string]string{ "test.hcl": formatted, "test2.hcl": formatted, }, args: []string{"test.hcl", "test2.hcl"}, expectedOut: "test.hcl\ntest2.hcl\n", }, { name: "formatted", inputDir: map[string]string{ "test.hcl": formatted, }, expectedDir: map[string]string{ "test.hcl": formatted, }, }, } { t.Run(tt.name, func(t *testing.T) { dir := setupFmtTest(t, tt.inputDir) out := runFmt(t, tt.args) assertDir(t, dir, tt.expectedDir) require.EqualValues(t, tt.expectedOut, out) }) } } func TestSchema_Clean(t *testing.T) { var ( u = fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", filepath.Join(t.TempDir(), "test.db")) c, err = sqlclient.Open(context.Background(), u) ) require.NoError(t, err) // Apply migrations onto database. _, err = runCmd(Root, "migrate", "apply", "--dir", "file://testdata/sqlite", "--url", u) require.NoError(t, err) // Run clean and expect to be clean. _, err = runCmd(Root, "migrate", "apply", "--dir", "file://testdata/sqlite", "--url", u) require.NoError(t, err) s, err := runCmd(Root, "schema", "clean", "--url", u, "--auto-approve") require.NoError(t, err) require.NotZero(t, s) require.NoError(t, c.Driver.(migrate.CleanChecker).CheckClean(context.Background(), nil)) } func runFmt(t *testing.T, args []string) string { var out bytes.Buffer SchemaFmt.ResetCommands() // Detach from sub-commands and parents, needed to skip input validation done by them. 
SchemaFmt.SetOut(&out) SchemaFmt.SetArgs(args) err := SchemaFmt.Execute() require.NoError(t, err) return out.String() } func assertDir(t *testing.T, dir string, expected map[string]string) { act := make(map[string]string) files, err := ioutil.ReadDir(dir) require.NoError(t, err) for _, f := range files { if f.IsDir() { continue } contents, err := os.ReadFile(filepath.Join(dir, f.Name())) require.NoError(t, err) act[f.Name()] = string(contents) } require.EqualValues(t, expected, act) } func setupFmtTest(t *testing.T, inputDir map[string]string) string { wd, err := os.Getwd() require.NoError(t, err) dir, err := os.MkdirTemp(os.TempDir(), "fmt-test-") require.NoError(t, err) err = os.Chdir(dir) require.NoError(t, err) t.Cleanup(func() { os.RemoveAll(dir) os.Chdir(wd) //nolint:errcheck }) for name, contents := range inputDir { file := path.Join(dir, name) err = os.WriteFile(file, []byte(contents), 0600) } require.NoError(t, err) return dir } atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/000077500000000000000000000000001431455511600213625ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/baseline1/000077500000000000000000000000001431455511600232255ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/baseline1/1_baseline.sql000066400000000000000000000001051431455511600257440ustar00rootroot00000000000000-- create "baseline" table CREATE TABLE baseline (`c` int NOT NULL); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/baseline1/atlas.sum000066400000000000000000000001571431455511600250620ustar00rootroot00000000000000h1:GqOjMVGk2H0qYhhtJ1SPCyyHEBbWP/LD2ueWQTY4e6A= 1_baseline.sql h1:rZgkRmNcN2UEKgxru1nHCpBRVn/fjFavyQ4xxPxhrD4= atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/baseline2/000077500000000000000000000000001431455511600232265ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/baseline2/1_baseline.sql000066400000000000000000000001051431455511600257450ustar00rootroot00000000000000-- create 
"baseline" table CREATE TABLE baseline (`c` int NOT NULL); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/baseline2/20220318104614_initial.sql000066400000000000000000000000751431455511600270430ustar00rootroot00000000000000-- create "tbl" table CREATE TABLE tbl (`col` int NOT NULL); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/baseline2/20220318104615_second.sql000066400000000000000000000000461431455511600266640ustar00rootroot00000000000000ALTER TABLE `tbl` ADD `col_2` bigint; atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/baseline2/atlas.sum000066400000000000000000000004041431455511600250560ustar00rootroot00000000000000h1:sxNQqhuqhm1fLpr3WN9N/M/niaS81Y7k5RrSaKpmBZE= 1_baseline.sql h1:rZgkRmNcN2UEKgxru1nHCpBRVn/fjFavyQ4xxPxhrD4= 20220318104614_initial.sql h1:/B1/+IxzgrRc4tCm1tpcpMhocHgqkdWF+iffxuguYaQ= 20220318104615_second.sql h1:nUc1cUvm8BzjTZdbavM1IRlNpfhtyY3YyZJ8v23K9j4= atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/000077500000000000000000000000001431455511600226745ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/dbmate/000077500000000000000000000000001431455511600241305ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/dbmate/1_initial.sql000066400000000000000000000005231431455511600265220ustar00rootroot00000000000000-- migrate:up CREATE TABLE post ( id int NOT NULL, title text, body text, PRIMARY KEY (id) ); /* Multiline comment ... 
*/ ALTER TABLE post ADD created_at TIMESTAMP NOT NULL; -- Normal comment -- With a second line INSERT INTO post (title) VALUES ( 'This is my multiline value'); -- migrate:down DROP TABLE post;atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/dbmate/2_second_migration.sql000066400000000000000000000000621431455511600304140ustar00rootroot00000000000000 -- migrate:up CREATE TABLE tbl_2 (col INT);atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/dbmate_gold/000077500000000000000000000000001431455511600251355ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/dbmate_gold/1_initial.sql000066400000000000000000000004401431455511600275250ustar00rootroot00000000000000CREATE TABLE post ( id int NOT NULL, title text, body text, PRIMARY KEY (id) ); /* Multiline comment ... */ ALTER TABLE post ADD created_at TIMESTAMP NOT NULL; -- Normal comment -- With a second line INSERT INTO post (title) VALUES ( 'This is my multiline value'); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/dbmate_gold/2_second_migration.sql000066400000000000000000000000361431455511600314220ustar00rootroot00000000000000CREATE TABLE tbl_2 (col INT); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/flyway/000077500000000000000000000000001431455511600242075ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/flyway/B2__baseline.sql000066400000000000000000000003331431455511600271730ustar00rootroot00000000000000CREATE TABLE post ( id int NOT NULL, title text, body text, created_at TIMESTAMP NOT NULL PRIMARY KEY (id) ); INSERT INTO post (title, created_at) VALUES ( 'This is my multiline value', NOW());atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/flyway/R__views.sql000066400000000000000000000000561431455511600265060ustar00rootroot00000000000000CREATE VIEW `my_view` AS SELECT * FROM 
`post`;atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/flyway/U1__initial.sql000066400000000000000000000000171431455511600270630ustar00rootroot00000000000000DROP TABLE tbl;atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/flyway/V1__initial.sql000066400000000000000000000003721431455511600270700ustar00rootroot00000000000000-- comment CREATE TABLE post ( id int NOT NULL, title text, body text, PRIMARY KEY (id) ); ALTER TABLE post ADD created_at TIMESTAMP NOT NULL; INSERT INTO post (title, created_at) VALUES ( 'This is my multiline value', NOW()); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/flyway/V2__second_migration.sql000066400000000000000000000000621431455511600307600ustar00rootroot00000000000000 -- migrate:up CREATE TABLE tbl_2 (col INT);atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/flyway/V3__third_migration.sql000066400000000000000000000000551431455511600306220ustar00rootroot00000000000000ALTER TABLE tbl_2 ADD col_1 INTEGER NOT NULL;atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/flyway_gold/000077500000000000000000000000001431455511600252145ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/flyway_gold/2_baseline.sql000066400000000000000000000003331431455511600277370ustar00rootroot00000000000000CREATE TABLE post ( id int NOT NULL, title text, body text, created_at TIMESTAMP NOT NULL PRIMARY KEY (id) ); INSERT INTO post (title, created_at) VALUES ( 'This is my multiline value', NOW()); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/flyway_gold/3R_views.sql000066400000000000000000000000571431455511600274400ustar00rootroot00000000000000CREATE VIEW `my_view` AS SELECT * FROM `post`; atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/flyway_gold/3_third_migration.sql000066400000000000000000000000561431455511600313430ustar00rootroot00000000000000ALTER TABLE tbl_2 ADD col_1 INTEGER NOT NULL; 
atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/golang-migrate/000077500000000000000000000000001431455511600255715ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/golang-migrate/1_initial.down.sql000066400000000000000000000000171431455511600311270ustar00rootroot00000000000000DROP TABLE tbl;atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/golang-migrate/1_initial.up.sql000066400000000000000000000000411431455511600306010ustar00rootroot00000000000000CREATE TABLE tbl ( col INT );atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/golang-migrate/2_second_migration.down.sql000066400000000000000000000000211431455511600330160ustar00rootroot00000000000000DROP TABLE tbl_2;atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/golang-migrate/2_second_migration.up.sql000066400000000000000000000000351431455511600325000ustar00rootroot00000000000000CREATE TABLE tbl_2 (col INT);atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/golang-migrate_gold/000077500000000000000000000000001431455511600265765ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/golang-migrate_gold/1_initial.sql000066400000000000000000000000421431455511600311640ustar00rootroot00000000000000CREATE TABLE tbl ( col INT ); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/golang-migrate_gold/2_second_migration.sql000066400000000000000000000000361431455511600330630ustar00rootroot00000000000000CREATE TABLE tbl_2 (col INT); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/goose/000077500000000000000000000000001431455511600240105ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/goose/1_initial.sql000066400000000000000000000004111431455511600263760ustar00rootroot00000000000000-- +goose Up CREATE TABLE post ( id int NOT NULL, title text, body text, PRIMARY KEY (id) ); ALTER TABLE post ADD created_at TIMESTAMP NOT NULL; INSERT INTO post (title) VALUES ( 'This is my multiline value'); -- +goose Down 
DROP TABLE post;atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/goose/2_second_migration.sql000066400000000000000000000014371431455511600303030ustar00rootroot00000000000000 -- +goose Up ALTER TABLE post ADD updated_at TIMESTAMP NOT NULL; -- +goose StatementBegin -- Comment for the function declaration. CREATE OR REPLACE FUNCTION histories_partition_creation( DATE, DATE ) returns void AS $$ DECLARE create_query text; BEGIN FOR create_query IN SELECT 'CREATE TABLE IF NOT EXISTS histories_' || TO_CHAR(d, 'YYYY_MM') || ' ( CHECK( created_at >= timestamp ''' || TO_CHAR(d, 'YYYY-MM-DD 00:00:00') || ''' AND created_at < timestamp ''' || TO_CHAR(d + INTERVAL '1 month', 'YYYY-MM-DD 00:00:00') || ''' ) ) inherits ( histories );' FROM generate_series($1, $2, '1 month') AS d LOOP EXECUTE create_query; END LOOP; -- LOOP END END; -- FUNCTION END $$ language plpgsql; -- +goose StatementEndatlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/goose_gold/000077500000000000000000000000001431455511600250155ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/goose_gold/1_initial.sql000066400000000000000000000003321431455511600274050ustar00rootroot00000000000000CREATE TABLE post ( id int NOT NULL, title text, body text, PRIMARY KEY (id) ); ALTER TABLE post ADD created_at TIMESTAMP NOT NULL; INSERT INTO post (title) VALUES ( 'This is my multiline value'); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/goose_gold/2_second_migration.sql000066400000000000000000000013351431455511600313050ustar00rootroot00000000000000ALTER TABLE post ADD updated_at TIMESTAMP NOT NULL; -- Comment for the function declaration. 
CREATE OR REPLACE FUNCTION histories_partition_creation( DATE, DATE ) returns void AS $$ DECLARE create_query text; BEGIN FOR create_query IN SELECT 'CREATE TABLE IF NOT EXISTS histories_' || TO_CHAR(d, 'YYYY_MM') || ' ( CHECK( created_at >= timestamp ''' || TO_CHAR(d, 'YYYY-MM-DD 00:00:00') || ''' AND created_at < timestamp ''' || TO_CHAR(d + INTERVAL '1 month', 'YYYY-MM-DD 00:00:00') || ''' ) ) inherits ( histories );' FROM generate_series($1, $2, '1 month') AS d LOOP EXECUTE create_query; END LOOP; -- LOOP END END; -- FUNCTION END $$ language plpgsql; atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/liquibase/000077500000000000000000000000001431455511600246525ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/liquibase/1_initial.sql000066400000000000000000000006041431455511600272440ustar00rootroot00000000000000--liquibase formatted sql --changeset atlas:1-1 CREATE TABLE post ( id int NOT NULL, title text, body text, PRIMARY KEY (id) ); --rollback: DROP TABLE post; --changeset atlas:1-2 ALTER TABLE post ADD created_at TIMESTAMP NOT NULL; --rollback: ALTER TABLE post DROP created_at; --changeset atlas:1-3 INSERT INTO post (title) VALUES ( 'This is my multiline value'); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/liquibase/2_second_migration.sql000066400000000000000000000001541431455511600311400ustar00rootroot00000000000000--liquibase formatted sql --changeset atlas:2-1 CREATE TABLE tbl_2 (col INT); --rollback DROP TABLE tbl_2; atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/liquibase_gold/000077500000000000000000000000001431455511600256575ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/liquibase_gold/1_initial.sql000066400000000000000000000004341431455511600302520ustar00rootroot00000000000000--changeset atlas:1-1 CREATE TABLE post ( id int NOT NULL, title text, body text, PRIMARY KEY (id) ); --changeset atlas:1-2 ALTER TABLE post ADD created_at TIMESTAMP NOT NULL; --changeset 
atlas:1-3 INSERT INTO post (title) VALUES ( 'This is my multiline value'); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/import/liquibase_gold/2_second_migration.sql000066400000000000000000000000641431455511600321450ustar00rootroot00000000000000--changeset atlas:2-1 CREATE TABLE tbl_2 (col INT); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/mysql/000077500000000000000000000000001431455511600225275ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/mysql/20220318104614_initial.sql000066400000000000000000000002641431455511600263440ustar00rootroot00000000000000-- add new schema named "atlantis" CREATE DATABASE `atlantis`; -- create "tbl" table CREATE TABLE `atlantis`.`tbl` (`col` int NOT NULL) CHARSET utf8mb4 COLLATE utf8mb4_general_ci; atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/mysql/20220420213403_second.sql000066400000000000000000000000571431455511600261550ustar00rootroot00000000000000ALTER TABLE `atlantis`.`tbl` ADD `col_2` TEXT; atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/mysql/atlas.sum000066400000000000000000000003051431455511600243570ustar00rootroot00000000000000h1:EGX5/CEEerpLWqYQNHB1veTXon8t05wEGJiX2fOtFXg= 20220318104614_initial.sql h1:EoDHPlX7fTGn5qiCdR5xhwFh+DrOi3cQ7Y49BsIy97k= 20220420213403_second.sql h1:cAioQjDgJkOIiMAyEwqaurPs0EHpTeu0CnWlJzNj5SE= atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlite/000077500000000000000000000000001431455511600226635ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlite/20220318104614_initial.sql000066400000000000000000000000751431455511600265000ustar00rootroot00000000000000-- create "tbl" table CREATE TABLE tbl (`col` int NOT NULL); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlite/20220318104615_second.sql000066400000000000000000000000461431455511600263210ustar00rootroot00000000000000ALTER TABLE `tbl` ADD `col_2` bigint; 
atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlite/atlas.sum000066400000000000000000000003051431455511600245130ustar00rootroot00000000000000h1:GMi7mvWSIHv0I/Wrc2NCGVt9Z5hWZbPa6wL986t7Z2o= 20220318104614_initial.sql h1:FifWjY2X0g2YVnb18Qm+QBPvoldDOOob7bS0LrFuCXc= 20220318104615_second.sql h1:wbPDlODOQeixCiopAhlT7W4xOO9TgJxzjYjxf4TA2f4= atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlite2/000077500000000000000000000000001431455511600227455ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlite2/20220318104614_initial.sql000066400000000000000000000000751431455511600265620ustar00rootroot00000000000000-- create "tbl" table CREATE TABLE tbl (`col` int NOT NULL); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlite2/20220318104615_second.sql000066400000000000000000000002061431455511600264010ustar00rootroot00000000000000ALTER TABLE `tbl` ADD `col_2` bigint; asdasd ALTER TABLE `tbl` ADD `col_3` bigint; -- will fail ALTER TABLE `tbl` ADD `col_4` bigint; atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlite2/atlas.sum000066400000000000000000000003051431455511600245750ustar00rootroot00000000000000h1:lXdG49p5Vr5b9eARNKq3Gkgd+flbQXDM+XDyB6b2nzw= 20220318104614_initial.sql h1:FifWjY2X0g2YVnb18Qm+QBPvoldDOOob7bS0LrFuCXc= 20220318104615_second.sql h1:UA1TOODS2yU138E2HBlChe/O8vSmTRxkHs4OJOUK3K8= atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlitetx/000077500000000000000000000000001431455511600232375ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlitetx/20220925092817_initial.sql000066400000000000000000000001541431455511600270710ustar00rootroot00000000000000-- create "users" table CREATE TABLE `users` (`id` integer NOT NULL, `name` text NULL, PRIMARY KEY (`id`)); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlitetx/20220925094021_second.sql000066400000000000000000000007161431455511600267040ustar00rootroot00000000000000-- create "friendships" table CREATE TABLE `friendships` (`user_id` integer NOT NULL, `friend_id` 
integer NOT NULL, PRIMARY KEY (`user_id`, `friend_id`), CONSTRAINT `user_id_fk` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE, CONSTRAINT `friend_id_fk` FOREIGN KEY (`friend_id`) REFERENCES `users` (`id`) ON DELETE CASCADE); INSERT INTO `users` (`id`) VALUES (1), (2); INSERT INTO `friendships` (`user_id`, `friend_id`) VALUES (1,2), (2,1); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlitetx/20220925094437_third.sql000066400000000000000000000010421431455511600265470ustar00rootroot00000000000000-- disable the enforcement of foreign-keys constraints PRAGMA foreign_keys = off; -- create "new_users" table CREATE TABLE `new_users` (`id` integer NOT NULL, PRIMARY KEY (`id`)); -- copy rows from old table "users" to new temporary table "new_users" INSERT INTO `new_users` (`id`) SELECT `id` FROM `users`; -- drop "users" table after copying rows DROP TABLE `users`; -- rename temporary table "new_users" to "users" ALTER TABLE `new_users` RENAME TO `users`; -- enable back the enforcement of foreign-keys constraints PRAGMA foreign_keys = on; atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlitetx/atlas.sum000066400000000000000000000004161431455511600250720ustar00rootroot00000000000000h1:09lkmQGTxdoqPwK0ZXtU4+KHipufkVp1Jlje3t6Opy4= 20220925092817_initial.sql h1:ZGeLdeqNUMXqJm+hPkhBrhzbtUzSBH8yVTsnSnJo/qU= 20220925094021_second.sql h1:vcoquz3yk+TlTPiQgW5hHpS/abIvySCM/bzgwYTDoqY= 20220925094437_third.sql h1:2pbBiUBKsEC5+ppfPPTDr+iwJSgZ2rM4qmHI44/vmnc= atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlitetx_2/000077500000000000000000000000001431455511600234605ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlitetx_2/20220925092817_initial.sql000066400000000000000000000001541431455511600273120ustar00rootroot00000000000000-- create "users" table CREATE TABLE `users` (`id` integer NOT NULL, `name` text NULL, PRIMARY KEY (`id`)); 
atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlitetx_2/20220925094021_second.sql000066400000000000000000000007161431455511600271250ustar00rootroot00000000000000-- create "friendships" table CREATE TABLE `friendships` (`user_id` integer NOT NULL, `friend_id` integer NOT NULL, PRIMARY KEY (`user_id`, `friend_id`), CONSTRAINT `user_id_fk` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE, CONSTRAINT `friend_id_fk` FOREIGN KEY (`friend_id`) REFERENCES `users` (`id`) ON DELETE CASCADE); INSERT INTO `users` (`id`) VALUES (1), (2); INSERT INTO `friendships` (`user_id`, `friend_id`) VALUES (1,2), (2,1); atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlitetx_2/20220925094437_third.sql000066400000000000000000000011711431455511600267730ustar00rootroot00000000000000-- disable the enforcement of foreign-keys constraints PRAGMA foreign_keys = off; -- create "new_users" table CREATE TABLE `new_users` (`id` integer NOT NULL, PRIMARY KEY (`id`)); -- copy rows from old table "users" to new temporary table "new_users" INSERT INTO `new_users` (`id`) SELECT `id` FROM `users`; -- drop "users" table after copying rows DROP TABLE `users`; -- rename temporary table "new_users" to "users" ALTER TABLE `new_users` RENAME TO `users`; -- insert faulty data INSERT INTO `friendships` (`user_id`, `friend_id`) VALUES (3,2); -- enable back the enforcement of foreign-keys constraints PRAGMA foreign_keys = on; atlas-0.7.2/cmd/atlas/internal/cmdapi/testdata/sqlitetx_2/atlas.sum000066400000000000000000000004161431455511600253130ustar00rootroot00000000000000h1:eH+7c2mVWbrhky00/3SKYklyHLyz+QzDk1UZJ9+ZJsg= 20220925092817_initial.sql h1:ZGeLdeqNUMXqJm+hPkhBrhzbtUzSBH8yVTsnSnJo/qU= 20220925094021_second.sql h1:vcoquz3yk+TlTPiQgW5hHpS/abIvySCM/bzgwYTDoqY= 20220925094437_third.sql h1:58glD96PVBa0fSu8x/3Gbwbf7N6WjAcVl4jh/XcNXoM= 
atlas-0.7.2/cmd/atlas/internal/docker/000077500000000000000000000000001431455511600175635ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/docker/docker.go000066400000000000000000000175151431455511600213720ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package docker import ( "bytes" "context" "errors" "fmt" "io" "io/ioutil" "log" "net" "net/url" "os" "os/exec" "strconv" "strings" "time" "ariga.io/atlas/sql/sqlclient" "github.com/go-sql-driver/mysql" ) const pass = "pass" type ( // Config is used to configure container creation. Config struct { setup []string // contains statements to execute once the service is up // Image is the name of the image to pull and run. Image string // Env vars to pass to the docker container. Env []string // Internal Port to expose anc connect to. Port string // Out is a custom writer to send docker cli output to. Out io.Writer } // A Container is an instance of a created container. Container struct { cfg Config // Config used to create this container out io.Writer // custom write to log status messages to // ID of the container. ID string // Passphrase of the root user. Passphrase string // Port on the host this containers service is bound to. Port string } // ConfigOption allows configuring Config with functional arguments. ConfigOption func(*Config) error ) // NewConfig returns a new config with the given options applied. func NewConfig(opts ...ConfigOption) (*Config, error) { c := &Config{Out: ioutil.Discard} for _, opt := range opts { if err := opt(c); err != nil { return nil, err } } return c, nil } // MySQL returns a new Config for a MySQL image. 
func MySQL(version string, opts ...ConfigOption) (*Config, error) { return NewConfig( append( []ConfigOption{ Image("mysql:" + version), Port("3306"), Env("MYSQL_ROOT_PASSWORD=" + pass), }, opts..., )..., ) } // MariaDB returns a new Config for a MariaDB image. func MariaDB(version string, opts ...ConfigOption) (*Config, error) { return MySQL(version, append([]ConfigOption{Image("mariadb:" + version)}, opts...)...) } // PostgreSQL returns a new Config for a PostgreSQL image. func PostgreSQL(version string, opts ...ConfigOption) (*Config, error) { return NewConfig( append( []ConfigOption{ Image("postgres:" + version), Port("5432"), Env("POSTGRES_PASSWORD=" + pass), }, opts..., )..., ) } // Image sets the docker image to use. For example: // // Image("mysql") // Image("postgres:13") func Image(i string) ConfigOption { return func(c *Config) error { c.Image = strings.TrimSuffix(i, ":") return nil } } // Port sets the port the container services exposes. For example: // // Port("3306") // Port("5432") func Port(p string) ConfigOption { return func(c *Config) error { c.Port = p return nil } } // Env sets the environment variables to pass to the container. For example: // // Config(Image("mysql"), Env("MYSQL_ROOT_PASSWORD=password")) // Config(Image("postgres"), Env("MYSQL_ROOT_PASSWORD=password")) func Env(env ...string) ConfigOption { return func(c *Config) error { c.Env = env return nil } } // Out sets an io.Writer to use when running docker commands. For example: // // buf := new(bytes.Buffer) // NewConfig(Out(buf)) func Out(w io.Writer) ConfigOption { return func(c *Config) error { c.Out = w return nil } } // setup adds statements to execute once the service is ready. For example: // // setup("DROP SCHEMA IF EXISTS public CASCADE;") func setup(s ...string) ConfigOption { return func(c *Config) error { c.setup = s return nil } } // Run pulls and starts a new docker container from the Config. 
func (c *Config) Run(ctx context.Context) (*Container, error) { // Make sure the configuration is not missing critical values. if err := c.validate(); err != nil { return nil, err } // Get a free host TCP port the container can bind its exposed service port on. p, err := freePort() if err != nil { return nil, fmt.Errorf("getting open port: %w", err) } // Make sure the image is up-to-date. cmd := exec.CommandContext(ctx, "docker", "pull", c.Image) //nolint:gosec cmd.Stdout = c.Out if err := cmd.Run(); err != nil { return nil, fmt.Errorf("pulling image: %w", err) } // Run the container. args := []string{"docker", "run", "--rm", "--detach"} for _, e := range c.Env { args = append(args, "-e", e) } args = append(args, "-p", fmt.Sprintf("%s:%s", p, c.Port), c.Image) cmd = exec.CommandContext(ctx, args[0], args[1:]...) //nolint:gosec out := &bytes.Buffer{} cmd.Stdout = io.MultiWriter(c.Out, out) if err := cmd.Run(); err != nil { return nil, err } return &Container{ cfg: *c, ID: strings.TrimSpace(out.String()), Passphrase: pass, Port: p, out: c.Out, }, nil } // Close stops and removes this container. func (c *Container) Close() error { return exec.Command("docker", "stop", c.ID).Run() //nolint:gosec } // Wait waits for this container to be ready. func (c *Container) Wait(ctx context.Context, timeout time.Duration) error { fmt.Fprintln(c.out, "Waiting for service to be ready ... 
") mysql.SetLogger(log.New(io.Discard, "", 1)) defer mysql.SetLogger(log.New(os.Stderr, "[mysql] ", log.Ldate|log.Ltime|log.Lshortfile)) if timeout > time.Minute { timeout = time.Minute } var ( done = time.After(timeout) u, err = c.URL() ) if err != nil { return err } for { select { case <-time.After(100 * time.Millisecond): client, err := sqlclient.Open(ctx, u) if err != nil { continue } db := client.DB if err := db.PingContext(ctx); err != nil { continue } for _, s := range c.cfg.setup { if _, err := db.ExecContext(ctx, s); err != nil { _ = db.Close() return fmt.Errorf("%q: %w", s, err) } } _ = db.Close() fmt.Fprintln(c.out, "Service is ready to connect!") return nil case <-ctx.Done(): return ctx.Err() case <-done: return errors.New("timeout") } } } // URL returns a URL to connect to the Container. func (c *Container) URL() (string, error) { switch img := strings.SplitN(c.cfg.Image, ":", 2)[0]; img { case "postgres": return fmt.Sprintf("postgres://postgres:%s@localhost:%s/postgres?sslmode=disable", c.Passphrase, c.Port), nil case "mysql", "mariadb": return fmt.Sprintf("%s://root:%s@localhost:%s/", img, c.Passphrase, c.Port), nil default: return "", fmt.Errorf("unknown container image: %q", img) } } // validate that no empty values are given. 
func (c *Config) validate() error { if c == nil || c.Image == "" || c.Port == "" || c.Out == nil { return fmt.Errorf("invalid configuration %q", c) } return nil } func freePort() (string, error) { a, err := net.ResolveTCPAddr("tcp", ":0") if err != nil { return "", err } l, err := net.ListenTCP("tcp", a) if err != nil { return "", err } if err := l.Close(); err != nil { return "", err } return strconv.Itoa(l.Addr().(*net.TCPAddr).Port), nil } func init() { sqlclient.Register("docker", sqlclient.OpenerFunc(client)) } func client(ctx context.Context, u *url.URL) (client *sqlclient.Client, err error) { var cfg *Config switch img, tag := u.Host, strings.TrimPrefix(u.Path, "/"); img { case "mysql": cfg, err = MySQL(tag) case "mariadb": cfg, err = MariaDB(tag) case "postgres": cfg, err = PostgreSQL(tag) default: return nil, fmt.Errorf("unsupported docker image %q", img) } if err != nil { return nil, err } if u.Query().Has("v") || u.Query().Has("verbose") { if err := Out(os.Stdout)(cfg); err != nil { return nil, err } } c, err := cfg.Run(ctx) if err != nil { return nil, err } defer func() { if err != nil { if cerr := c.Close(); err != nil { err = fmt.Errorf("%w: %v", err, cerr) } } }() if err = c.Wait(ctx, time.Minute); err != nil { return nil, err } u1, err := c.URL() if err != nil { return nil, err } if client, err = sqlclient.Open(ctx, u1); err != nil { return nil, err } client.AddClosers(c) return client, nil } atlas-0.7.2/cmd/atlas/internal/docker/docker_test.go000066400000000000000000000021631431455511600224220ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package docker import ( "context" "io" "testing" "github.com/stretchr/testify/require" ) func TestDockerConfig(t *testing.T) { ctx := context.Background() // invalid config _, err := (&Config{}).Run(ctx) require.Error(t, err) // MySQL cfg, err := MySQL("latest", Out(io.Discard)) require.NoError(t, err) require.Equal(t, &Config{ Image: "mysql:latest", Env: []string{"MYSQL_ROOT_PASSWORD=pass"}, Port: "3306", Out: io.Discard, }, cfg) // MariaDB cfg, err = MariaDB("latest", Out(io.Discard)) require.NoError(t, err) require.Equal(t, &Config{ Image: "mariadb:latest", Env: []string{"MYSQL_ROOT_PASSWORD=pass"}, Port: "3306", Out: io.Discard, }, cfg) // PostgreSQL cfg, err = PostgreSQL("latest", Out(io.Discard)) require.NoError(t, err) require.Equal(t, &Config{ Image: "postgres:latest", Env: []string{"POSTGRES_PASSWORD=pass"}, Port: "5432", Out: io.Discard, }, cfg) } atlas-0.7.2/cmd/atlas/internal/lint/000077500000000000000000000000001431455511600172625ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/lint/lint.go000066400000000000000000000220301431455511600205540ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package lint import ( "context" "errors" "fmt" "os/exec" "path/filepath" "strings" "ariga.io/atlas/cmd/atlas/internal/sqlparse" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlcheck" "ariga.io/atlas/sql/sqlclient" ) type ( // A ChangeDetector takes a migration directory and splits it into the "base" files (already merged) and new ones. ChangeDetector interface { // DetectChanges splits the files of a migration directory into the "base" files (already merged) and new ones. DetectChanges(context.Context) ([]migrate.File, []migrate.File, error) } // A ChangeLoader takes a set of migration files and will create multiple schema.Changes out of it. 
ChangeLoader interface { // LoadChanges converts each of the given migration files into one Changes. LoadChanges(context.Context, []migrate.File) (*Changes, error) } // Changes holds schema changes information returned by the loader. Changes struct { From, To *schema.Realm // Current and desired schema. Files []*sqlcheck.File // Files for moving from current to desired state. } ) type ( // GitChangeDetector implements the ChangeDetector interface by utilizing a git repository. GitChangeDetector struct { work string // path to the git working directory (i.e. -C) base string // name of the base branch (e.g. master) path string // path of the migration directory relative to the repository root (in slash notation) dir migrate.Dir // the migration directory to load migration files from } // GitChangeDetectorOption allows configuring GitChangeDetector with functional arguments. GitChangeDetectorOption func(*GitChangeDetector) error ) // NewGitChangeDetector configures a new GitChangeDetector. func NewGitChangeDetector(dir migrate.Dir, opts ...GitChangeDetectorOption) (*GitChangeDetector, error) { if dir == nil { return nil, errors.New("internal/ci: dir cannot be nil") } d := &GitChangeDetector{dir: dir} for _, opt := range opts { if err := opt(d); err != nil { return nil, err } } if d.base == "" { d.base = "master" } if d.path == "" { d.path = "migrations" } return d, nil } // WithWorkDir configures the git working directory for a GitChangeDetector. func WithWorkDir(work string) GitChangeDetectorOption { return func(d *GitChangeDetector) error { d.work = work return nil } } // WithBase configures the git base branch name for a GitChangeDetector. func WithBase(base string) GitChangeDetectorOption { return func(d *GitChangeDetector) error { d.base = base return nil } } // WithMigrationsPath configures the path for the migration directory. 
func WithMigrationsPath(path string) GitChangeDetectorOption { return func(d *GitChangeDetector) error { d.path = filepath.ToSlash(path) return nil } } // DetectChanges implements the ChangeDetector interface. func (d *GitChangeDetector) DetectChanges(ctx context.Context) ([]migrate.File, []migrate.File, error) { if _, err := exec.LookPath("git"); err != nil { return nil, nil, fmt.Errorf("lookup git: %w", err) } var args []string if d.work != "" { args = append(args, "-C", d.work) } args = append(args, "--no-pager", "diff", "--name-only", "--diff-filter=A", d.base, "HEAD", d.path) buf, err := exec.CommandContext(ctx, "git", args...).CombinedOutput() if err != nil { return nil, nil, fmt.Errorf("git diff: %w", err) } diff := strings.Split(string(buf), "\n") names := make(map[string]struct{}, len(diff)) for i := range diff { names[filepath.Base(diff[i])] = struct{}{} } files, err := d.dir.Files() if err != nil { return nil, nil, fmt.Errorf("reading migration directory: %w", err) } // Iterate over the migration files. If we find a file, that has been added in the diff between base and head, // every migration file preceding it can be considered old, the file itself and everything thereafter new, // since Atlas assumes a linear migration history. for i, f := range files { if _, ok := names[f.Name()]; ok { return files[:i], files[i:], nil } } return files, nil, nil } var _ ChangeDetector = (*GitChangeDetector)(nil) // latestChange implements the ChangeDetector by selecting the latest N files. type latestChange struct { n int // number of (latest) files considered new. dir migrate.Dir // migration directory to load migration files from. } // LatestChanges implements the ChangeDetector interface by selecting the latest N files as new. // It is useful for executing analysis on files in development before they are committed or on // all files in a directory. 
func LatestChanges(dir migrate.Dir, n int) ChangeDetector { return &latestChange{n: n, dir: dir} } // DetectChanges implements the ChangeDetector interface. func (d *latestChange) DetectChanges(context.Context) ([]migrate.File, []migrate.File, error) { files, err := d.dir.Files() if err != nil { return nil, nil, fmt.Errorf("internal/ci: reading migration directory: %w", err) } // In case n is -1 or greater than the // number of files, return all files. if len(files) <= d.n || d.n < 0 { return nil, files, nil } return files[:len(files)-d.n], files[len(files)-d.n:], nil } // DevLoader implements the ChangesLoader interface using a dev-driver. type DevLoader struct { // Dev environment used as a sandbox instantiated to the starting point (e.g. base branch). Dev *sqlclient.Client } // LoadChanges implements the ChangesLoader interface. func (d *DevLoader) LoadChanges(ctx context.Context, base, files []migrate.File) (diff *Changes, err error) { unlock, err := d.lock(ctx) if err != nil { return nil, err } defer unlock() // Clean up after ourselves. snap, ok := d.Dev.Driver.(migrate.Snapshoter) if !ok { return nil, errors.New("driver does not implement migrate.Snapshoter") } restore, err := snap.Snapshot(ctx) if err != nil { return nil, fmt.Errorf("taking database snapshot: %w", err) } defer func() { if err2 := restore(ctx); err2 != nil { if err != nil { err2 = fmt.Errorf("%w: %v", err, err2) } err = err2 } }() // Bring the dev environment to the base point. 
for _, f := range base { stmt, err := f.Stmts() if err != nil { return nil, &FileError{File: f.Name(), Err: fmt.Errorf("scanning statements: %w", err)} } for _, s := range stmt { if _, err := d.Dev.ExecContext(ctx, s); err != nil { return nil, &FileError{File: f.Name(), Err: fmt.Errorf("executing statement: %q: %w", s, err)} } } } current, err := d.inspect(ctx) if err != nil { return nil, err } diff = &Changes{ From: current, Files: make([]*sqlcheck.File, len(files)), } for i, f := range files { diff.Files[i] = &sqlcheck.File{ File: f, Parser: sqlparse.ParserFor(d.Dev.Name), } stmts, err := f.StmtDecls() if err != nil { return nil, &FileError{File: f.Name(), Err: fmt.Errorf("scanning statements: %w", err)} } start := current for _, s := range stmts { if _, err := d.Dev.ExecContext(ctx, s.Text); err != nil { return nil, &FileError{File: f.Name(), Err: fmt.Errorf("executing statement: %w", err)} } target, err := d.inspect(ctx) if err != nil { return nil, err } changes, err := d.Dev.RealmDiff(current, target) if err != nil { return nil, err } current = target diff.Files[i].Changes = append(diff.Files[i].Changes, &sqlcheck.Change{ Stmt: s, Changes: d.mayFix(s.Text, changes), }) } if diff.Files[i].Sum, err = d.Dev.RealmDiff(start, current); err != nil { return nil, err } } diff.To = current return diff, nil } // mayFix uses the sqlparse package for fixing or attaching more info to the changes. func (d *DevLoader) mayFix(stmt string, changes schema.Changes) schema.Changes { p := sqlparse.ParserFor(d.Dev.Name) if p == nil { return changes } if fixed, err := p.FixChange(d.Dev.Driver, stmt, changes); err == nil { return fixed } return changes } // inspect the realm and filter by schema if we are connected to one. 
func (d *DevLoader) inspect(ctx context.Context) (*schema.Realm, error) { opts := &schema.InspectRealmOption{} if d.Dev.URL.Schema != "" { opts.Schemas = append(opts.Schemas, d.Dev.URL.Schema) } return d.Dev.InspectRealm(ctx, opts) } // lock database so no one else interferes with our change detection. func (d *DevLoader) lock(ctx context.Context) (schema.UnlockFunc, error) { l, ok := d.Dev.Driver.(schema.Locker) if !ok { return nil, errors.New("driver does not support locking") } name := "atlas_lint" // In case the client is connected to specific schema, // minimize the lock resolution to the schema name. if s := d.Dev.URL.Schema; s != "" { name = fmt.Sprintf("%s_%s", name, s) } unlock, err := l.Lock(ctx, name, 0) if err != nil { return nil, fmt.Errorf("acquiring database lock: %w", err) } return unlock, nil } // FileError represents an error that occurred while processing a file. type FileError struct { File string Err error } func (e FileError) Error() string { return e.Err.Error() } atlas-0.7.2/cmd/atlas/internal/lint/lint_test.go000066400000000000000000000171271431455511600216260ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package lint_test import ( "context" "io/fs" "os" "os/exec" "path/filepath" "strconv" "strings" "testing" "time" "ariga.io/atlas/cmd/atlas/internal/lint" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlclient" _ "ariga.io/atlas/sql/sqlite" _ "github.com/mattn/go-sqlite3" "github.com/stretchr/testify/require" ) func TestGitChangeDetector(t *testing.T) { // Prepare environment. 
root := filepath.Join(t.TempDir(), t.Name(), strconv.FormatInt(time.Now().Unix(), 10)) mdir := filepath.Join(root, "migrations") require.NoError(t, os.MkdirAll(mdir, 0755)) git := func(args ...string) { out, err := exec.Command("git", append([]string{"-C", root}, args...)...).CombinedOutput() require.NoError(t, err, string(out)) } git("init") // Config a fake Git user for the working directory. git("config", "user.name", "a8m") git("config", "user.email", "a8m@atlasgo.io") require.NoError(t, os.WriteFile(filepath.Join(mdir, "1_applied.sql"), []byte("1_applied.sql"), 0644)) require.NoError(t, os.WriteFile(filepath.Join(mdir, "2_applied.sql"), []byte("2_applied.sql"), 0644)) git("add", ".") git("commit", "-m", "applied migrations") git("checkout", "-b", "feature") require.NoError(t, os.WriteFile(filepath.Join(mdir, "3_new.sql"), []byte("3_new.sql"), 0644)) require.NoError(t, os.WriteFile(filepath.Join(mdir, "4_new.sql"), []byte("4_new.sql"), 0644)) git("add", ".") git("commit", "-am", "new migrations") // Test change detector. 
dir, err := migrate.NewLocalDir(mdir) require.NoError(t, err) cs, err := lint.NewGitChangeDetector(dir, lint.WithWorkDir(root)) require.NoError(t, err) base, feat, err := cs.DetectChanges(context.Background()) require.NoError(t, err) require.Len(t, base, 2) require.Len(t, feat, 2) require.Equal(t, "1_applied.sql", base[0].Name()) require.Equal(t, "2_applied.sql", base[1].Name()) require.Equal(t, "3_new.sql", feat[0].Name()) require.Equal(t, "4_new.sql", feat[1].Name()) require.NoError(t, os.WriteFile(filepath.Join(mdir, "5_new.sql"), []byte("5_new.sql"), 0644)) require.NoError(t, os.WriteFile(filepath.Join(mdir, "6_new.sql"), []byte("6_new.sql"), 0644)) git("checkout", "-b", "feature-1") git("add", ".") git("commit", "-am", "new migrations") base, feat, err = cs.DetectChanges(context.Background()) require.NoError(t, err) require.Len(t, base, 2) require.Len(t, feat, 4) require.Equal(t, "5_new.sql", feat[2].Name()) require.Equal(t, "6_new.sql", feat[3].Name()) // Compare feature and feature-1. 
cs, err = lint.NewGitChangeDetector(dir, lint.WithWorkDir(root), lint.WithBase("feature")) require.NoError(t, err) base, feat, err = cs.DetectChanges(context.Background()) require.NoError(t, err) require.Len(t, base, 4) require.Len(t, feat, 2) require.Equal(t, "1_applied.sql", base[0].Name()) require.Equal(t, "2_applied.sql", base[1].Name()) require.Equal(t, "3_new.sql", base[2].Name()) require.Equal(t, "4_new.sql", base[3].Name()) require.Equal(t, "5_new.sql", feat[0].Name()) require.Equal(t, "6_new.sql", feat[1].Name()) } func TestLatestChanges(t *testing.T) { files := []migrate.File{ testFile{name: "1.sql", content: "CREATE TABLE t1 (id INT)"}, testFile{name: "2.sql", content: "CREATE TABLE t2 (id INT)\nDROP TABLE users"}, } base, feat, err := lint.LatestChanges(testDir{files: files}, 0).DetectChanges(context.Background()) require.NoError(t, err) require.Equal(t, files, base) require.Empty(t, feat) base, feat, err = lint.LatestChanges(testDir{files: files}, 2).DetectChanges(context.Background()) require.NoError(t, err) require.Empty(t, base) require.Equal(t, files, feat) base, feat, err = lint.LatestChanges(testDir{files: files}, -1).DetectChanges(context.Background()) require.NoError(t, err) require.Empty(t, base) require.Equal(t, files, feat) base, feat, err = lint.LatestChanges(testDir{files: files}, 1).DetectChanges(context.Background()) require.NoError(t, err) require.Equal(t, files[:1], base) require.Equal(t, files[1:], feat) } func TestDevLoader_LoadChanges(t *testing.T) { ctx := context.Background() c, err := sqlclient.Open(ctx, "sqlite://ci?mode=memory&cache=shared&_fk=1") require.NoError(t, err) defer c.Close() l := &lint.DevLoader{Dev: c} diff, err := l.LoadChanges(ctx, nil, nil) require.NoError(t, err) require.Empty(t, diff.Files) base := []migrate.File{ testFile{name: "base.sql", content: "CREATE TABLE users (id INT)"}, } files := []migrate.File{ testFile{name: "1.sql", content: "CREATE TABLE t1 (id INT)\nINSERT INTO t1 (id) VALUES (1)"}, 
testFile{name: "2.sql", content: "CREATE TABLE t2 (id INT)\nDROP TABLE users"}, testFile{name: "3.sql", content: "CREATE TABLE t3 (id INT)\nDROP TABLE t3"}, testFile{name: "4.sql", content: "ALTER TABLE t2 RENAME id TO oid"}, } diff, err = l.LoadChanges(ctx, base, files) require.NoError(t, err) require.Len(t, diff.Files, 4) // File 1. require.Equal(t, files[0], diff.Files[0].File) require.Len(t, diff.Files[0].Changes, 2) require.Zero(t, diff.Files[0].Changes[0].Stmt.Pos) require.Equal(t, "CREATE TABLE t1 (id INT)", diff.Files[0].Changes[0].Stmt.Text) require.IsType(t, (*schema.AddTable)(nil), diff.Files[0].Changes[0].Changes[0]) require.Equal(t, "INSERT INTO t1 (id) VALUES (1)", diff.Files[0].Changes[1].Stmt.Text) require.Empty(t, diff.Files[0].Changes[1].Changes) // File 2. require.Equal(t, files[1], diff.Files[1].File) require.Len(t, diff.Files[1].Changes, 2) require.Zero(t, diff.Files[1].Changes[0].Stmt.Pos) require.Equal(t, "CREATE TABLE t2 (id INT)", diff.Files[1].Changes[0].Stmt.Text) require.IsType(t, (*schema.AddTable)(nil), diff.Files[1].Changes[0].Changes[0]) require.Zero(t, diff.Files[1].Changes[0].Stmt.Pos) require.Equal(t, "DROP TABLE users", diff.Files[1].Changes[1].Stmt.Text) require.IsType(t, (*schema.DropTable)(nil), diff.Files[1].Changes[1].Changes[0]) // File 3. require.Equal(t, files[2], diff.Files[2].File) require.IsType(t, (*schema.AddTable)(nil), diff.Files[2].Changes[0].Changes[0]) require.IsType(t, (*schema.DropTable)(nil), diff.Files[2].Changes[1].Changes[0]) require.Empty(t, diff.Files[2].Sum) // File 3. require.Equal(t, files[3], diff.Files[3].File) require.IsType(t, (*schema.ModifyTable)(nil), diff.Files[3].Changes[0].Changes[0]) require.IsType(t, (*schema.RenameColumn)(nil), diff.Files[3].Changes[0].Changes[0].(*schema.ModifyTable).Changes[0]) // Changes. 
changes, err := c.RealmDiff(diff.From, diff.To) require.NoError(t, err) require.Len(t, changes, 3) err = c.ApplyChanges(ctx, []schema.Change{ &schema.AddTable{ T: schema.NewTable("users").AddColumns(schema.NewIntColumn("id", "int")), }, }) require.NoError(t, err) _, err = l.LoadChanges(ctx, base, files) require.ErrorAs(t, err, &migrate.NotCleanError{}) } type testDir struct { migrate.Dir files []migrate.File } func (t testDir) Path() string { return "migrations" } func (t testDir) Open(string) (fs.File, error) { return nil, fs.ErrNotExist } func (t testDir) Files() ([]migrate.File, error) { return t.files, nil } type testFile struct { migrate.File name, content string } func (f testFile) Name() string { return f.name } func (f testFile) Bytes() []byte { return []byte(f.content) } func (f testFile) Stmts() ([]string, error) { return strings.Split(string(f.Bytes()), "\n"), nil } func (f testFile) StmtDecls() (stmts []*migrate.Stmt, err error) { s, err := f.Stmts() if err != nil { return nil, err } for _, s := range s { stmts = append(stmts, &migrate.Stmt{Text: s}) } return } atlas-0.7.2/cmd/atlas/internal/lint/run.go000066400000000000000000000237531431455511600204270ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package lint import ( "context" "encoding/json" "errors" "fmt" "io" "strings" "text/template" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/sqlcheck" "ariga.io/atlas/sql/sqlclient" "golang.org/x/exp/slices" ) // Runner is used to execute CI jobs. type Runner struct { // DevClient configures the "dev driver" to calculate // migration changes by the driver. Dev *sqlclient.Client // RunChangeDetector configures the ChangeDetector to // be used by the runner. ChangeDetector ChangeDetector // Dir is used for scanning and validating the migration directory. 
Dir migrate.Dir // Analyzers defines the analysis to be run in the CI job. Analyzers []sqlcheck.Analyzer // ReportWriter writes the summary report. ReportWriter ReportWriter // summary report. reset on each run. sum *SummaryReport } // Run executes the CI job. func (r *Runner) Run(ctx context.Context) error { switch err := r.summary(ctx); err.(type) { case nil: if err := r.ReportWriter.WriteReport(r.sum); err != nil { return err } // If any of the analyzers returns // an error, fail silently. for _, f := range r.sum.Files { if f.Error != "" { return SilentError{} } } return nil case *FileError: if err := r.ReportWriter.WriteReport(r.sum); err != nil { return err } return SilentError{error: err} default: return err } } const ( stepIntegrityCheck = "Migration Integrity Check" stepDetectChanges = "Detect New Migration Files" stepLoadChanges = "Replay Migration Files" stepAnalyzeFile = "Analyze %s" ) func (r *Runner) summary(ctx context.Context) error { r.sum = NewSummaryReport(r.Dev, r.Dir) // Integrity check. switch err := migrate.Validate(r.Dir); { case errors.Is(err, migrate.ErrChecksumNotFound): case err != nil: err := &FileError{File: migrate.HashFileName, Err: err} r.sum.Files = append(r.sum.Files, &FileReport{Name: migrate.HashFileName, Error: err.Error()}) return r.sum.StepError(stepIntegrityCheck, fmt.Sprintf("File %s is invalid", migrate.HashFileName), err) default: r.sum.StepResult(stepIntegrityCheck, fmt.Sprintf("File %s is valid", migrate.HashFileName), nil) } // Detect new migration files. base, feat, err := r.ChangeDetector.DetectChanges(ctx) if err != nil { return r.sum.StepError(stepDetectChanges, "Failed find new migration files", err) } r.sum.StepResult(stepDetectChanges, fmt.Sprintf("Found %d new migration files (from %d total)", len(feat), len(base)+len(feat)), nil) // Load files into changes. 
l := &DevLoader{Dev: r.Dev} diff, err := l.LoadChanges(ctx, base, feat) if err != nil { if fr := (&FileError{}); errors.As(err, &fr) { r.sum.Files = append(r.sum.Files, &FileReport{Name: fr.File, Error: err.Error()}) } return r.sum.StepError(stepLoadChanges, "Failed loading changes on dev database", err) } r.sum.StepResult(stepLoadChanges, fmt.Sprintf("Loaded %d changes on dev database", len(diff.Files)), nil) r.sum.WriteSchema(r.Dev, diff) // Analyze files. for _, f := range diff.Files { var ( es []string nl = nolintRules(f) fr = NewFileReport(f) ) for _, az := range r.Analyzers { if err := az.Analyze(ctx, &sqlcheck.Pass{ File: f, Dev: r.Dev, Reporter: nl.reporterFor(fr, az), }); err != nil && !nl.skipped { es = append(es, err.Error()) } } fr.Error = strings.Join(es, "; ") r.sum.Files = append(r.sum.Files, fr) r.sum.StepResult( fmt.Sprintf(stepAnalyzeFile, f.Name()), fmt.Sprintf("%d reports were found in analysis", len(fr.Reports)), fr, ) } return nil } var ( // TemplateFuncs are global functions available in templates. TemplateFuncs = template.FuncMap{ "json": func(v any) (string, error) { b, err := json.Marshal(v) return string(b), err }, } // DefaultTemplate is the default template used by the CI job. DefaultTemplate = template.Must(template.New("report"). Funcs(TemplateFuncs). Parse(` {{- range $f := .Files }} {{- /* If there is an error but not diagnostics, print it. */}} {{- if and $f.Error (not $f.Reports) }} {{- printf "%s: %s\n" $f.Name $f.Error }} {{- else }} {{- range $r := $f.Reports }} {{- if $r.Text }} {{- printf "%s: %s:\n\n" $f.Name $r.Text }} {{- else if $r.Diagnostics }} {{- printf "Unnamed diagnostics for file %s:\n\n" $f.Name }} {{- end }} {{- range $d := $r.Diagnostics }} {{- printf "\tL%d: %s\n" ($f.Line $d.Pos) $d.Text }} {{- end }} {{- if $r.Diagnostics }} {{- print "\n" }} {{- end }} {{- end }} {{- end }} {{- end -}} `)) ) type ( // A SummaryReport contains a summary of the analysis of all files. 
// It is used as an input to templates to report the CI results. SummaryReport struct { // Env holds the environment information. Env struct { Driver string `json:"Driver,omitempty"` // Driver name. URL *sqlclient.URL `json:"URL,omitempty"` // URL to dev database. Dir string `json:"Dir,omitempty"` // Path to migration directory. } // Steps of the analysis. Added in verbose mode. Steps []struct { Name string `json:"Name,omitempty"` // Step name. Text string `json:"Text,omitempty"` // Step description. Error string `json:"Error,omitempty"` // Error that cause the execution to halt. Result any `json:"Result,omitempty"` // Result of the step. For example, a diagnostic. } // Schema versions found by the runner. Schema struct { Current string `json:"Current,omitempty"` // Current schema. Desired string `json:"Desired,omitempty"` // Desired schema. } // Files reports. Non-empty in case there are findings. Files []*FileReport `json:"Files,omitempty"` } // FileReport contains a summary of the analysis of a single file. FileReport struct { Name string `json:"Name,omitempty"` // Name of the file. Text string `json:"Text,omitempty"` // Contents of the file. Reports []sqlcheck.Report `json:"Reports,omitempty"` // List of reports. Error string `json:"Error,omitempty"` // File specific error. } // ReportWriter is a type of report writer that writes a summary of analysis reports. ReportWriter interface { WriteReport(*SummaryReport) error } // A TemplateWriter is a type of writer that writes output according to a template. TemplateWriter struct { T *template.Template W io.Writer } // SilentError is returned in case the wrapped error is already // printed by the runner and should not be printed by its caller SilentError struct{ error } ) // NewSummaryReport returns a new SummaryReport. 
func NewSummaryReport(c *sqlclient.Client, dir migrate.Dir) *SummaryReport { sum := &SummaryReport{ Env: struct { Driver string `json:"Driver,omitempty"` URL *sqlclient.URL `json:"URL,omitempty"` Dir string `json:"Dir,omitempty"` }{ Driver: c.Name, URL: c.URL, }, Files: make([]*FileReport, 0), } if p, ok := dir.(interface{ Path() string }); ok { sum.Env.Dir = p.Path() } return sum } // StepResult appends step result to the summary. func (f *SummaryReport) StepResult(name, text string, result any) { f.Steps = append(f.Steps, struct { Name string `json:"Name,omitempty"` Text string `json:"Text,omitempty"` Error string `json:"Error,omitempty"` Result any `json:"Result,omitempty"` }{ Name: name, Text: text, Result: result, }) } // StepError appends step error to the summary. func (f *SummaryReport) StepError(name, text string, err error) error { f.Steps = append(f.Steps, struct { Name string `json:"Name,omitempty"` Text string `json:"Text,omitempty"` Error string `json:"Error,omitempty"` Result any `json:"Result,omitempty"` }{ Name: name, Text: text, Error: err.Error(), }) return err } // WriteSchema writes the current and desired schema to the summary. func (f *SummaryReport) WriteSchema(c *sqlclient.Client, diff *Changes) { if curr, err := c.MarshalSpec(diff.From); err == nil { f.Schema.Current = string(curr) } if desired, err := c.MarshalSpec(diff.To); err == nil { f.Schema.Desired = string(desired) } } // NewFileReport returns a new FileReport. func NewFileReport(f migrate.File) *FileReport { return &FileReport{Name: f.Name(), Text: string(f.Bytes())} } // Line returns the line number from a position. func (f *FileReport) Line(pos int) int { return strings.Count(f.Text[:pos], "\n") + 1 } // WriteReport implements sqlcheck.ReportWriter. func (f *FileReport) WriteReport(r sqlcheck.Report) { f.Reports = append(f.Reports, r) } // WriteReport implements ReportWriter. 
func (w *TemplateWriter) WriteReport(r *SummaryReport) error { return w.T.Execute(w.W, r) } func nolintRules(f *sqlcheck.File) *skipRules { s := &skipRules{pos2rules: make(map[int][]string)} for _, c := range f.Changes { for _, d := range c.Stmt.Directive("nolint") { s.pos2rules[c.Stmt.Pos] = append(s.pos2rules[c.Stmt.Pos], strings.Split(d, " ")...) } } return s } type skipRules struct { pos2rules map[int][]string // statement positions to rules skipped bool // last one skipped } func (s *skipRules) reporterFor(rw sqlcheck.ReportWriter, az sqlcheck.Analyzer) sqlcheck.ReportWriter { return sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { var ( ds = make([]sqlcheck.Diagnostic, 0, len(r.Diagnostics)) az, ok = az.(sqlcheck.NamedAnalyzer) ) for _, d := range r.Diagnostics { switch rules := s.pos2rules[d.Pos]; { case // A directive without specific classes/codes // (e.g. atlas:nolint) ignore all diagnostics. len(rules) == 1 && rules[0] == "", // Match a specific code/diagnostic. e.g. atlas:nolint DS101. slices.Contains(rules, d.Code), // Skip the entire analyzer (class of changes). ok && slices.Contains(rules, az.Name()): default: ds = append(ds, d) } } if s.skipped = len(ds) == 0; !s.skipped { rw.WriteReport(sqlcheck.Report{Text: r.Text, Diagnostics: ds}) } }) } atlas-0.7.2/cmd/atlas/internal/lint/run_test.go000066400000000000000000000070311431455511600214550ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package lint_test import ( "bytes" "context" _ "embed" "fmt" "testing" "text/template" "ariga.io/atlas/cmd/atlas/internal/lint" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/sqlcheck" "ariga.io/atlas/sql/sqlclient" "github.com/stretchr/testify/require" ) func TestRunner_Run(t *testing.T) { ctx := context.Background() b := &bytes.Buffer{} c, err := sqlclient.Open(ctx, "sqlite://run?mode=memory&cache=shared&_fk=1") require.NoError(t, err) r := &lint.Runner{ Dir: testDir{}, Dev: c, ChangeDetector: testDetector{ base: []migrate.File{ testFile{name: "1.sql", content: "CREATE TABLE users (id INT)"}, }, feat: []migrate.File{ testFile{name: "2.sql", content: "CREATE TABLE pets (id INT)\nDROP TABLE users"}, }, }, Analyzers: []sqlcheck.Analyzer{ &testAnalyzer{}, }, ReportWriter: &lint.TemplateWriter{ T: lint.DefaultTemplate, W: b, }, } require.NoError(t, r.Run(ctx)) passes := r.Analyzers[0].(*testAnalyzer).passes require.Len(t, passes, 1) changes := passes[0].File.Changes require.Len(t, changes, 2) require.Equal(t, "CREATE TABLE pets (id INT)", changes[0].Stmt.Text) require.Equal(t, "DROP TABLE users", changes[1].Stmt.Text) require.Equal(t, `2.sql: Report 1: L1: Diagnostic 1 `, b.String()) b.Reset() r.ReportWriter.(*lint.TemplateWriter).T = template.Must(template.New(""). Funcs(lint.TemplateFuncs). 
Parse(` Env: {{ .Env.Driver }}, {{ .Env.Dir }} Steps: {{ range $s := .Steps }} {{- if $s.Error }} "Error in step " {{ $s.Name }} ": " {{ $s.Error }} {{- else }} {{- json $s }} {{- end }} {{ end }} {{- if .Files }} Files: {{ range $f := .Files }} {{- json $f }} {{ end }} {{- end }} Current Schema: {{ .Schema.Current }} Desired Schema: {{ .Schema.Desired }} `)) require.NoError(t, r.Run(ctx)) require.Equal(t, ` Env: sqlite3, migrations Steps: {"Name":"Detect New Migration Files","Text":"Found 1 new migration files (from 2 total)"} {"Name":"Replay Migration Files","Text":"Loaded 1 changes on dev database"} {"Name":"Analyze 2.sql","Text":"1 reports were found in analysis","Result":{"Name":"2.sql","Text":"CREATE TABLE pets (id INT)\nDROP TABLE users","Reports":[{"Text":"Report 2","Diagnostics":[{"Pos":1,"Text":"Diagnostic 1","Code":""},{"Pos":2,"Text":"Diagnostic 2","Code":""}]}]}} Files: {"Name":"2.sql","Text":"CREATE TABLE pets (id INT)\nDROP TABLE users","Reports":[{"Text":"Report 2","Diagnostics":[{"Pos":1,"Text":"Diagnostic 1","Code":""},{"Pos":2,"Text":"Diagnostic 2","Code":""}]}]} Current Schema: table "users" { schema = schema.main column "id" { null = true type = int } } schema "main" { } Desired Schema: table "pets" { schema = schema.main column "id" { null = true type = int } } schema "main" { } `, b.String()) } type testAnalyzer struct { passes []*sqlcheck.Pass } func (t *testAnalyzer) Analyze(_ context.Context, p *sqlcheck.Pass) error { t.passes = append(t.passes, p) r := sqlcheck.Report{ Text: fmt.Sprintf("Report %d", len(t.passes)), } for i := 1; i <= len(t.passes); i++ { r.Diagnostics = append(r.Diagnostics, sqlcheck.Diagnostic{ Pos: i, Text: fmt.Sprintf("Diagnostic %d", i), }) } p.Reporter.WriteReport(r) return nil } type testDetector struct { base, feat []migrate.File } func (t testDetector) DetectChanges(context.Context) ([]migrate.File, []migrate.File, error) { return t.base, t.feat, nil } 
atlas-0.7.2/cmd/atlas/internal/migrate/000077500000000000000000000000001431455511600177445ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/migrate/ent/000077500000000000000000000000001431455511600205325ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/migrate/ent/client.go000066400000000000000000000150031431455511600223360ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package ent import ( "context" "fmt" "log" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/migrate" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/revision" "entgo.io/ent/dialect" "entgo.io/ent/dialect/sql" ) // Client is the client that holds all ent builders. type Client struct { config // Schema is the client for creating, migrating and dropping schema. Schema *migrate.Schema // Revision is the client for interacting with the Revision builders. Revision *RevisionClient } // NewClient creates a new client configured with the given options. func NewClient(opts ...Option) *Client { cfg := config{log: log.Println, hooks: &hooks{}} cfg.options(opts...) client := &Client{config: cfg} client.init() return client } func (c *Client) init() { c.Schema = migrate.NewSchema(c.driver) c.Revision = NewRevisionClient(c.config) } // Open opens a database/sql.DB specified by the driver name and // the data source name, and returns a new client attached to it. // Optional parameters can be added for configuring the client. 
func Open(driverName, dataSourceName string, options ...Option) (*Client, error) { switch driverName { case dialect.MySQL, dialect.Postgres, dialect.SQLite: drv, err := sql.Open(driverName, dataSourceName) if err != nil { return nil, err } return NewClient(append(options, Driver(drv))...), nil default: return nil, fmt.Errorf("unsupported driver: %q", driverName) } } // Tx returns a new transactional client. The provided context // is used until the transaction is committed or rolled back. func (c *Client) Tx(ctx context.Context) (*Tx, error) { if _, ok := c.driver.(*txDriver); ok { return nil, fmt.Errorf("ent: cannot start a transaction within a transaction") } tx, err := newTx(ctx, c.driver) if err != nil { return nil, fmt.Errorf("ent: starting a transaction: %w", err) } cfg := c.config cfg.driver = tx return &Tx{ ctx: ctx, config: cfg, Revision: NewRevisionClient(cfg), }, nil } // BeginTx returns a transactional client with specified options. func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error) { if _, ok := c.driver.(*txDriver); ok { return nil, fmt.Errorf("ent: cannot start a transaction within a transaction") } tx, err := c.driver.(interface { BeginTx(context.Context, *sql.TxOptions) (dialect.Tx, error) }).BeginTx(ctx, opts) if err != nil { return nil, fmt.Errorf("ent: starting a transaction: %w", err) } cfg := c.config cfg.driver = &txDriver{tx: tx, drv: c.driver} return &Tx{ ctx: ctx, config: cfg, Revision: NewRevisionClient(cfg), }, nil } // Debug returns a new debug-client. It's used to get verbose logging on specific operations. // // client.Debug(). // Revision. // Query(). // Count(ctx) func (c *Client) Debug() *Client { if c.debug { return c } cfg := c.config cfg.driver = dialect.Debug(c.driver, c.log) client := &Client{config: cfg} client.init() return client } // Close closes the database connection and prevents new queries from starting. 
func (c *Client) Close() error { return c.driver.Close() } // Use adds the mutation hooks to all the entity clients. // In order to add hooks to a specific client, call: `client.Node.Use(...)`. func (c *Client) Use(hooks ...Hook) { c.Revision.Use(hooks...) } // RevisionClient is a client for the Revision schema. type RevisionClient struct { config } // NewRevisionClient returns a client for the Revision from the given config. func NewRevisionClient(c config) *RevisionClient { return &RevisionClient{config: c} } // Use adds a list of mutation hooks to the hooks stack. // A call to `Use(f, g, h)` equals to `revision.Hooks(f(g(h())))`. func (c *RevisionClient) Use(hooks ...Hook) { c.hooks.Revision = append(c.hooks.Revision, hooks...) } // Create returns a builder for creating a Revision entity. func (c *RevisionClient) Create() *RevisionCreate { mutation := newRevisionMutation(c.config, OpCreate) return &RevisionCreate{config: c.config, hooks: c.Hooks(), mutation: mutation} } // CreateBulk returns a builder for creating a bulk of Revision entities. func (c *RevisionClient) CreateBulk(builders ...*RevisionCreate) *RevisionCreateBulk { return &RevisionCreateBulk{config: c.config, builders: builders} } // Update returns an update builder for Revision. func (c *RevisionClient) Update() *RevisionUpdate { mutation := newRevisionMutation(c.config, OpUpdate) return &RevisionUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation} } // UpdateOne returns an update builder for the given entity. func (c *RevisionClient) UpdateOne(r *Revision) *RevisionUpdateOne { mutation := newRevisionMutation(c.config, OpUpdateOne, withRevision(r)) return &RevisionUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} } // UpdateOneID returns an update builder for the given id. 
func (c *RevisionClient) UpdateOneID(id string) *RevisionUpdateOne { mutation := newRevisionMutation(c.config, OpUpdateOne, withRevisionID(id)) return &RevisionUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} } // Delete returns a delete builder for Revision. func (c *RevisionClient) Delete() *RevisionDelete { mutation := newRevisionMutation(c.config, OpDelete) return &RevisionDelete{config: c.config, hooks: c.Hooks(), mutation: mutation} } // DeleteOne returns a builder for deleting the given entity. func (c *RevisionClient) DeleteOne(r *Revision) *RevisionDeleteOne { return c.DeleteOneID(r.ID) } // DeleteOne returns a builder for deleting the given entity by its id. func (c *RevisionClient) DeleteOneID(id string) *RevisionDeleteOne { builder := c.Delete().Where(revision.ID(id)) builder.mutation.id = &id builder.mutation.op = OpDeleteOne return &RevisionDeleteOne{builder} } // Query returns a query builder for Revision. func (c *RevisionClient) Query() *RevisionQuery { return &RevisionQuery{ config: c.config, } } // Get returns a Revision entity by its id. func (c *RevisionClient) Get(ctx context.Context, id string) (*Revision, error) { return c.Query().Where(revision.ID(id)).Only(ctx) } // GetX is like Get, but panics if an error occurs. func (c *RevisionClient) GetX(ctx context.Context, id string) *Revision { obj, err := c.Get(ctx, id) if err != nil { panic(err) } return obj } // Hooks returns the client hooks. func (c *RevisionClient) Hooks() []Hook { return c.hooks.Revision } atlas-0.7.2/cmd/atlas/internal/migrate/ent/config.go000066400000000000000000000055701431455511600223350ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. 
package ent import ( "context" stdsql "database/sql" "fmt" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/internal" "entgo.io/ent" "entgo.io/ent/dialect" ) // Option function to configure the client. type Option func(*config) // Config is the configuration for the client and its builder. type config struct { // driver used for executing database requests. driver dialect.Driver // debug enable a debug logging. debug bool // log used for logging on debug mode. log func(...interface{}) // hooks to execute on mutations. hooks *hooks // schemaConfig contains alternative names for all tables. schemaConfig SchemaConfig } // hooks per client, for fast access. type hooks struct { Revision []ent.Hook } // Options applies the options on the config object. func (c *config) options(opts ...Option) { for _, opt := range opts { opt(c) } if c.debug { c.driver = dialect.Debug(c.driver, c.log) } } // Debug enables debug logging on the ent.Driver. func Debug() Option { return func(c *config) { c.debug = true } } // Log sets the logging function for debug mode. func Log(fn func(...interface{})) Option { return func(c *config) { c.log = fn } } // Driver configures the client driver. func Driver(driver dialect.Driver) Option { return func(c *config) { c.driver = driver } } // SchemaConfig represents alternative schema names for all tables // that can be passed at runtime. type SchemaConfig = internal.SchemaConfig // AlternateSchemas allows alternate schema names to be // passed into ent operations. func AlternateSchema(schemaConfig SchemaConfig) Option { return func(c *config) { c.schemaConfig = schemaConfig } } // ExecContext allows calling the underlying ExecContext method of the driver if it is supported by it. // See, database/sql#DB.ExecContext for more information. 
func (c *config) ExecContext(ctx context.Context, query string, args ...interface{}) (stdsql.Result, error) { ex, ok := c.driver.(interface { ExecContext(context.Context, string, ...interface{}) (stdsql.Result, error) }) if !ok { return nil, fmt.Errorf("Driver.ExecContext is not supported") } return ex.ExecContext(ctx, query, args...) } // QueryContext allows calling the underlying QueryContext method of the driver if it is supported by it. // See, database/sql#DB.QueryContext for more information. func (c *config) QueryContext(ctx context.Context, query string, args ...interface{}) (*stdsql.Rows, error) { q, ok := c.driver.(interface { QueryContext(context.Context, string, ...interface{}) (*stdsql.Rows, error) }) if !ok { return nil, fmt.Errorf("Driver.QueryContext is not supported") } return q.QueryContext(ctx, query, args...) } atlas-0.7.2/cmd/atlas/internal/migrate/ent/context.go000066400000000000000000000020661431455511600225510ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package ent import ( "context" ) type clientCtxKey struct{} // FromContext returns a Client stored inside a context, or nil if there isn't one. func FromContext(ctx context.Context) *Client { c, _ := ctx.Value(clientCtxKey{}).(*Client) return c } // NewContext returns a new context with the given Client attached. func NewContext(parent context.Context, c *Client) context.Context { return context.WithValue(parent, clientCtxKey{}, c) } type txCtxKey struct{} // TxFromContext returns a Tx stored inside a context, or nil if there isn't one. func TxFromContext(ctx context.Context) *Tx { tx, _ := ctx.Value(txCtxKey{}).(*Tx) return tx } // NewTxContext returns a new context with the given Tx attached. 
func NewTxContext(parent context.Context, tx *Tx) context.Context { return context.WithValue(parent, txCtxKey{}, tx) } atlas-0.7.2/cmd/atlas/internal/migrate/ent/convert.go000066400000000000000000000024261431455511600225450ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package ent import "ariga.io/atlas/sql/migrate" // SetRevision takes the values for each field from the given migrate.Revision. func (rc *RevisionCreate) SetRevision(rev *migrate.Revision) *RevisionCreate { rc.SetID(rev.Version) rc.SetDescription(rev.Description) rc.SetType(rev.Type) rc.SetApplied(rev.Applied) rc.SetTotal(rev.Total) rc.SetExecutedAt(rev.ExecutedAt) rc.SetExecutionTime(rev.ExecutionTime) rc.SetError(rev.Error) rc.SetHash(rev.Hash) rc.SetPartialHashes(rev.PartialHashes) rc.SetOperatorVersion(rev.OperatorVersion) return rc } // AtlasRevision returns an migrate.Revision from the current Revision. func (r *Revision) AtlasRevision() *migrate.Revision { return &migrate.Revision{ Version: r.ID, Description: r.Description, Type: r.Type, Applied: r.Applied, Total: r.Total, ExecutedAt: r.ExecutedAt, ExecutionTime: r.ExecutionTime, Error: r.Error, Hash: r.Hash, PartialHashes: r.PartialHashes, OperatorVersion: r.OperatorVersion, } } atlas-0.7.2/cmd/atlas/internal/migrate/ent/ent.go000066400000000000000000000275471431455511600216660ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. 
package ent import ( "context" "errors" "fmt" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/revision" "entgo.io/ent" "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" ) // ent aliases to avoid import conflicts in user's code. type ( Op = ent.Op Hook = ent.Hook Value = ent.Value Query = ent.Query Policy = ent.Policy Mutator = ent.Mutator Mutation = ent.Mutation MutateFunc = ent.MutateFunc ) // OrderFunc applies an ordering on the sql selector. type OrderFunc func(*sql.Selector) // columnChecker returns a function indicates if the column exists in the given column. func columnChecker(table string) func(string) error { checks := map[string]func(string) bool{ revision.Table: revision.ValidColumn, } check, ok := checks[table] if !ok { return func(string) error { return fmt.Errorf("unknown table %q", table) } } return func(column string) error { if !check(column) { return fmt.Errorf("unknown column %q for table %q", column, table) } return nil } } // Asc applies the given fields in ASC order. func Asc(fields ...string) OrderFunc { return func(s *sql.Selector) { check := columnChecker(s.TableName()) for _, f := range fields { if err := check(f); err != nil { s.AddError(&ValidationError{Name: f, err: fmt.Errorf("ent: %w", err)}) } s.OrderBy(sql.Asc(s.C(f))) } } } // Desc applies the given fields in DESC order. func Desc(fields ...string) OrderFunc { return func(s *sql.Selector) { check := columnChecker(s.TableName()) for _, f := range fields { if err := check(f); err != nil { s.AddError(&ValidationError{Name: f, err: fmt.Errorf("ent: %w", err)}) } s.OrderBy(sql.Desc(s.C(f))) } } } // AggregateFunc applies an aggregation step on the group-by traversal/selector. type AggregateFunc func(*sql.Selector) string // As is a pseudo aggregation function for renaming another other functions with custom names. For example: // // GroupBy(field1, field2). // Aggregate(ent.As(ent.Sum(field1), "sum_field1"), (ent.As(ent.Sum(field2), "sum_field2")). 
// Scan(ctx, &v) func As(fn AggregateFunc, end string) AggregateFunc { return func(s *sql.Selector) string { return sql.As(fn(s), end) } } // Count applies the "count" aggregation function on each group. func Count() AggregateFunc { return func(s *sql.Selector) string { return sql.Count("*") } } // Max applies the "max" aggregation function on the given field of each group. func Max(field string) AggregateFunc { return func(s *sql.Selector) string { check := columnChecker(s.TableName()) if err := check(field); err != nil { s.AddError(&ValidationError{Name: field, err: fmt.Errorf("ent: %w", err)}) return "" } return sql.Max(s.C(field)) } } // Mean applies the "mean" aggregation function on the given field of each group. func Mean(field string) AggregateFunc { return func(s *sql.Selector) string { check := columnChecker(s.TableName()) if err := check(field); err != nil { s.AddError(&ValidationError{Name: field, err: fmt.Errorf("ent: %w", err)}) return "" } return sql.Avg(s.C(field)) } } // Min applies the "min" aggregation function on the given field of each group. func Min(field string) AggregateFunc { return func(s *sql.Selector) string { check := columnChecker(s.TableName()) if err := check(field); err != nil { s.AddError(&ValidationError{Name: field, err: fmt.Errorf("ent: %w", err)}) return "" } return sql.Min(s.C(field)) } } // Sum applies the "sum" aggregation function on the given field of each group. func Sum(field string) AggregateFunc { return func(s *sql.Selector) string { check := columnChecker(s.TableName()) if err := check(field); err != nil { s.AddError(&ValidationError{Name: field, err: fmt.Errorf("ent: %w", err)}) return "" } return sql.Sum(s.C(field)) } } // ValidationError returns when validating a field or edge fails. type ValidationError struct { Name string // Field or edge name. err error } // Error implements the error interface. 
func (e *ValidationError) Error() string { return e.err.Error() } // Unwrap implements the errors.Wrapper interface. func (e *ValidationError) Unwrap() error { return e.err } // IsValidationError returns a boolean indicating whether the error is a validation error. func IsValidationError(err error) bool { if err == nil { return false } var e *ValidationError return errors.As(err, &e) } // NotFoundError returns when trying to fetch a specific entity and it was not found in the database. type NotFoundError struct { label string } // Error implements the error interface. func (e *NotFoundError) Error() string { return "ent: " + e.label + " not found" } // IsNotFound returns a boolean indicating whether the error is a not found error. func IsNotFound(err error) bool { if err == nil { return false } var e *NotFoundError return errors.As(err, &e) } // MaskNotFound masks not found error. func MaskNotFound(err error) error { if IsNotFound(err) { return nil } return err } // NotSingularError returns when trying to fetch a singular entity and more then one was found in the database. type NotSingularError struct { label string } // Error implements the error interface. func (e *NotSingularError) Error() string { return "ent: " + e.label + " not singular" } // IsNotSingular returns a boolean indicating whether the error is a not singular error. func IsNotSingular(err error) bool { if err == nil { return false } var e *NotSingularError return errors.As(err, &e) } // NotLoadedError returns when trying to get a node that was not loaded by the query. type NotLoadedError struct { edge string } // Error implements the error interface. func (e *NotLoadedError) Error() string { return "ent: " + e.edge + " edge was not loaded" } // IsNotLoaded returns a boolean indicating whether the error is a not loaded error. 
func IsNotLoaded(err error) bool { if err == nil { return false } var e *NotLoadedError return errors.As(err, &e) } // ConstraintError returns when trying to create/update one or more entities and // one or more of their constraints failed. For example, violation of edge or // field uniqueness. type ConstraintError struct { msg string wrap error } // Error implements the error interface. func (e ConstraintError) Error() string { return "ent: constraint failed: " + e.msg } // Unwrap implements the errors.Wrapper interface. func (e *ConstraintError) Unwrap() error { return e.wrap } // IsConstraintError returns a boolean indicating whether the error is a constraint failure. func IsConstraintError(err error) bool { if err == nil { return false } var e *ConstraintError return errors.As(err, &e) } // selector embedded by the different Select/GroupBy builders. type selector struct { label string flds *[]string scan func(context.Context, interface{}) error } // ScanX is like Scan, but panics if an error occurs. func (s *selector) ScanX(ctx context.Context, v interface{}) { if err := s.scan(ctx, v); err != nil { panic(err) } } // Strings returns list of strings from a selector. It is only allowed when selecting one field. func (s *selector) Strings(ctx context.Context) ([]string, error) { if len(*s.flds) > 1 { return nil, errors.New("ent: Strings is not achievable when selecting more than 1 field") } var v []string if err := s.scan(ctx, &v); err != nil { return nil, err } return v, nil } // StringsX is like Strings, but panics if an error occurs. func (s *selector) StringsX(ctx context.Context) []string { v, err := s.Strings(ctx) if err != nil { panic(err) } return v } // String returns a single string from a selector. It is only allowed when selecting one field. 
func (s *selector) String(ctx context.Context) (_ string, err error) { var v []string if v, err = s.Strings(ctx); err != nil { return } switch len(v) { case 1: return v[0], nil case 0: err = &NotFoundError{s.label} default: err = fmt.Errorf("ent: Strings returned %d results when one was expected", len(v)) } return } // StringX is like String, but panics if an error occurs. func (s *selector) StringX(ctx context.Context) string { v, err := s.String(ctx) if err != nil { panic(err) } return v } // Ints returns list of ints from a selector. It is only allowed when selecting one field. func (s *selector) Ints(ctx context.Context) ([]int, error) { if len(*s.flds) > 1 { return nil, errors.New("ent: Ints is not achievable when selecting more than 1 field") } var v []int if err := s.scan(ctx, &v); err != nil { return nil, err } return v, nil } // IntsX is like Ints, but panics if an error occurs. func (s *selector) IntsX(ctx context.Context) []int { v, err := s.Ints(ctx) if err != nil { panic(err) } return v } // Int returns a single int from a selector. It is only allowed when selecting one field. func (s *selector) Int(ctx context.Context) (_ int, err error) { var v []int if v, err = s.Ints(ctx); err != nil { return } switch len(v) { case 1: return v[0], nil case 0: err = &NotFoundError{s.label} default: err = fmt.Errorf("ent: Ints returned %d results when one was expected", len(v)) } return } // IntX is like Int, but panics if an error occurs. func (s *selector) IntX(ctx context.Context) int { v, err := s.Int(ctx) if err != nil { panic(err) } return v } // Float64s returns list of float64s from a selector. It is only allowed when selecting one field. 
func (s *selector) Float64s(ctx context.Context) ([]float64, error) { if len(*s.flds) > 1 { return nil, errors.New("ent: Float64s is not achievable when selecting more than 1 field") } var v []float64 if err := s.scan(ctx, &v); err != nil { return nil, err } return v, nil } // Float64sX is like Float64s, but panics if an error occurs. func (s *selector) Float64sX(ctx context.Context) []float64 { v, err := s.Float64s(ctx) if err != nil { panic(err) } return v } // Float64 returns a single float64 from a selector. It is only allowed when selecting one field. func (s *selector) Float64(ctx context.Context) (_ float64, err error) { var v []float64 if v, err = s.Float64s(ctx); err != nil { return } switch len(v) { case 1: return v[0], nil case 0: err = &NotFoundError{s.label} default: err = fmt.Errorf("ent: Float64s returned %d results when one was expected", len(v)) } return } // Float64X is like Float64, but panics if an error occurs. func (s *selector) Float64X(ctx context.Context) float64 { v, err := s.Float64(ctx) if err != nil { panic(err) } return v } // Bools returns list of bools from a selector. It is only allowed when selecting one field. func (s *selector) Bools(ctx context.Context) ([]bool, error) { if len(*s.flds) > 1 { return nil, errors.New("ent: Bools is not achievable when selecting more than 1 field") } var v []bool if err := s.scan(ctx, &v); err != nil { return nil, err } return v, nil } // BoolsX is like Bools, but panics if an error occurs. func (s *selector) BoolsX(ctx context.Context) []bool { v, err := s.Bools(ctx) if err != nil { panic(err) } return v } // Bool returns a single bool from a selector. It is only allowed when selecting one field. 
func (s *selector) Bool(ctx context.Context) (_ bool, err error) { var v []bool if v, err = s.Bools(ctx); err != nil { return } switch len(v) { case 1: return v[0], nil case 0: err = &NotFoundError{s.label} default: err = fmt.Errorf("ent: Bools returned %d results when one was expected", len(v)) } return } // BoolX is like Bool, but panics if an error occurs. func (s *selector) BoolX(ctx context.Context) bool { v, err := s.Bool(ctx) if err != nil { panic(err) } return v } // queryHook describes an internal hook for the different sqlAll methods. type queryHook func(context.Context, *sqlgraph.QuerySpec) atlas-0.7.2/cmd/atlas/internal/migrate/ent/entc.go000066400000000000000000000014661431455511600220210ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. //go:build ignore package main import ( "log" "entgo.io/ent/entc" "entgo.io/ent/entc/gen" ) func main() { err := entc.Generate("./schema", &gen.Config{ Header: `// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. `, Features: []gen.Feature{ gen.FeatureUpsert, gen.FeatureExecQuery, gen.FeatureSchemaConfig, }, }, entc.TemplateDir("template")) if err != nil { log.Fatalf("running ent codegen: %v", err) } } atlas-0.7.2/cmd/atlas/internal/migrate/ent/enttest/000077500000000000000000000000001431455511600222205ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/migrate/ent/enttest/enttest.go000066400000000000000000000041011431455511600242310ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. 
// This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package enttest import ( "context" "ariga.io/atlas/cmd/atlas/internal/migrate/ent" // required by schema hooks. _ "ariga.io/atlas/cmd/atlas/internal/migrate/ent/runtime" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/migrate" "entgo.io/ent/dialect/sql/schema" ) type ( // TestingT is the interface that is shared between // testing.T and testing.B and used by enttest. TestingT interface { FailNow() Error(...interface{}) } // Option configures client creation. Option func(*options) options struct { opts []ent.Option migrateOpts []schema.MigrateOption } ) // WithOptions forwards options to client creation. func WithOptions(opts ...ent.Option) Option { return func(o *options) { o.opts = append(o.opts, opts...) } } // WithMigrateOptions forwards options to auto migration. func WithMigrateOptions(opts ...schema.MigrateOption) Option { return func(o *options) { o.migrateOpts = append(o.migrateOpts, opts...) } } func newOptions(opts []Option) *options { o := &options{} for _, opt := range opts { opt(o) } return o } // Open calls ent.Open and auto-run migration. func Open(t TestingT, driverName, dataSourceName string, opts ...Option) *ent.Client { o := newOptions(opts) c, err := ent.Open(driverName, dataSourceName, o.opts...) if err != nil { t.Error(err) t.FailNow() } migrateSchema(t, c, o) return c } // NewClient calls ent.NewClient and auto-run migration. func NewClient(t TestingT, opts ...Option) *ent.Client { o := newOptions(opts) c := ent.NewClient(o.opts...) 
migrateSchema(t, c, o) return c } func migrateSchema(t TestingT, c *ent.Client, o *options) { tables, err := schema.CopyTables(migrate.Tables) if err != nil { t.Error(err) t.FailNow() } if err := migrate.Create(context.Background(), c.Schema, tables, o.migrateOpts...); err != nil { t.Error(err) t.FailNow() } } atlas-0.7.2/cmd/atlas/internal/migrate/ent/generate.go000066400000000000000000000003621431455511600226540ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package ent //go:generate go run entc.go atlas-0.7.2/cmd/atlas/internal/migrate/ent/hook/000077500000000000000000000000001431455511600214725ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/migrate/ent/hook/hook.go000066400000000000000000000123511431455511600227630ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package hook import ( "context" "fmt" "ariga.io/atlas/cmd/atlas/internal/migrate/ent" ) // The RevisionFunc type is an adapter to allow the use of ordinary // function as Revision mutator. type RevisionFunc func(context.Context, *ent.RevisionMutation) (ent.Value, error) // Mutate calls f(ctx, m). func (f RevisionFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { mv, ok := m.(*ent.RevisionMutation) if !ok { return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.RevisionMutation", m) } return f(ctx, mv) } // Condition is a hook condition function. type Condition func(context.Context, ent.Mutation) bool // And groups conditions with the AND operator. 
func And(first, second Condition, rest ...Condition) Condition { return func(ctx context.Context, m ent.Mutation) bool { if !first(ctx, m) || !second(ctx, m) { return false } for _, cond := range rest { if !cond(ctx, m) { return false } } return true } } // Or groups conditions with the OR operator. func Or(first, second Condition, rest ...Condition) Condition { return func(ctx context.Context, m ent.Mutation) bool { if first(ctx, m) || second(ctx, m) { return true } for _, cond := range rest { if cond(ctx, m) { return true } } return false } } // Not negates a given condition. func Not(cond Condition) Condition { return func(ctx context.Context, m ent.Mutation) bool { return !cond(ctx, m) } } // HasOp is a condition testing mutation operation. func HasOp(op ent.Op) Condition { return func(_ context.Context, m ent.Mutation) bool { return m.Op().Is(op) } } // HasAddedFields is a condition validating `.AddedField` on fields. func HasAddedFields(field string, fields ...string) Condition { return func(_ context.Context, m ent.Mutation) bool { if _, exists := m.AddedField(field); !exists { return false } for _, field := range fields { if _, exists := m.AddedField(field); !exists { return false } } return true } } // HasClearedFields is a condition validating `.FieldCleared` on fields. func HasClearedFields(field string, fields ...string) Condition { return func(_ context.Context, m ent.Mutation) bool { if exists := m.FieldCleared(field); !exists { return false } for _, field := range fields { if exists := m.FieldCleared(field); !exists { return false } } return true } } // HasFields is a condition validating `.Field` on fields. func HasFields(field string, fields ...string) Condition { return func(_ context.Context, m ent.Mutation) bool { if _, exists := m.Field(field); !exists { return false } for _, field := range fields { if _, exists := m.Field(field); !exists { return false } } return true } } // If executes the given hook under condition. 
// // hook.If(ComputeAverage, And(HasFields(...), HasAddedFields(...))) func If(hk ent.Hook, cond Condition) ent.Hook { return func(next ent.Mutator) ent.Mutator { return ent.MutateFunc(func(ctx context.Context, m ent.Mutation) (ent.Value, error) { if cond(ctx, m) { return hk(next).Mutate(ctx, m) } return next.Mutate(ctx, m) }) } } // On executes the given hook only for the given operation. // // hook.On(Log, ent.Delete|ent.Create) func On(hk ent.Hook, op ent.Op) ent.Hook { return If(hk, HasOp(op)) } // Unless skips the given hook only for the given operation. // // hook.Unless(Log, ent.Update|ent.UpdateOne) func Unless(hk ent.Hook, op ent.Op) ent.Hook { return If(hk, Not(HasOp(op))) } // FixedError is a hook returning a fixed error. func FixedError(err error) ent.Hook { return func(ent.Mutator) ent.Mutator { return ent.MutateFunc(func(context.Context, ent.Mutation) (ent.Value, error) { return nil, err }) } } // Reject returns a hook that rejects all operations that match op. // // func (T) Hooks() []ent.Hook { // return []ent.Hook{ // Reject(ent.Delete|ent.Update), // } // } func Reject(op ent.Op) ent.Hook { hk := FixedError(fmt.Errorf("%s operation is not allowed", op)) return On(hk, op) } // Chain acts as a list of hooks and is effectively immutable. // Once created, it will always hold the same set of hooks in the same order. type Chain struct { hooks []ent.Hook } // NewChain creates a new chain of hooks. func NewChain(hooks ...ent.Hook) Chain { return Chain{append([]ent.Hook(nil), hooks...)} } // Hook chains the list of hooks and returns the final hook. func (c Chain) Hook() ent.Hook { return func(mutator ent.Mutator) ent.Mutator { for i := len(c.hooks) - 1; i >= 0; i-- { mutator = c.hooks[i](mutator) } return mutator } } // Append extends a chain, adding the specified hook // as the last ones in the mutation flow. func (c Chain) Append(hooks ...ent.Hook) Chain { newHooks := make([]ent.Hook, 0, len(c.hooks)+len(hooks)) newHooks = append(newHooks, c.hooks...) 
newHooks = append(newHooks, hooks...) return Chain{newHooks} } // Extend extends a chain, adding the specified chain // as the last ones in the mutation flow. func (c Chain) Extend(chain Chain) Chain { return c.Append(chain.hooks...) } atlas-0.7.2/cmd/atlas/internal/migrate/ent/internal/000077500000000000000000000000001431455511600223465ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/migrate/ent/internal/schemaconfig.go000066400000000000000000000016671431455511600253350ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package internal import "context" // SchemaConfig represents alternative schema names for all tables // that can be passed at runtime. type SchemaConfig struct { Revision string // Revision table. } type schemaCtxKey struct{} // SchemaConfigFromContext returns a SchemaConfig stored inside a context, or empty if there isn't one. func SchemaConfigFromContext(ctx context.Context) SchemaConfig { config, _ := ctx.Value(schemaCtxKey{}).(SchemaConfig) return config } // NewSchemaConfigContext returns a new context with the given SchemaConfig attached. func NewSchemaConfigContext(parent context.Context, config SchemaConfig) context.Context { return context.WithValue(parent, schemaCtxKey{}, config) } atlas-0.7.2/cmd/atlas/internal/migrate/ent/migrate/000077500000000000000000000000001431455511600221625ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/migrate/ent/migrate/migrate.go000066400000000000000000000050051431455511600241410ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. 
package migrate import ( "context" "fmt" "io" "entgo.io/ent/dialect" "entgo.io/ent/dialect/sql/schema" ) var ( // WithGlobalUniqueID sets the universal ids options to the migration. // If this option is enabled, ent migration will allocate a 1<<32 range // for the ids of each entity (table). // Note that this option cannot be applied on tables that already exist. WithGlobalUniqueID = schema.WithGlobalUniqueID // WithDropColumn sets the drop column option to the migration. // If this option is enabled, ent migration will drop old columns // that were used for both fields and edges. This defaults to false. WithDropColumn = schema.WithDropColumn // WithDropIndex sets the drop index option to the migration. // If this option is enabled, ent migration will drop old indexes // that were defined in the schema. This defaults to false. // Note that unique constraints are defined using `UNIQUE INDEX`, // and therefore, it's recommended to enable this option to get more // flexibility in the schema changes. WithDropIndex = schema.WithDropIndex // WithForeignKeys enables creating foreign-key in schema DDL. This defaults to true. WithForeignKeys = schema.WithForeignKeys ) // Schema is the API for creating, migrating and dropping a schema. type Schema struct { drv dialect.Driver } // NewSchema creates a new schema client. func NewSchema(drv dialect.Driver) *Schema { return &Schema{drv: drv} } // Create creates all schema resources. func (s *Schema) Create(ctx context.Context, opts ...schema.MigrateOption) error { return Create(ctx, s, Tables, opts...) } // Create creates all table resources using the given schema driver. func Create(ctx context.Context, s *Schema, tables []*schema.Table, opts ...schema.MigrateOption) error { migrate, err := schema.NewMigrate(s.drv, opts...) if err != nil { return fmt.Errorf("ent/migrate: %w", err) } return migrate.Create(ctx, tables...) } // WriteTo writes the schema changes to w instead of running them against the database. 
// // if err := client.Schema.WriteTo(context.Background(), os.Stdout); err != nil { // log.Fatal(err) // } func (s *Schema) WriteTo(ctx context.Context, w io.Writer, opts ...schema.MigrateOption) error { return Create(ctx, &Schema{drv: &schema.WriteDriver{Writer: w, Driver: s.drv}}, Tables, opts...) } atlas-0.7.2/cmd/atlas/internal/migrate/ent/migrate/schema.go000066400000000000000000000031221431455511600237470ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package migrate import ( "entgo.io/ent/dialect/entsql" "entgo.io/ent/dialect/sql/schema" "entgo.io/ent/schema/field" ) var ( // AtlasSchemaRevisionsColumns holds the columns for the "atlas_schema_revisions" table. AtlasSchemaRevisionsColumns = []*schema.Column{ {Name: "version", Type: field.TypeString}, {Name: "description", Type: field.TypeString}, {Name: "type", Type: field.TypeUint, Default: 2}, {Name: "applied", Type: field.TypeInt, Default: 0}, {Name: "total", Type: field.TypeInt, Default: 0}, {Name: "executed_at", Type: field.TypeTime}, {Name: "execution_time", Type: field.TypeInt64}, {Name: "error", Type: field.TypeString, Nullable: true, Size: 2147483647}, {Name: "hash", Type: field.TypeString}, {Name: "partial_hashes", Type: field.TypeJSON, Nullable: true}, {Name: "operator_version", Type: field.TypeString}, } // AtlasSchemaRevisionsTable holds the schema information for the "atlas_schema_revisions" table. AtlasSchemaRevisionsTable = &schema.Table{ Name: "atlas_schema_revisions", Columns: AtlasSchemaRevisionsColumns, PrimaryKey: []*schema.Column{AtlasSchemaRevisionsColumns[0]}, } // Tables holds all the tables in the schema. 
Tables = []*schema.Table{ AtlasSchemaRevisionsTable, } ) func init() { AtlasSchemaRevisionsTable.Annotation = &entsql.Annotation{ Table: "atlas_schema_revisions", } } atlas-0.7.2/cmd/atlas/internal/migrate/ent/mutation.go000066400000000000000000000727301431455511600227320ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package ent import ( "context" "errors" "fmt" "sync" "time" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/predicate" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/revision" "ariga.io/atlas/sql/migrate" "entgo.io/ent" ) const ( // Operation types. OpCreate = ent.OpCreate OpDelete = ent.OpDelete OpDeleteOne = ent.OpDeleteOne OpUpdate = ent.OpUpdate OpUpdateOne = ent.OpUpdateOne // Node types. TypeRevision = "Revision" ) // RevisionMutation represents an operation that mutates the Revision nodes in the graph. type RevisionMutation struct { config op Op typ string id *string description *string _type *migrate.RevisionType add_type *migrate.RevisionType applied *int addapplied *int total *int addtotal *int executed_at *time.Time execution_time *time.Duration addexecution_time *time.Duration error *string hash *string partial_hashes *[]string operator_version *string clearedFields map[string]struct{} done bool oldValue func(context.Context) (*Revision, error) predicates []predicate.Revision } var _ ent.Mutation = (*RevisionMutation)(nil) // revisionOption allows management of the mutation configuration using functional options. type revisionOption func(*RevisionMutation) // newRevisionMutation creates new mutation for the Revision entity. 
func newRevisionMutation(c config, op Op, opts ...revisionOption) *RevisionMutation { m := &RevisionMutation{ config: c, op: op, typ: TypeRevision, clearedFields: make(map[string]struct{}), } for _, opt := range opts { opt(m) } return m } // withRevisionID sets the ID field of the mutation. func withRevisionID(id string) revisionOption { return func(m *RevisionMutation) { var ( err error once sync.Once value *Revision ) m.oldValue = func(ctx context.Context) (*Revision, error) { once.Do(func() { if m.done { err = errors.New("querying old values post mutation is not allowed") } else { value, err = m.Client().Revision.Get(ctx, id) } }) return value, err } m.id = &id } } // withRevision sets the old Revision of the mutation. func withRevision(node *Revision) revisionOption { return func(m *RevisionMutation) { m.oldValue = func(context.Context) (*Revision, error) { return node, nil } m.id = &node.ID } } // Client returns a new `ent.Client` from the mutation. If the mutation was // executed in a transaction (ent.Tx), a transactional client is returned. func (m RevisionMutation) Client() *Client { client := &Client{config: m.config} client.init() return client } // Tx returns an `ent.Tx` for mutations that were executed in transactions; // it returns an error otherwise. func (m RevisionMutation) Tx() (*Tx, error) { if _, ok := m.driver.(*txDriver); !ok { return nil, errors.New("ent: mutation is not running in a transaction") } tx := &Tx{config: m.config} tx.init() return tx, nil } // SetID sets the value of the id field. Note that this // operation is only accepted on creation of Revision entities. func (m *RevisionMutation) SetID(id string) { m.id = &id } // ID returns the ID value in the mutation. Note that the ID is only available // if it was provided to the builder or after it was returned from the database. 
func (m *RevisionMutation) ID() (id string, exists bool) { if m.id == nil { return } return *m.id, true } // IDs queries the database and returns the entity ids that match the mutation's predicate. // That means, if the mutation is applied within a transaction with an isolation level such // as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated // or updated by the mutation. func (m *RevisionMutation) IDs(ctx context.Context) ([]string, error) { switch { case m.op.Is(OpUpdateOne | OpDeleteOne): id, exists := m.ID() if exists { return []string{id}, nil } fallthrough case m.op.Is(OpUpdate | OpDelete): return m.Client().Revision.Query().Where(m.predicates...).IDs(ctx) default: return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op) } } // SetDescription sets the "description" field. func (m *RevisionMutation) SetDescription(s string) { m.description = &s } // Description returns the value of the "description" field in the mutation. func (m *RevisionMutation) Description() (r string, exists bool) { v := m.description if v == nil { return } return *v, true } // OldDescription returns the old "description" field's value of the Revision entity. // If the Revision object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. func (m *RevisionMutation) OldDescription(ctx context.Context) (v string, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldDescription is only allowed on UpdateOne operations") } if m.id == nil || m.oldValue == nil { return v, errors.New("OldDescription requires an ID field in the mutation") } oldValue, err := m.oldValue(ctx) if err != nil { return v, fmt.Errorf("querying old value for OldDescription: %w", err) } return oldValue.Description, nil } // ResetDescription resets all changes to the "description" field. 
func (m *RevisionMutation) ResetDescription() { m.description = nil } // SetType sets the "type" field. func (m *RevisionMutation) SetType(mt migrate.RevisionType) { m._type = &mt m.add_type = nil } // GetType returns the value of the "type" field in the mutation. func (m *RevisionMutation) GetType() (r migrate.RevisionType, exists bool) { v := m._type if v == nil { return } return *v, true } // OldType returns the old "type" field's value of the Revision entity. // If the Revision object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. func (m *RevisionMutation) OldType(ctx context.Context) (v migrate.RevisionType, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldType is only allowed on UpdateOne operations") } if m.id == nil || m.oldValue == nil { return v, errors.New("OldType requires an ID field in the mutation") } oldValue, err := m.oldValue(ctx) if err != nil { return v, fmt.Errorf("querying old value for OldType: %w", err) } return oldValue.Type, nil } // AddType adds mt to the "type" field. func (m *RevisionMutation) AddType(mt migrate.RevisionType) { if m.add_type != nil { *m.add_type += mt } else { m.add_type = &mt } } // AddedType returns the value that was added to the "type" field in this mutation. func (m *RevisionMutation) AddedType() (r migrate.RevisionType, exists bool) { v := m.add_type if v == nil { return } return *v, true } // ResetType resets all changes to the "type" field. func (m *RevisionMutation) ResetType() { m._type = nil m.add_type = nil } // SetApplied sets the "applied" field. func (m *RevisionMutation) SetApplied(i int) { m.applied = &i m.addapplied = nil } // Applied returns the value of the "applied" field in the mutation. 
func (m *RevisionMutation) Applied() (r int, exists bool) { v := m.applied if v == nil { return } return *v, true } // OldApplied returns the old "applied" field's value of the Revision entity. // If the Revision object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. func (m *RevisionMutation) OldApplied(ctx context.Context) (v int, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldApplied is only allowed on UpdateOne operations") } if m.id == nil || m.oldValue == nil { return v, errors.New("OldApplied requires an ID field in the mutation") } oldValue, err := m.oldValue(ctx) if err != nil { return v, fmt.Errorf("querying old value for OldApplied: %w", err) } return oldValue.Applied, nil } // AddApplied adds i to the "applied" field. func (m *RevisionMutation) AddApplied(i int) { if m.addapplied != nil { *m.addapplied += i } else { m.addapplied = &i } } // AddedApplied returns the value that was added to the "applied" field in this mutation. func (m *RevisionMutation) AddedApplied() (r int, exists bool) { v := m.addapplied if v == nil { return } return *v, true } // ResetApplied resets all changes to the "applied" field. func (m *RevisionMutation) ResetApplied() { m.applied = nil m.addapplied = nil } // SetTotal sets the "total" field. func (m *RevisionMutation) SetTotal(i int) { m.total = &i m.addtotal = nil } // Total returns the value of the "total" field in the mutation. func (m *RevisionMutation) Total() (r int, exists bool) { v := m.total if v == nil { return } return *v, true } // OldTotal returns the old "total" field's value of the Revision entity. // If the Revision object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
func (m *RevisionMutation) OldTotal(ctx context.Context) (v int, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldTotal is only allowed on UpdateOne operations") } if m.id == nil || m.oldValue == nil { return v, errors.New("OldTotal requires an ID field in the mutation") } oldValue, err := m.oldValue(ctx) if err != nil { return v, fmt.Errorf("querying old value for OldTotal: %w", err) } return oldValue.Total, nil } // AddTotal adds i to the "total" field. func (m *RevisionMutation) AddTotal(i int) { if m.addtotal != nil { *m.addtotal += i } else { m.addtotal = &i } } // AddedTotal returns the value that was added to the "total" field in this mutation. func (m *RevisionMutation) AddedTotal() (r int, exists bool) { v := m.addtotal if v == nil { return } return *v, true } // ResetTotal resets all changes to the "total" field. func (m *RevisionMutation) ResetTotal() { m.total = nil m.addtotal = nil } // SetExecutedAt sets the "executed_at" field. func (m *RevisionMutation) SetExecutedAt(t time.Time) { m.executed_at = &t } // ExecutedAt returns the value of the "executed_at" field in the mutation. func (m *RevisionMutation) ExecutedAt() (r time.Time, exists bool) { v := m.executed_at if v == nil { return } return *v, true } // OldExecutedAt returns the old "executed_at" field's value of the Revision entity. // If the Revision object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
func (m *RevisionMutation) OldExecutedAt(ctx context.Context) (v time.Time, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldExecutedAt is only allowed on UpdateOne operations") } if m.id == nil || m.oldValue == nil { return v, errors.New("OldExecutedAt requires an ID field in the mutation") } oldValue, err := m.oldValue(ctx) if err != nil { return v, fmt.Errorf("querying old value for OldExecutedAt: %w", err) } return oldValue.ExecutedAt, nil } // ResetExecutedAt resets all changes to the "executed_at" field. func (m *RevisionMutation) ResetExecutedAt() { m.executed_at = nil } // SetExecutionTime sets the "execution_time" field. func (m *RevisionMutation) SetExecutionTime(t time.Duration) { m.execution_time = &t m.addexecution_time = nil } // ExecutionTime returns the value of the "execution_time" field in the mutation. func (m *RevisionMutation) ExecutionTime() (r time.Duration, exists bool) { v := m.execution_time if v == nil { return } return *v, true } // OldExecutionTime returns the old "execution_time" field's value of the Revision entity. // If the Revision object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. func (m *RevisionMutation) OldExecutionTime(ctx context.Context) (v time.Duration, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldExecutionTime is only allowed on UpdateOne operations") } if m.id == nil || m.oldValue == nil { return v, errors.New("OldExecutionTime requires an ID field in the mutation") } oldValue, err := m.oldValue(ctx) if err != nil { return v, fmt.Errorf("querying old value for OldExecutionTime: %w", err) } return oldValue.ExecutionTime, nil } // AddExecutionTime adds t to the "execution_time" field. 
func (m *RevisionMutation) AddExecutionTime(t time.Duration) { if m.addexecution_time != nil { *m.addexecution_time += t } else { m.addexecution_time = &t } } // AddedExecutionTime returns the value that was added to the "execution_time" field in this mutation. func (m *RevisionMutation) AddedExecutionTime() (r time.Duration, exists bool) { v := m.addexecution_time if v == nil { return } return *v, true } // ResetExecutionTime resets all changes to the "execution_time" field. func (m *RevisionMutation) ResetExecutionTime() { m.execution_time = nil m.addexecution_time = nil } // SetError sets the "error" field. func (m *RevisionMutation) SetError(s string) { m.error = &s } // Error returns the value of the "error" field in the mutation. func (m *RevisionMutation) Error() (r string, exists bool) { v := m.error if v == nil { return } return *v, true } // OldError returns the old "error" field's value of the Revision entity. // If the Revision object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. func (m *RevisionMutation) OldError(ctx context.Context) (v string, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldError is only allowed on UpdateOne operations") } if m.id == nil || m.oldValue == nil { return v, errors.New("OldError requires an ID field in the mutation") } oldValue, err := m.oldValue(ctx) if err != nil { return v, fmt.Errorf("querying old value for OldError: %w", err) } return oldValue.Error, nil } // ClearError clears the value of the "error" field. func (m *RevisionMutation) ClearError() { m.error = nil m.clearedFields[revision.FieldError] = struct{}{} } // ErrorCleared returns if the "error" field was cleared in this mutation. func (m *RevisionMutation) ErrorCleared() bool { _, ok := m.clearedFields[revision.FieldError] return ok } // ResetError resets all changes to the "error" field. 
func (m *RevisionMutation) ResetError() { m.error = nil delete(m.clearedFields, revision.FieldError) } // SetHash sets the "hash" field. func (m *RevisionMutation) SetHash(s string) { m.hash = &s } // Hash returns the value of the "hash" field in the mutation. func (m *RevisionMutation) Hash() (r string, exists bool) { v := m.hash if v == nil { return } return *v, true } // OldHash returns the old "hash" field's value of the Revision entity. // If the Revision object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. func (m *RevisionMutation) OldHash(ctx context.Context) (v string, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldHash is only allowed on UpdateOne operations") } if m.id == nil || m.oldValue == nil { return v, errors.New("OldHash requires an ID field in the mutation") } oldValue, err := m.oldValue(ctx) if err != nil { return v, fmt.Errorf("querying old value for OldHash: %w", err) } return oldValue.Hash, nil } // ResetHash resets all changes to the "hash" field. func (m *RevisionMutation) ResetHash() { m.hash = nil } // SetPartialHashes sets the "partial_hashes" field. func (m *RevisionMutation) SetPartialHashes(s []string) { m.partial_hashes = &s } // PartialHashes returns the value of the "partial_hashes" field in the mutation. func (m *RevisionMutation) PartialHashes() (r []string, exists bool) { v := m.partial_hashes if v == nil { return } return *v, true } // OldPartialHashes returns the old "partial_hashes" field's value of the Revision entity. // If the Revision object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
func (m *RevisionMutation) OldPartialHashes(ctx context.Context) (v []string, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldPartialHashes is only allowed on UpdateOne operations") } if m.id == nil || m.oldValue == nil { return v, errors.New("OldPartialHashes requires an ID field in the mutation") } oldValue, err := m.oldValue(ctx) if err != nil { return v, fmt.Errorf("querying old value for OldPartialHashes: %w", err) } return oldValue.PartialHashes, nil } // ClearPartialHashes clears the value of the "partial_hashes" field. func (m *RevisionMutation) ClearPartialHashes() { m.partial_hashes = nil m.clearedFields[revision.FieldPartialHashes] = struct{}{} } // PartialHashesCleared returns if the "partial_hashes" field was cleared in this mutation. func (m *RevisionMutation) PartialHashesCleared() bool { _, ok := m.clearedFields[revision.FieldPartialHashes] return ok } // ResetPartialHashes resets all changes to the "partial_hashes" field. func (m *RevisionMutation) ResetPartialHashes() { m.partial_hashes = nil delete(m.clearedFields, revision.FieldPartialHashes) } // SetOperatorVersion sets the "operator_version" field. func (m *RevisionMutation) SetOperatorVersion(s string) { m.operator_version = &s } // OperatorVersion returns the value of the "operator_version" field in the mutation. func (m *RevisionMutation) OperatorVersion() (r string, exists bool) { v := m.operator_version if v == nil { return } return *v, true } // OldOperatorVersion returns the old "operator_version" field's value of the Revision entity. // If the Revision object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
func (m *RevisionMutation) OldOperatorVersion(ctx context.Context) (v string, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldOperatorVersion is only allowed on UpdateOne operations") } if m.id == nil || m.oldValue == nil { return v, errors.New("OldOperatorVersion requires an ID field in the mutation") } oldValue, err := m.oldValue(ctx) if err != nil { return v, fmt.Errorf("querying old value for OldOperatorVersion: %w", err) } return oldValue.OperatorVersion, nil } // ResetOperatorVersion resets all changes to the "operator_version" field. func (m *RevisionMutation) ResetOperatorVersion() { m.operator_version = nil } // Where appends a list predicates to the RevisionMutation builder. func (m *RevisionMutation) Where(ps ...predicate.Revision) { m.predicates = append(m.predicates, ps...) } // Op returns the operation name. func (m *RevisionMutation) Op() Op { return m.op } // Type returns the node type of this mutation (Revision). func (m *RevisionMutation) Type() string { return m.typ } // Fields returns all fields that were changed during this mutation. Note that in // order to get all numeric fields that were incremented/decremented, call // AddedFields(). 
func (m *RevisionMutation) Fields() []string { fields := make([]string, 0, 10) if m.description != nil { fields = append(fields, revision.FieldDescription) } if m._type != nil { fields = append(fields, revision.FieldType) } if m.applied != nil { fields = append(fields, revision.FieldApplied) } if m.total != nil { fields = append(fields, revision.FieldTotal) } if m.executed_at != nil { fields = append(fields, revision.FieldExecutedAt) } if m.execution_time != nil { fields = append(fields, revision.FieldExecutionTime) } if m.error != nil { fields = append(fields, revision.FieldError) } if m.hash != nil { fields = append(fields, revision.FieldHash) } if m.partial_hashes != nil { fields = append(fields, revision.FieldPartialHashes) } if m.operator_version != nil { fields = append(fields, revision.FieldOperatorVersion) } return fields } // Field returns the value of a field with the given name. The second boolean // return value indicates that this field was not set, or was not defined in the // schema. func (m *RevisionMutation) Field(name string) (ent.Value, bool) { switch name { case revision.FieldDescription: return m.Description() case revision.FieldType: return m.GetType() case revision.FieldApplied: return m.Applied() case revision.FieldTotal: return m.Total() case revision.FieldExecutedAt: return m.ExecutedAt() case revision.FieldExecutionTime: return m.ExecutionTime() case revision.FieldError: return m.Error() case revision.FieldHash: return m.Hash() case revision.FieldPartialHashes: return m.PartialHashes() case revision.FieldOperatorVersion: return m.OperatorVersion() } return nil, false } // OldField returns the old value of the field from the database. An error is // returned if the mutation operation is not UpdateOne, or the query to the // database failed. 
func (m *RevisionMutation) OldField(ctx context.Context, name string) (ent.Value, error) { switch name { case revision.FieldDescription: return m.OldDescription(ctx) case revision.FieldType: return m.OldType(ctx) case revision.FieldApplied: return m.OldApplied(ctx) case revision.FieldTotal: return m.OldTotal(ctx) case revision.FieldExecutedAt: return m.OldExecutedAt(ctx) case revision.FieldExecutionTime: return m.OldExecutionTime(ctx) case revision.FieldError: return m.OldError(ctx) case revision.FieldHash: return m.OldHash(ctx) case revision.FieldPartialHashes: return m.OldPartialHashes(ctx) case revision.FieldOperatorVersion: return m.OldOperatorVersion(ctx) } return nil, fmt.Errorf("unknown Revision field %s", name) } // SetField sets the value of a field with the given name. It returns an error if // the field is not defined in the schema, or if the type mismatched the field // type. func (m *RevisionMutation) SetField(name string, value ent.Value) error { switch name { case revision.FieldDescription: v, ok := value.(string) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetDescription(v) return nil case revision.FieldType: v, ok := value.(migrate.RevisionType) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetType(v) return nil case revision.FieldApplied: v, ok := value.(int) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetApplied(v) return nil case revision.FieldTotal: v, ok := value.(int) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetTotal(v) return nil case revision.FieldExecutedAt: v, ok := value.(time.Time) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetExecutedAt(v) return nil case revision.FieldExecutionTime: v, ok := value.(time.Duration) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetExecutionTime(v) return nil case revision.FieldError: v, ok := 
value.(string) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetError(v) return nil case revision.FieldHash: v, ok := value.(string) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetHash(v) return nil case revision.FieldPartialHashes: v, ok := value.([]string) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetPartialHashes(v) return nil case revision.FieldOperatorVersion: v, ok := value.(string) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetOperatorVersion(v) return nil } return fmt.Errorf("unknown Revision field %s", name) } // AddedFields returns all numeric fields that were incremented/decremented during // this mutation. func (m *RevisionMutation) AddedFields() []string { var fields []string if m.add_type != nil { fields = append(fields, revision.FieldType) } if m.addapplied != nil { fields = append(fields, revision.FieldApplied) } if m.addtotal != nil { fields = append(fields, revision.FieldTotal) } if m.addexecution_time != nil { fields = append(fields, revision.FieldExecutionTime) } return fields } // AddedField returns the numeric value that was incremented/decremented on a field // with the given name. The second boolean return value indicates that this field // was not set, or was not defined in the schema. func (m *RevisionMutation) AddedField(name string) (ent.Value, bool) { switch name { case revision.FieldType: return m.AddedType() case revision.FieldApplied: return m.AddedApplied() case revision.FieldTotal: return m.AddedTotal() case revision.FieldExecutionTime: return m.AddedExecutionTime() } return nil, false } // AddField adds the value to the field with the given name. It returns an error if // the field is not defined in the schema, or if the type mismatched the field // type. 
func (m *RevisionMutation) AddField(name string, value ent.Value) error { switch name { case revision.FieldType: v, ok := value.(migrate.RevisionType) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.AddType(v) return nil case revision.FieldApplied: v, ok := value.(int) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.AddApplied(v) return nil case revision.FieldTotal: v, ok := value.(int) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.AddTotal(v) return nil case revision.FieldExecutionTime: v, ok := value.(time.Duration) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.AddExecutionTime(v) return nil } return fmt.Errorf("unknown Revision numeric field %s", name) } // ClearedFields returns all nullable fields that were cleared during this // mutation. func (m *RevisionMutation) ClearedFields() []string { var fields []string if m.FieldCleared(revision.FieldError) { fields = append(fields, revision.FieldError) } if m.FieldCleared(revision.FieldPartialHashes) { fields = append(fields, revision.FieldPartialHashes) } return fields } // FieldCleared returns a boolean indicating if a field with the given name was // cleared in this mutation. func (m *RevisionMutation) FieldCleared(name string) bool { _, ok := m.clearedFields[name] return ok } // ClearField clears the value of the field with the given name. It returns an // error if the field is not defined in the schema. func (m *RevisionMutation) ClearField(name string) error { switch name { case revision.FieldError: m.ClearError() return nil case revision.FieldPartialHashes: m.ClearPartialHashes() return nil } return fmt.Errorf("unknown Revision nullable field %s", name) } // ResetField resets all changes in the mutation for the field with the given name. // It returns an error if the field is not defined in the schema. 
func (m *RevisionMutation) ResetField(name string) error { switch name { case revision.FieldDescription: m.ResetDescription() return nil case revision.FieldType: m.ResetType() return nil case revision.FieldApplied: m.ResetApplied() return nil case revision.FieldTotal: m.ResetTotal() return nil case revision.FieldExecutedAt: m.ResetExecutedAt() return nil case revision.FieldExecutionTime: m.ResetExecutionTime() return nil case revision.FieldError: m.ResetError() return nil case revision.FieldHash: m.ResetHash() return nil case revision.FieldPartialHashes: m.ResetPartialHashes() return nil case revision.FieldOperatorVersion: m.ResetOperatorVersion() return nil } return fmt.Errorf("unknown Revision field %s", name) } // AddedEdges returns all edge names that were set/added in this mutation. func (m *RevisionMutation) AddedEdges() []string { edges := make([]string, 0, 0) return edges } // AddedIDs returns all IDs (to other nodes) that were added for the given edge // name in this mutation. func (m *RevisionMutation) AddedIDs(name string) []ent.Value { return nil } // RemovedEdges returns all edge names that were removed in this mutation. func (m *RevisionMutation) RemovedEdges() []string { edges := make([]string, 0, 0) return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *RevisionMutation) RemovedIDs(name string) []ent.Value { return nil } // ClearedEdges returns all edge names that were cleared in this mutation. func (m *RevisionMutation) ClearedEdges() []string { edges := make([]string, 0, 0) return edges } // EdgeCleared returns a boolean which indicates if the edge with the given name // was cleared in this mutation. func (m *RevisionMutation) EdgeCleared(name string) bool { return false } // ClearEdge clears the value of the edge with the given name. It returns an error // if that edge is not defined in the schema. 
func (m *RevisionMutation) ClearEdge(name string) error {
	// Revision has no edges, so any edge name is unknown.
	return fmt.Errorf("unknown Revision unique edge %s", name)
}

// ResetEdge resets all changes to the edge with the given name in this mutation.
// It returns an error if the edge is not defined in the schema.
func (m *RevisionMutation) ResetEdge(name string) error {
	return fmt.Errorf("unknown Revision edge %s", name)
}
atlas-0.7.2/cmd/atlas/internal/migrate/ent/predicate/000077500000000000000000000000001431455511600224725ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/migrate/ent/predicate/predicate.go000066400000000000000000000006131431455511600247610ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

// Code generated by entc, DO NOT EDIT.

package predicate

import (
	"entgo.io/ent/dialect/sql"
)

// Revision is the predicate function for revision builders.
type Revision func(*sql.Selector)
atlas-0.7.2/cmd/atlas/internal/migrate/ent/revision.go000066400000000000000000000165711431455511600227310ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

// Code generated by entc, DO NOT EDIT.

package ent

import (
	"encoding/json"
	"fmt"
	"strings"
	"time"

	"ariga.io/atlas/cmd/atlas/internal/migrate/ent/revision"
	"ariga.io/atlas/sql/migrate"
	"entgo.io/ent/dialect/sql"
)

// Revision is the model entity for the Revision schema.
type Revision struct {
	config `json:"-"`
	// ID of the ent.
	ID string `json:"id,omitempty"`
	// Description holds the value of the "description" field.
	Description string `json:"description,omitempty"`
	// Type holds the value of the "type" field.
	Type migrate.RevisionType `json:"type,omitempty"`
	// Applied holds the value of the "applied" field.
	Applied int `json:"applied,omitempty"`
	// Total holds the value of the "total" field.
	Total int `json:"total,omitempty"`
	// ExecutedAt holds the value of the "executed_at" field.
	ExecutedAt time.Time `json:"executed_at,omitempty"`
	// ExecutionTime holds the value of the "execution_time" field.
	ExecutionTime time.Duration `json:"execution_time,omitempty"`
	// Error holds the value of the "error" field.
	Error string `json:"error,omitempty"`
	// Hash holds the value of the "hash" field.
	Hash string `json:"hash,omitempty"`
	// PartialHashes holds the value of the "partial_hashes" field.
	PartialHashes []string `json:"partial_hashes,omitempty"`
	// OperatorVersion holds the value of the "operator_version" field.
	OperatorVersion string `json:"operator_version,omitempty"`
}

// scanValues returns the types for scanning values from sql.Rows.
func (*Revision) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case revision.FieldPartialHashes:
			// partial_hashes is stored as raw JSON bytes.
			values[i] = new([]byte)
		case revision.FieldType, revision.FieldApplied, revision.FieldTotal, revision.FieldExecutionTime:
			values[i] = new(sql.NullInt64)
		case revision.FieldID, revision.FieldDescription, revision.FieldError, revision.FieldHash, revision.FieldOperatorVersion:
			values[i] = new(sql.NullString)
		case revision.FieldExecutedAt:
			values[i] = new(sql.NullTime)
		default:
			return nil, fmt.Errorf("unexpected column %q for type Revision", columns[i])
		}
	}
	return values, nil
}

// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the Revision fields.
func (r *Revision) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	// Each pointer in values was allocated by scanValues; a type mismatch here
	// indicates a column/type drift between the query and the schema.
	for i := range columns {
		switch columns[i] {
		case revision.FieldID:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field id", values[i])
			} else if value.Valid {
				r.ID = value.String
			}
		case revision.FieldDescription:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field description", values[i])
			} else if value.Valid {
				r.Description = value.String
			}
		case revision.FieldType:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field type", values[i])
			} else if value.Valid {
				r.Type = migrate.RevisionType(value.Int64)
			}
		case revision.FieldApplied:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field applied", values[i])
			} else if value.Valid {
				r.Applied = int(value.Int64)
			}
		case revision.FieldTotal:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field total", values[i])
			} else if value.Valid {
				r.Total = int(value.Int64)
			}
		case revision.FieldExecutedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field executed_at", values[i])
			} else if value.Valid {
				r.ExecutedAt = value.Time
			}
		case revision.FieldExecutionTime:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field execution_time", values[i])
			} else if value.Valid {
				// execution_time is persisted as nanoseconds (int64).
				r.ExecutionTime = time.Duration(value.Int64)
			}
		case revision.FieldError:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field error", values[i])
			} else if value.Valid {
				r.Error = value.String
			}
		case revision.FieldHash:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field hash", values[i])
			} else if value.Valid {
				r.Hash = value.String
			}
		case revision.FieldPartialHashes:
			if value, ok := values[i].(*[]byte); !ok {
				return fmt.Errorf("unexpected type %T for field partial_hashes", values[i])
			} else if value != nil && len(*value) > 0 {
				if err := json.Unmarshal(*value, &r.PartialHashes); err != nil {
					return fmt.Errorf("unmarshal field partial_hashes: %w", err)
				}
			}
		case revision.FieldOperatorVersion:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field operator_version", values[i])
			} else if value.Valid {
				r.OperatorVersion = value.String
			}
		}
	}
	return nil
}

// Update returns a builder for updating this Revision.
// Note that you need to call Revision.Unwrap() before calling this method if this Revision
// was returned from a transaction, and the transaction was committed or rolled back.
func (r *Revision) Update() *RevisionUpdateOne {
	return (&RevisionClient{config: r.config}).UpdateOne(r)
}

// Unwrap unwraps the Revision entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (r *Revision) Unwrap() *Revision {
	_tx, ok := r.config.driver.(*txDriver)
	if !ok {
		panic("ent: Revision is not a transactional entity")
	}
	r.config.driver = _tx.drv
	return r
}

// String implements the fmt.Stringer.
func (r *Revision) String() string {
	var builder strings.Builder
	builder.WriteString("Revision(")
	builder.WriteString(fmt.Sprintf("id=%v, ", r.ID))
	builder.WriteString("description=")
	builder.WriteString(r.Description)
	builder.WriteString(", ")
	builder.WriteString("type=")
	builder.WriteString(fmt.Sprintf("%v", r.Type))
	builder.WriteString(", ")
	builder.WriteString("applied=")
	builder.WriteString(fmt.Sprintf("%v", r.Applied))
	builder.WriteString(", ")
	builder.WriteString("total=")
	builder.WriteString(fmt.Sprintf("%v", r.Total))
	builder.WriteString(", ")
	builder.WriteString("executed_at=")
	builder.WriteString(r.ExecutedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("execution_time=")
	builder.WriteString(fmt.Sprintf("%v", r.ExecutionTime))
	builder.WriteString(", ")
	builder.WriteString("error=")
	builder.WriteString(r.Error)
	builder.WriteString(", ")
	builder.WriteString("hash=")
	builder.WriteString(r.Hash)
	builder.WriteString(", ")
	builder.WriteString("partial_hashes=")
	builder.WriteString(fmt.Sprintf("%v", r.PartialHashes))
	builder.WriteString(", ")
	builder.WriteString("operator_version=")
	builder.WriteString(r.OperatorVersion)
	builder.WriteByte(')')
	return builder.String()
}

// Revisions is a parsable slice of Revision.
type Revisions []*Revision

// config propagates the shared client configuration to every element.
func (r Revisions) config(cfg config) {
	for _i := range r {
		r[_i].config = cfg
	}
}
atlas-0.7.2/cmd/atlas/internal/migrate/ent/revision/000077500000000000000000000000001431455511600223705ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/migrate/ent/revision/revision.go000066400000000000000000000053101431455511600245540ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

// Code generated by entc, DO NOT EDIT.
package revision

import (
	"ariga.io/atlas/sql/migrate"
)

const (
	// Label holds the string label denoting the revision type in the database.
	Label = "revision"
	// FieldID holds the string denoting the id field in the database.
	// Note: the entity ID is the migration version, stored in column "version".
	FieldID = "version"
	// FieldDescription holds the string denoting the description field in the database.
	FieldDescription = "description"
	// FieldType holds the string denoting the type field in the database.
	FieldType = "type"
	// FieldApplied holds the string denoting the applied field in the database.
	FieldApplied = "applied"
	// FieldTotal holds the string denoting the total field in the database.
	FieldTotal = "total"
	// FieldExecutedAt holds the string denoting the executed_at field in the database.
	FieldExecutedAt = "executed_at"
	// FieldExecutionTime holds the string denoting the execution_time field in the database.
	FieldExecutionTime = "execution_time"
	// FieldError holds the string denoting the error field in the database.
	FieldError = "error"
	// FieldHash holds the string denoting the hash field in the database.
	FieldHash = "hash"
	// FieldPartialHashes holds the string denoting the partial_hashes field in the database.
	FieldPartialHashes = "partial_hashes"
	// FieldOperatorVersion holds the string denoting the operator_version field in the database.
	FieldOperatorVersion = "operator_version"
	// Table holds the table name of the revision in the database.
	Table = "atlas_schema_revisions"
)

// Columns holds all SQL columns for revision fields.
var Columns = []string{
	FieldID,
	FieldDescription,
	FieldType,
	FieldApplied,
	FieldTotal,
	FieldExecutedAt,
	FieldExecutionTime,
	FieldError,
	FieldHash,
	FieldPartialHashes,
	FieldOperatorVersion,
}

// ValidColumn reports if the column name is valid (part of the table columns).
func ValidColumn(column string) bool {
	for i := range Columns {
		if column == Columns[i] {
			return true
		}
	}
	return false
}

// Defaults and validators below are populated at runtime from the schema
// definition (see runtime.go in the generated package).
var (
	// DefaultType holds the default value on creation for the "type" field.
	DefaultType migrate.RevisionType
	// DefaultApplied holds the default value on creation for the "applied" field.
	DefaultApplied int
	// AppliedValidator is a validator for the "applied" field. It is called by the builders before save.
	AppliedValidator func(int) error
	// DefaultTotal holds the default value on creation for the "total" field.
	DefaultTotal int
	// TotalValidator is a validator for the "total" field. It is called by the builders before save.
	TotalValidator func(int) error
)
atlas-0.7.2/cmd/atlas/internal/migrate/ent/revision/where.go000066400000000000000000000757641431455511600240420ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

// Code generated by entc, DO NOT EDIT.

package revision

import (
	"time"

	"ariga.io/atlas/cmd/atlas/internal/migrate/ent/predicate"
	"ariga.io/atlas/sql/migrate"
	"entgo.io/ent/dialect/sql"
)

// ID filters vertices based on their ID field.
func ID(id string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldID), id))
	})
}

// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldID), id))
	})
}

// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldID), id))
	})
}

// IDIn applies the In predicate on the ID field.
func IDIn(ids ...string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		v := make([]interface{}, len(ids))
		for i := range v {
			v[i] = ids[i]
		}
		s.Where(sql.In(s.C(FieldID), v...))
	})
}

// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		v := make([]interface{}, len(ids))
		for i := range v {
			v[i] = ids[i]
		}
		s.Where(sql.NotIn(s.C(FieldID), v...))
	})
}

// IDGT applies the GT predicate on the ID field.
func IDGT(id string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldID), id))
	})
}

// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldID), id))
	})
}

// IDLT applies the LT predicate on the ID field.
func IDLT(id string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldID), id))
	})
}

// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldID), id))
	})
}

// Description applies equality check predicate on the "description" field. It's identical to DescriptionEQ.
func Description(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldDescription), v))
	})
}

// Type applies equality check predicate on the "type" field. It's identical to TypeEQ.
func Type(v migrate.RevisionType) predicate.Revision {
	// RevisionType is compared as its underlying uint column value.
	vc := uint(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldType), vc))
	})
}

// Applied applies equality check predicate on the "applied" field. It's identical to AppliedEQ.
func Applied(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldApplied), v))
	})
}

// Total applies equality check predicate on the "total" field. It's identical to TotalEQ.
func Total(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldTotal), v))
	})
}

// ExecutedAt applies equality check predicate on the "executed_at" field. It's identical to ExecutedAtEQ.
func ExecutedAt(v time.Time) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldExecutedAt), v))
	})
}

// ExecutionTime applies equality check predicate on the "execution_time" field. It's identical to ExecutionTimeEQ.
func ExecutionTime(v time.Duration) predicate.Revision {
	// Durations are stored as int64 nanoseconds.
	vc := int64(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldExecutionTime), vc))
	})
}

// Error applies equality check predicate on the "error" field. It's identical to ErrorEQ.
func Error(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldError), v))
	})
}

// Hash applies equality check predicate on the "hash" field. It's identical to HashEQ.
func Hash(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldHash), v))
	})
}

// OperatorVersion applies equality check predicate on the "operator_version" field. It's identical to OperatorVersionEQ.
func OperatorVersion(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldOperatorVersion), v))
	})
}

// DescriptionEQ applies the EQ predicate on the "description" field.
func DescriptionEQ(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldDescription), v))
	})
}

// DescriptionNEQ applies the NEQ predicate on the "description" field.
func DescriptionNEQ(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldDescription), v))
	})
}

// DescriptionIn applies the In predicate on the "description" field.
func DescriptionIn(vs ...string) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.In(s.C(FieldDescription), v...))
	})
}

// DescriptionNotIn applies the NotIn predicate on the "description" field.
func DescriptionNotIn(vs ...string) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.NotIn(s.C(FieldDescription), v...))
	})
}

// DescriptionGT applies the GT predicate on the "description" field.
func DescriptionGT(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldDescription), v))
	})
}

// DescriptionGTE applies the GTE predicate on the "description" field.
func DescriptionGTE(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldDescription), v))
	})
}

// DescriptionLT applies the LT predicate on the "description" field.
func DescriptionLT(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldDescription), v))
	})
}

// DescriptionLTE applies the LTE predicate on the "description" field.
func DescriptionLTE(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldDescription), v))
	})
}

// DescriptionContains applies the Contains predicate on the "description" field.
func DescriptionContains(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.Contains(s.C(FieldDescription), v))
	})
}

// DescriptionHasPrefix applies the HasPrefix predicate on the "description" field.
func DescriptionHasPrefix(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.HasPrefix(s.C(FieldDescription), v))
	})
}

// DescriptionHasSuffix applies the HasSuffix predicate on the "description" field.
func DescriptionHasSuffix(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.HasSuffix(s.C(FieldDescription), v))
	})
}

// DescriptionEqualFold applies the EqualFold predicate on the "description" field.
func DescriptionEqualFold(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EqualFold(s.C(FieldDescription), v))
	})
}

// DescriptionContainsFold applies the ContainsFold predicate on the "description" field.
func DescriptionContainsFold(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.ContainsFold(s.C(FieldDescription), v))
	})
}

// TypeEQ applies the EQ predicate on the "type" field.
func TypeEQ(v migrate.RevisionType) predicate.Revision {
	vc := uint(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldType), vc))
	})
}

// TypeNEQ applies the NEQ predicate on the "type" field.
func TypeNEQ(v migrate.RevisionType) predicate.Revision {
	vc := uint(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldType), vc))
	})
}

// TypeIn applies the In predicate on the "type" field.
func TypeIn(vs ...migrate.RevisionType) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = uint(vs[i])
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.In(s.C(FieldType), v...))
	})
}

// TypeNotIn applies the NotIn predicate on the "type" field.
func TypeNotIn(vs ...migrate.RevisionType) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = uint(vs[i])
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.NotIn(s.C(FieldType), v...))
	})
}

// TypeGT applies the GT predicate on the "type" field.
func TypeGT(v migrate.RevisionType) predicate.Revision {
	vc := uint(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldType), vc))
	})
}

// TypeGTE applies the GTE predicate on the "type" field.
func TypeGTE(v migrate.RevisionType) predicate.Revision {
	vc := uint(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldType), vc))
	})
}

// TypeLT applies the LT predicate on the "type" field.
func TypeLT(v migrate.RevisionType) predicate.Revision {
	vc := uint(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldType), vc))
	})
}

// TypeLTE applies the LTE predicate on the "type" field.
func TypeLTE(v migrate.RevisionType) predicate.Revision {
	vc := uint(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldType), vc))
	})
}

// AppliedEQ applies the EQ predicate on the "applied" field.
func AppliedEQ(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldApplied), v))
	})
}

// AppliedNEQ applies the NEQ predicate on the "applied" field.
func AppliedNEQ(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldApplied), v))
	})
}

// AppliedIn applies the In predicate on the "applied" field.
func AppliedIn(vs ...int) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.In(s.C(FieldApplied), v...))
	})
}

// AppliedNotIn applies the NotIn predicate on the "applied" field.
func AppliedNotIn(vs ...int) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.NotIn(s.C(FieldApplied), v...))
	})
}

// AppliedGT applies the GT predicate on the "applied" field.
func AppliedGT(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldApplied), v))
	})
}

// AppliedGTE applies the GTE predicate on the "applied" field.
func AppliedGTE(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldApplied), v))
	})
}

// AppliedLT applies the LT predicate on the "applied" field.
func AppliedLT(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldApplied), v))
	})
}

// AppliedLTE applies the LTE predicate on the "applied" field.
func AppliedLTE(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldApplied), v))
	})
}

// TotalEQ applies the EQ predicate on the "total" field.
func TotalEQ(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldTotal), v))
	})
}

// TotalNEQ applies the NEQ predicate on the "total" field.
func TotalNEQ(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldTotal), v))
	})
}

// TotalIn applies the In predicate on the "total" field.
func TotalIn(vs ...int) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.In(s.C(FieldTotal), v...))
	})
}

// TotalNotIn applies the NotIn predicate on the "total" field.
func TotalNotIn(vs ...int) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.NotIn(s.C(FieldTotal), v...))
	})
}

// TotalGT applies the GT predicate on the "total" field.
func TotalGT(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldTotal), v))
	})
}

// TotalGTE applies the GTE predicate on the "total" field.
func TotalGTE(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldTotal), v))
	})
}

// TotalLT applies the LT predicate on the "total" field.
func TotalLT(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldTotal), v))
	})
}

// TotalLTE applies the LTE predicate on the "total" field.
func TotalLTE(v int) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldTotal), v))
	})
}

// ExecutedAtEQ applies the EQ predicate on the "executed_at" field.
func ExecutedAtEQ(v time.Time) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldExecutedAt), v))
	})
}

// ExecutedAtNEQ applies the NEQ predicate on the "executed_at" field.
func ExecutedAtNEQ(v time.Time) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldExecutedAt), v))
	})
}

// ExecutedAtIn applies the In predicate on the "executed_at" field.
func ExecutedAtIn(vs ...time.Time) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.In(s.C(FieldExecutedAt), v...))
	})
}

// ExecutedAtNotIn applies the NotIn predicate on the "executed_at" field.
func ExecutedAtNotIn(vs ...time.Time) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.NotIn(s.C(FieldExecutedAt), v...))
	})
}

// ExecutedAtGT applies the GT predicate on the "executed_at" field.
func ExecutedAtGT(v time.Time) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldExecutedAt), v))
	})
}

// ExecutedAtGTE applies the GTE predicate on the "executed_at" field.
func ExecutedAtGTE(v time.Time) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldExecutedAt), v))
	})
}

// ExecutedAtLT applies the LT predicate on the "executed_at" field.
func ExecutedAtLT(v time.Time) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldExecutedAt), v))
	})
}

// ExecutedAtLTE applies the LTE predicate on the "executed_at" field.
func ExecutedAtLTE(v time.Time) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldExecutedAt), v))
	})
}

// ExecutionTimeEQ applies the EQ predicate on the "execution_time" field.
func ExecutionTimeEQ(v time.Duration) predicate.Revision {
	vc := int64(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldExecutionTime), vc))
	})
}

// ExecutionTimeNEQ applies the NEQ predicate on the "execution_time" field.
func ExecutionTimeNEQ(v time.Duration) predicate.Revision {
	vc := int64(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldExecutionTime), vc))
	})
}

// ExecutionTimeIn applies the In predicate on the "execution_time" field.
func ExecutionTimeIn(vs ...time.Duration) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = int64(vs[i])
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.In(s.C(FieldExecutionTime), v...))
	})
}

// ExecutionTimeNotIn applies the NotIn predicate on the "execution_time" field.
func ExecutionTimeNotIn(vs ...time.Duration) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = int64(vs[i])
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.NotIn(s.C(FieldExecutionTime), v...))
	})
}

// ExecutionTimeGT applies the GT predicate on the "execution_time" field.
func ExecutionTimeGT(v time.Duration) predicate.Revision {
	vc := int64(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldExecutionTime), vc))
	})
}

// ExecutionTimeGTE applies the GTE predicate on the "execution_time" field.
func ExecutionTimeGTE(v time.Duration) predicate.Revision {
	vc := int64(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldExecutionTime), vc))
	})
}

// ExecutionTimeLT applies the LT predicate on the "execution_time" field.
func ExecutionTimeLT(v time.Duration) predicate.Revision {
	vc := int64(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldExecutionTime), vc))
	})
}

// ExecutionTimeLTE applies the LTE predicate on the "execution_time" field.
func ExecutionTimeLTE(v time.Duration) predicate.Revision {
	vc := int64(v)
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldExecutionTime), vc))
	})
}

// ErrorEQ applies the EQ predicate on the "error" field.
func ErrorEQ(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldError), v))
	})
}

// ErrorNEQ applies the NEQ predicate on the "error" field.
func ErrorNEQ(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldError), v))
	})
}

// ErrorIn applies the In predicate on the "error" field.
func ErrorIn(vs ...string) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.In(s.C(FieldError), v...))
	})
}

// ErrorNotIn applies the NotIn predicate on the "error" field.
func ErrorNotIn(vs ...string) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.NotIn(s.C(FieldError), v...))
	})
}

// ErrorGT applies the GT predicate on the "error" field.
func ErrorGT(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldError), v))
	})
}

// ErrorGTE applies the GTE predicate on the "error" field.
func ErrorGTE(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldError), v))
	})
}

// ErrorLT applies the LT predicate on the "error" field.
func ErrorLT(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldError), v))
	})
}

// ErrorLTE applies the LTE predicate on the "error" field.
func ErrorLTE(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldError), v))
	})
}

// ErrorContains applies the Contains predicate on the "error" field.
func ErrorContains(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.Contains(s.C(FieldError), v))
	})
}

// ErrorHasPrefix applies the HasPrefix predicate on the "error" field.
func ErrorHasPrefix(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.HasPrefix(s.C(FieldError), v))
	})
}

// ErrorHasSuffix applies the HasSuffix predicate on the "error" field.
func ErrorHasSuffix(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.HasSuffix(s.C(FieldError), v))
	})
}

// ErrorIsNil applies the IsNil predicate on the "error" field.
func ErrorIsNil() predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.IsNull(s.C(FieldError)))
	})
}

// ErrorNotNil applies the NotNil predicate on the "error" field.
func ErrorNotNil() predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.NotNull(s.C(FieldError)))
	})
}

// ErrorEqualFold applies the EqualFold predicate on the "error" field.
func ErrorEqualFold(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EqualFold(s.C(FieldError), v))
	})
}

// ErrorContainsFold applies the ContainsFold predicate on the "error" field.
func ErrorContainsFold(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.ContainsFold(s.C(FieldError), v))
	})
}

// HashEQ applies the EQ predicate on the "hash" field.
func HashEQ(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldHash), v))
	})
}

// HashNEQ applies the NEQ predicate on the "hash" field.
func HashNEQ(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldHash), v))
	})
}

// HashIn applies the In predicate on the "hash" field.
func HashIn(vs ...string) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.In(s.C(FieldHash), v...))
	})
}

// HashNotIn applies the NotIn predicate on the "hash" field.
func HashNotIn(vs ...string) predicate.Revision {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Revision(func(s *sql.Selector) {
		// if no arguments were provided, append the FALSE constants,
		// since we can't apply "IN ()". This will make this predicate falsy.
		if len(v) == 0 {
			s.Where(sql.False())
			return
		}
		s.Where(sql.NotIn(s.C(FieldHash), v...))
	})
}

// HashGT applies the GT predicate on the "hash" field.
func HashGT(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldHash), v))
	})
}

// HashGTE applies the GTE predicate on the "hash" field.
func HashGTE(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldHash), v))
	})
}

// HashLT applies the LT predicate on the "hash" field.
func HashLT(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldHash), v))
	})
}

// HashLTE applies the LTE predicate on the "hash" field.
func HashLTE(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldHash), v))
	})
}

// HashContains applies the Contains predicate on the "hash" field.
func HashContains(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.Contains(s.C(FieldHash), v))
	})
}

// HashHasPrefix applies the HasPrefix predicate on the "hash" field.
func HashHasPrefix(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.HasPrefix(s.C(FieldHash), v))
	})
}

// HashHasSuffix applies the HasSuffix predicate on the "hash" field.
func HashHasSuffix(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.HasSuffix(s.C(FieldHash), v))
	})
}

// HashEqualFold applies the EqualFold predicate on the "hash" field.
func HashEqualFold(v string) predicate.Revision {
	return predicate.Revision(func(s *sql.Selector) {
		s.Where(sql.EqualFold(s.C(FieldHash), v))
	})
}

// HashContainsFold applies the ContainsFold predicate on the "hash" field.
func HashContainsFold(v string) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.ContainsFold(s.C(FieldHash), v)) }) } // PartialHashesIsNil applies the IsNil predicate on the "partial_hashes" field. func PartialHashesIsNil() predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.IsNull(s.C(FieldPartialHashes))) }) } // PartialHashesNotNil applies the NotNil predicate on the "partial_hashes" field. func PartialHashesNotNil() predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.NotNull(s.C(FieldPartialHashes))) }) } // OperatorVersionEQ applies the EQ predicate on the "operator_version" field. func OperatorVersionEQ(v string) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.EQ(s.C(FieldOperatorVersion), v)) }) } // OperatorVersionNEQ applies the NEQ predicate on the "operator_version" field. func OperatorVersionNEQ(v string) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.NEQ(s.C(FieldOperatorVersion), v)) }) } // OperatorVersionIn applies the In predicate on the "operator_version" field. func OperatorVersionIn(vs ...string) predicate.Revision { v := make([]interface{}, len(vs)) for i := range v { v[i] = vs[i] } return predicate.Revision(func(s *sql.Selector) { // if not arguments were provided, append the FALSE constants, // since we can't apply "IN ()". This will make this predicate falsy. if len(v) == 0 { s.Where(sql.False()) return } s.Where(sql.In(s.C(FieldOperatorVersion), v...)) }) } // OperatorVersionNotIn applies the NotIn predicate on the "operator_version" field. func OperatorVersionNotIn(vs ...string) predicate.Revision { v := make([]interface{}, len(vs)) for i := range v { v[i] = vs[i] } return predicate.Revision(func(s *sql.Selector) { // if not arguments were provided, append the FALSE constants, // since we can't apply "IN ()". This will make this predicate falsy. 
if len(v) == 0 { s.Where(sql.False()) return } s.Where(sql.NotIn(s.C(FieldOperatorVersion), v...)) }) } // OperatorVersionGT applies the GT predicate on the "operator_version" field. func OperatorVersionGT(v string) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.GT(s.C(FieldOperatorVersion), v)) }) } // OperatorVersionGTE applies the GTE predicate on the "operator_version" field. func OperatorVersionGTE(v string) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.GTE(s.C(FieldOperatorVersion), v)) }) } // OperatorVersionLT applies the LT predicate on the "operator_version" field. func OperatorVersionLT(v string) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.LT(s.C(FieldOperatorVersion), v)) }) } // OperatorVersionLTE applies the LTE predicate on the "operator_version" field. func OperatorVersionLTE(v string) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.LTE(s.C(FieldOperatorVersion), v)) }) } // OperatorVersionContains applies the Contains predicate on the "operator_version" field. func OperatorVersionContains(v string) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.Contains(s.C(FieldOperatorVersion), v)) }) } // OperatorVersionHasPrefix applies the HasPrefix predicate on the "operator_version" field. func OperatorVersionHasPrefix(v string) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.HasPrefix(s.C(FieldOperatorVersion), v)) }) } // OperatorVersionHasSuffix applies the HasSuffix predicate on the "operator_version" field. func OperatorVersionHasSuffix(v string) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.HasSuffix(s.C(FieldOperatorVersion), v)) }) } // OperatorVersionEqualFold applies the EqualFold predicate on the "operator_version" field. 
func OperatorVersionEqualFold(v string) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.EqualFold(s.C(FieldOperatorVersion), v)) }) } // OperatorVersionContainsFold applies the ContainsFold predicate on the "operator_version" field. func OperatorVersionContainsFold(v string) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s.Where(sql.ContainsFold(s.C(FieldOperatorVersion), v)) }) } // And groups predicates with the AND operator between them. func And(predicates ...predicate.Revision) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s1 := s.Clone().SetP(nil) for _, p := range predicates { p(s1) } s.Where(s1.P()) }) } // Or groups predicates with the OR operator between them. func Or(predicates ...predicate.Revision) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { s1 := s.Clone().SetP(nil) for i, p := range predicates { if i > 0 { s1.Or() } p(s1) } s.Where(s1.P()) }) } // Not applies the not operator on the given predicate. func Not(p predicate.Revision) predicate.Revision { return predicate.Revision(func(s *sql.Selector) { p(s.Not()) }) } atlas-0.7.2/cmd/atlas/internal/migrate/ent/revision_create.go000066400000000000000000001061051431455511600242450ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package ent import ( "context" "errors" "fmt" "time" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/revision" "ariga.io/atlas/sql/migrate" "entgo.io/ent/dialect" "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" ) // RevisionCreate is the builder for creating a Revision entity. 
type RevisionCreate struct { config mutation *RevisionMutation hooks []Hook conflict []sql.ConflictOption } // SetDescription sets the "description" field. func (rc *RevisionCreate) SetDescription(s string) *RevisionCreate { rc.mutation.SetDescription(s) return rc } // SetType sets the "type" field. func (rc *RevisionCreate) SetType(mt migrate.RevisionType) *RevisionCreate { rc.mutation.SetType(mt) return rc } // SetNillableType sets the "type" field if the given value is not nil. func (rc *RevisionCreate) SetNillableType(mt *migrate.RevisionType) *RevisionCreate { if mt != nil { rc.SetType(*mt) } return rc } // SetApplied sets the "applied" field. func (rc *RevisionCreate) SetApplied(i int) *RevisionCreate { rc.mutation.SetApplied(i) return rc } // SetNillableApplied sets the "applied" field if the given value is not nil. func (rc *RevisionCreate) SetNillableApplied(i *int) *RevisionCreate { if i != nil { rc.SetApplied(*i) } return rc } // SetTotal sets the "total" field. func (rc *RevisionCreate) SetTotal(i int) *RevisionCreate { rc.mutation.SetTotal(i) return rc } // SetNillableTotal sets the "total" field if the given value is not nil. func (rc *RevisionCreate) SetNillableTotal(i *int) *RevisionCreate { if i != nil { rc.SetTotal(*i) } return rc } // SetExecutedAt sets the "executed_at" field. func (rc *RevisionCreate) SetExecutedAt(t time.Time) *RevisionCreate { rc.mutation.SetExecutedAt(t) return rc } // SetExecutionTime sets the "execution_time" field. func (rc *RevisionCreate) SetExecutionTime(t time.Duration) *RevisionCreate { rc.mutation.SetExecutionTime(t) return rc } // SetError sets the "error" field. func (rc *RevisionCreate) SetError(s string) *RevisionCreate { rc.mutation.SetError(s) return rc } // SetNillableError sets the "error" field if the given value is not nil. func (rc *RevisionCreate) SetNillableError(s *string) *RevisionCreate { if s != nil { rc.SetError(*s) } return rc } // SetHash sets the "hash" field. 
func (rc *RevisionCreate) SetHash(s string) *RevisionCreate { rc.mutation.SetHash(s) return rc } // SetPartialHashes sets the "partial_hashes" field. func (rc *RevisionCreate) SetPartialHashes(s []string) *RevisionCreate { rc.mutation.SetPartialHashes(s) return rc } // SetOperatorVersion sets the "operator_version" field. func (rc *RevisionCreate) SetOperatorVersion(s string) *RevisionCreate { rc.mutation.SetOperatorVersion(s) return rc } // SetID sets the "id" field. func (rc *RevisionCreate) SetID(s string) *RevisionCreate { rc.mutation.SetID(s) return rc } // Mutation returns the RevisionMutation object of the builder. func (rc *RevisionCreate) Mutation() *RevisionMutation { return rc.mutation } // Save creates the Revision in the database. func (rc *RevisionCreate) Save(ctx context.Context) (*Revision, error) { var ( err error node *Revision ) rc.defaults() if len(rc.hooks) == 0 { if err = rc.check(); err != nil { return nil, err } node, err = rc.sqlSave(ctx) } else { var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { mutation, ok := m.(*RevisionMutation) if !ok { return nil, fmt.Errorf("unexpected mutation type %T", m) } if err = rc.check(); err != nil { return nil, err } rc.mutation = mutation if node, err = rc.sqlSave(ctx); err != nil { return nil, err } mutation.id = &node.ID mutation.done = true return node, err }) for i := len(rc.hooks) - 1; i >= 0; i-- { if rc.hooks[i] == nil { return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") } mut = rc.hooks[i](mut) } v, err := mut.Mutate(ctx, rc.mutation) if err != nil { return nil, err } nv, ok := v.(*Revision) if !ok { return nil, fmt.Errorf("unexpected node type %T returned from RevisionMutation", v) } node = nv } return node, err } // SaveX calls Save and panics if Save returns an error. func (rc *RevisionCreate) SaveX(ctx context.Context) *Revision { v, err := rc.Save(ctx) if err != nil { panic(err) } return v } // Exec executes the query. 
func (rc *RevisionCreate) Exec(ctx context.Context) error { _, err := rc.Save(ctx) return err } // ExecX is like Exec, but panics if an error occurs. func (rc *RevisionCreate) ExecX(ctx context.Context) { if err := rc.Exec(ctx); err != nil { panic(err) } } // defaults sets the default values of the builder before save. func (rc *RevisionCreate) defaults() { if _, ok := rc.mutation.GetType(); !ok { v := revision.DefaultType rc.mutation.SetType(v) } if _, ok := rc.mutation.Applied(); !ok { v := revision.DefaultApplied rc.mutation.SetApplied(v) } if _, ok := rc.mutation.Total(); !ok { v := revision.DefaultTotal rc.mutation.SetTotal(v) } } // check runs all checks and user-defined validators on the builder. func (rc *RevisionCreate) check() error { if _, ok := rc.mutation.Description(); !ok { return &ValidationError{Name: "description", err: errors.New(`ent: missing required field "Revision.description"`)} } if _, ok := rc.mutation.GetType(); !ok { return &ValidationError{Name: "type", err: errors.New(`ent: missing required field "Revision.type"`)} } if _, ok := rc.mutation.Applied(); !ok { return &ValidationError{Name: "applied", err: errors.New(`ent: missing required field "Revision.applied"`)} } if v, ok := rc.mutation.Applied(); ok { if err := revision.AppliedValidator(v); err != nil { return &ValidationError{Name: "applied", err: fmt.Errorf(`ent: validator failed for field "Revision.applied": %w`, err)} } } if _, ok := rc.mutation.Total(); !ok { return &ValidationError{Name: "total", err: errors.New(`ent: missing required field "Revision.total"`)} } if v, ok := rc.mutation.Total(); ok { if err := revision.TotalValidator(v); err != nil { return &ValidationError{Name: "total", err: fmt.Errorf(`ent: validator failed for field "Revision.total": %w`, err)} } } if _, ok := rc.mutation.ExecutedAt(); !ok { return &ValidationError{Name: "executed_at", err: errors.New(`ent: missing required field "Revision.executed_at"`)} } if _, ok := rc.mutation.ExecutionTime(); !ok { 
return &ValidationError{Name: "execution_time", err: errors.New(`ent: missing required field "Revision.execution_time"`)} } if _, ok := rc.mutation.Hash(); !ok { return &ValidationError{Name: "hash", err: errors.New(`ent: missing required field "Revision.hash"`)} } if _, ok := rc.mutation.OperatorVersion(); !ok { return &ValidationError{Name: "operator_version", err: errors.New(`ent: missing required field "Revision.operator_version"`)} } return nil } func (rc *RevisionCreate) sqlSave(ctx context.Context) (*Revision, error) { _node, _spec := rc.createSpec() if err := sqlgraph.CreateNode(ctx, rc.driver, _spec); err != nil { if sqlgraph.IsConstraintError(err) { err = &ConstraintError{msg: err.Error(), wrap: err} } return nil, err } if _spec.ID.Value != nil { if id, ok := _spec.ID.Value.(string); ok { _node.ID = id } else { return nil, fmt.Errorf("unexpected Revision.ID type: %T", _spec.ID.Value) } } return _node, nil } func (rc *RevisionCreate) createSpec() (*Revision, *sqlgraph.CreateSpec) { var ( _node = &Revision{config: rc.config} _spec = &sqlgraph.CreateSpec{ Table: revision.Table, ID: &sqlgraph.FieldSpec{ Type: field.TypeString, Column: revision.FieldID, }, } ) _spec.Schema = rc.schemaConfig.Revision _spec.OnConflict = rc.conflict if id, ok := rc.mutation.ID(); ok { _node.ID = id _spec.ID.Value = id } if value, ok := rc.mutation.Description(); ok { _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ Type: field.TypeString, Value: value, Column: revision.FieldDescription, }) _node.Description = value } if value, ok := rc.mutation.GetType(); ok { _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ Type: field.TypeUint, Value: value, Column: revision.FieldType, }) _node.Type = value } if value, ok := rc.mutation.Applied(); ok { _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ Type: field.TypeInt, Value: value, Column: revision.FieldApplied, }) _node.Applied = value } if value, ok := rc.mutation.Total(); ok { _spec.Fields = append(_spec.Fields, 
&sqlgraph.FieldSpec{ Type: field.TypeInt, Value: value, Column: revision.FieldTotal, }) _node.Total = value } if value, ok := rc.mutation.ExecutedAt(); ok { _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ Type: field.TypeTime, Value: value, Column: revision.FieldExecutedAt, }) _node.ExecutedAt = value } if value, ok := rc.mutation.ExecutionTime(); ok { _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ Type: field.TypeInt64, Value: value, Column: revision.FieldExecutionTime, }) _node.ExecutionTime = value } if value, ok := rc.mutation.Error(); ok { _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ Type: field.TypeString, Value: value, Column: revision.FieldError, }) _node.Error = value } if value, ok := rc.mutation.Hash(); ok { _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ Type: field.TypeString, Value: value, Column: revision.FieldHash, }) _node.Hash = value } if value, ok := rc.mutation.PartialHashes(); ok { _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ Type: field.TypeJSON, Value: value, Column: revision.FieldPartialHashes, }) _node.PartialHashes = value } if value, ok := rc.mutation.OperatorVersion(); ok { _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ Type: field.TypeString, Value: value, Column: revision.FieldOperatorVersion, }) _node.OperatorVersion = value } return _node, _spec } // OnConflict allows configuring the `ON CONFLICT` / `ON DUPLICATE KEY` clause // of the `INSERT` statement. For example: // // client.Revision.Create(). // SetDescription(v). // OnConflict( // // Update the row with the new values // // the was proposed for insertion. // sql.ResolveWithNewValues(), // ). // // Override some of the fields with custom // // update values. // Update(func(u *ent.RevisionUpsert) { // SetDescription(v+v). // }). 
// Exec(ctx) func (rc *RevisionCreate) OnConflict(opts ...sql.ConflictOption) *RevisionUpsertOne { rc.conflict = opts return &RevisionUpsertOne{ create: rc, } } // OnConflictColumns calls `OnConflict` and configures the columns // as conflict target. Using this option is equivalent to using: // // client.Revision.Create(). // OnConflict(sql.ConflictColumns(columns...)). // Exec(ctx) func (rc *RevisionCreate) OnConflictColumns(columns ...string) *RevisionUpsertOne { rc.conflict = append(rc.conflict, sql.ConflictColumns(columns...)) return &RevisionUpsertOne{ create: rc, } } type ( // RevisionUpsertOne is the builder for "upsert"-ing // one Revision node. RevisionUpsertOne struct { create *RevisionCreate } // RevisionUpsert is the "OnConflict" setter. RevisionUpsert struct { *sql.UpdateSet } ) // SetDescription sets the "description" field. func (u *RevisionUpsert) SetDescription(v string) *RevisionUpsert { u.Set(revision.FieldDescription, v) return u } // UpdateDescription sets the "description" field to the value that was provided on create. func (u *RevisionUpsert) UpdateDescription() *RevisionUpsert { u.SetExcluded(revision.FieldDescription) return u } // SetType sets the "type" field. func (u *RevisionUpsert) SetType(v migrate.RevisionType) *RevisionUpsert { u.Set(revision.FieldType, v) return u } // UpdateType sets the "type" field to the value that was provided on create. func (u *RevisionUpsert) UpdateType() *RevisionUpsert { u.SetExcluded(revision.FieldType) return u } // AddType adds v to the "type" field. func (u *RevisionUpsert) AddType(v migrate.RevisionType) *RevisionUpsert { u.Add(revision.FieldType, v) return u } // SetApplied sets the "applied" field. func (u *RevisionUpsert) SetApplied(v int) *RevisionUpsert { u.Set(revision.FieldApplied, v) return u } // UpdateApplied sets the "applied" field to the value that was provided on create. 
func (u *RevisionUpsert) UpdateApplied() *RevisionUpsert { u.SetExcluded(revision.FieldApplied) return u } // AddApplied adds v to the "applied" field. func (u *RevisionUpsert) AddApplied(v int) *RevisionUpsert { u.Add(revision.FieldApplied, v) return u } // SetTotal sets the "total" field. func (u *RevisionUpsert) SetTotal(v int) *RevisionUpsert { u.Set(revision.FieldTotal, v) return u } // UpdateTotal sets the "total" field to the value that was provided on create. func (u *RevisionUpsert) UpdateTotal() *RevisionUpsert { u.SetExcluded(revision.FieldTotal) return u } // AddTotal adds v to the "total" field. func (u *RevisionUpsert) AddTotal(v int) *RevisionUpsert { u.Add(revision.FieldTotal, v) return u } // SetExecutedAt sets the "executed_at" field. func (u *RevisionUpsert) SetExecutedAt(v time.Time) *RevisionUpsert { u.Set(revision.FieldExecutedAt, v) return u } // UpdateExecutedAt sets the "executed_at" field to the value that was provided on create. func (u *RevisionUpsert) UpdateExecutedAt() *RevisionUpsert { u.SetExcluded(revision.FieldExecutedAt) return u } // SetExecutionTime sets the "execution_time" field. func (u *RevisionUpsert) SetExecutionTime(v time.Duration) *RevisionUpsert { u.Set(revision.FieldExecutionTime, v) return u } // UpdateExecutionTime sets the "execution_time" field to the value that was provided on create. func (u *RevisionUpsert) UpdateExecutionTime() *RevisionUpsert { u.SetExcluded(revision.FieldExecutionTime) return u } // AddExecutionTime adds v to the "execution_time" field. func (u *RevisionUpsert) AddExecutionTime(v time.Duration) *RevisionUpsert { u.Add(revision.FieldExecutionTime, v) return u } // SetError sets the "error" field. func (u *RevisionUpsert) SetError(v string) *RevisionUpsert { u.Set(revision.FieldError, v) return u } // UpdateError sets the "error" field to the value that was provided on create. 
func (u *RevisionUpsert) UpdateError() *RevisionUpsert { u.SetExcluded(revision.FieldError) return u } // ClearError clears the value of the "error" field. func (u *RevisionUpsert) ClearError() *RevisionUpsert { u.SetNull(revision.FieldError) return u } // SetHash sets the "hash" field. func (u *RevisionUpsert) SetHash(v string) *RevisionUpsert { u.Set(revision.FieldHash, v) return u } // UpdateHash sets the "hash" field to the value that was provided on create. func (u *RevisionUpsert) UpdateHash() *RevisionUpsert { u.SetExcluded(revision.FieldHash) return u } // SetPartialHashes sets the "partial_hashes" field. func (u *RevisionUpsert) SetPartialHashes(v []string) *RevisionUpsert { u.Set(revision.FieldPartialHashes, v) return u } // UpdatePartialHashes sets the "partial_hashes" field to the value that was provided on create. func (u *RevisionUpsert) UpdatePartialHashes() *RevisionUpsert { u.SetExcluded(revision.FieldPartialHashes) return u } // ClearPartialHashes clears the value of the "partial_hashes" field. func (u *RevisionUpsert) ClearPartialHashes() *RevisionUpsert { u.SetNull(revision.FieldPartialHashes) return u } // SetOperatorVersion sets the "operator_version" field. func (u *RevisionUpsert) SetOperatorVersion(v string) *RevisionUpsert { u.Set(revision.FieldOperatorVersion, v) return u } // UpdateOperatorVersion sets the "operator_version" field to the value that was provided on create. func (u *RevisionUpsert) UpdateOperatorVersion() *RevisionUpsert { u.SetExcluded(revision.FieldOperatorVersion) return u } // UpdateNewValues updates the mutable fields using the new values that were set on create except the ID field. // Using this option is equivalent to using: // // client.Revision.Create(). // OnConflict( // sql.ResolveWithNewValues(), // sql.ResolveWith(func(u *sql.UpdateSet) { // u.SetIgnore(revision.FieldID) // }), // ). 
// Exec(ctx) func (u *RevisionUpsertOne) UpdateNewValues() *RevisionUpsertOne { u.create.conflict = append(u.create.conflict, sql.ResolveWithNewValues()) u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(s *sql.UpdateSet) { if _, exists := u.create.mutation.ID(); exists { s.SetIgnore(revision.FieldID) } if _, exists := u.create.mutation.Description(); exists { s.SetIgnore(revision.FieldDescription) } if _, exists := u.create.mutation.ExecutedAt(); exists { s.SetIgnore(revision.FieldExecutedAt) } })) return u } // Ignore sets each column to itself in case of conflict. // Using this option is equivalent to using: // // client.Revision.Create(). // OnConflict(sql.ResolveWithIgnore()). // Exec(ctx) func (u *RevisionUpsertOne) Ignore() *RevisionUpsertOne { u.create.conflict = append(u.create.conflict, sql.ResolveWithIgnore()) return u } // DoNothing configures the conflict_action to `DO NOTHING`. // Supported only by SQLite and PostgreSQL. func (u *RevisionUpsertOne) DoNothing() *RevisionUpsertOne { u.create.conflict = append(u.create.conflict, sql.DoNothing()) return u } // Update allows overriding fields `UPDATE` values. See the RevisionCreate.OnConflict // documentation for more info. func (u *RevisionUpsertOne) Update(set func(*RevisionUpsert)) *RevisionUpsertOne { u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(update *sql.UpdateSet) { set(&RevisionUpsert{UpdateSet: update}) })) return u } // SetDescription sets the "description" field. func (u *RevisionUpsertOne) SetDescription(v string) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.SetDescription(v) }) } // UpdateDescription sets the "description" field to the value that was provided on create. func (u *RevisionUpsertOne) UpdateDescription() *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.UpdateDescription() }) } // SetType sets the "type" field. 
func (u *RevisionUpsertOne) SetType(v migrate.RevisionType) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.SetType(v) }) } // AddType adds v to the "type" field. func (u *RevisionUpsertOne) AddType(v migrate.RevisionType) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.AddType(v) }) } // UpdateType sets the "type" field to the value that was provided on create. func (u *RevisionUpsertOne) UpdateType() *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.UpdateType() }) } // SetApplied sets the "applied" field. func (u *RevisionUpsertOne) SetApplied(v int) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.SetApplied(v) }) } // AddApplied adds v to the "applied" field. func (u *RevisionUpsertOne) AddApplied(v int) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.AddApplied(v) }) } // UpdateApplied sets the "applied" field to the value that was provided on create. func (u *RevisionUpsertOne) UpdateApplied() *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.UpdateApplied() }) } // SetTotal sets the "total" field. func (u *RevisionUpsertOne) SetTotal(v int) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.SetTotal(v) }) } // AddTotal adds v to the "total" field. func (u *RevisionUpsertOne) AddTotal(v int) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.AddTotal(v) }) } // UpdateTotal sets the "total" field to the value that was provided on create. func (u *RevisionUpsertOne) UpdateTotal() *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.UpdateTotal() }) } // SetExecutedAt sets the "executed_at" field. func (u *RevisionUpsertOne) SetExecutedAt(v time.Time) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.SetExecutedAt(v) }) } // UpdateExecutedAt sets the "executed_at" field to the value that was provided on create. 
func (u *RevisionUpsertOne) UpdateExecutedAt() *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.UpdateExecutedAt() }) } // SetExecutionTime sets the "execution_time" field. func (u *RevisionUpsertOne) SetExecutionTime(v time.Duration) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.SetExecutionTime(v) }) } // AddExecutionTime adds v to the "execution_time" field. func (u *RevisionUpsertOne) AddExecutionTime(v time.Duration) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.AddExecutionTime(v) }) } // UpdateExecutionTime sets the "execution_time" field to the value that was provided on create. func (u *RevisionUpsertOne) UpdateExecutionTime() *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.UpdateExecutionTime() }) } // SetError sets the "error" field. func (u *RevisionUpsertOne) SetError(v string) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.SetError(v) }) } // UpdateError sets the "error" field to the value that was provided on create. func (u *RevisionUpsertOne) UpdateError() *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.UpdateError() }) } // ClearError clears the value of the "error" field. func (u *RevisionUpsertOne) ClearError() *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.ClearError() }) } // SetHash sets the "hash" field. func (u *RevisionUpsertOne) SetHash(v string) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.SetHash(v) }) } // UpdateHash sets the "hash" field to the value that was provided on create. func (u *RevisionUpsertOne) UpdateHash() *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.UpdateHash() }) } // SetPartialHashes sets the "partial_hashes" field. func (u *RevisionUpsertOne) SetPartialHashes(v []string) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.SetPartialHashes(v) }) } // UpdatePartialHashes sets the "partial_hashes" field to the value that was provided on create. 
func (u *RevisionUpsertOne) UpdatePartialHashes() *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.UpdatePartialHashes() }) } // ClearPartialHashes clears the value of the "partial_hashes" field. func (u *RevisionUpsertOne) ClearPartialHashes() *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.ClearPartialHashes() }) } // SetOperatorVersion sets the "operator_version" field. func (u *RevisionUpsertOne) SetOperatorVersion(v string) *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.SetOperatorVersion(v) }) } // UpdateOperatorVersion sets the "operator_version" field to the value that was provided on create. func (u *RevisionUpsertOne) UpdateOperatorVersion() *RevisionUpsertOne { return u.Update(func(s *RevisionUpsert) { s.UpdateOperatorVersion() }) } // Exec executes the query. func (u *RevisionUpsertOne) Exec(ctx context.Context) error { if len(u.create.conflict) == 0 { return errors.New("ent: missing options for RevisionCreate.OnConflict") } return u.create.Exec(ctx) } // ExecX is like Exec, but panics if an error occurs. func (u *RevisionUpsertOne) ExecX(ctx context.Context) { if err := u.create.Exec(ctx); err != nil { panic(err) } } // Exec executes the UPSERT query and returns the inserted/updated ID. func (u *RevisionUpsertOne) ID(ctx context.Context) (id string, err error) { if u.create.driver.Dialect() == dialect.MySQL { // In case of "ON CONFLICT", there is no way to get back non-numeric ID // fields from the database since MySQL does not support the RETURNING clause. return id, errors.New("ent: RevisionUpsertOne.ID is not supported by MySQL driver. Use RevisionUpsertOne.Exec instead") } node, err := u.create.Save(ctx) if err != nil { return id, err } return node.ID, nil } // IDX is like ID, but panics if an error occurs. 
func (u *RevisionUpsertOne) IDX(ctx context.Context) string { id, err := u.ID(ctx) if err != nil { panic(err) } return id } // RevisionCreateBulk is the builder for creating many Revision entities in bulk. type RevisionCreateBulk struct { config builders []*RevisionCreate conflict []sql.ConflictOption } // Save creates the Revision entities in the database. func (rcb *RevisionCreateBulk) Save(ctx context.Context) ([]*Revision, error) { specs := make([]*sqlgraph.CreateSpec, len(rcb.builders)) nodes := make([]*Revision, len(rcb.builders)) mutators := make([]Mutator, len(rcb.builders)) for i := range rcb.builders { func(i int, root context.Context) { builder := rcb.builders[i] builder.defaults() var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { mutation, ok := m.(*RevisionMutation) if !ok { return nil, fmt.Errorf("unexpected mutation type %T", m) } if err := builder.check(); err != nil { return nil, err } builder.mutation = mutation nodes[i], specs[i] = builder.createSpec() var err error if i < len(mutators)-1 { _, err = mutators[i+1].Mutate(root, rcb.builders[i+1].mutation) } else { spec := &sqlgraph.BatchCreateSpec{Nodes: specs} spec.OnConflict = rcb.conflict // Invoke the actual operation on the latest mutation in the chain. if err = sqlgraph.BatchCreate(ctx, rcb.driver, spec); err != nil { if sqlgraph.IsConstraintError(err) { err = &ConstraintError{msg: err.Error(), wrap: err} } } } if err != nil { return nil, err } mutation.id = &nodes[i].ID mutation.done = true return nodes[i], nil }) for i := len(builder.hooks) - 1; i >= 0; i-- { mut = builder.hooks[i](mut) } mutators[i] = mut }(i, ctx) } if len(mutators) > 0 { if _, err := mutators[0].Mutate(ctx, rcb.builders[0].mutation); err != nil { return nil, err } } return nodes, nil } // SaveX is like Save, but panics if an error occurs. 
func (rcb *RevisionCreateBulk) SaveX(ctx context.Context) []*Revision { v, err := rcb.Save(ctx) if err != nil { panic(err) } return v } // Exec executes the query. func (rcb *RevisionCreateBulk) Exec(ctx context.Context) error { _, err := rcb.Save(ctx) return err } // ExecX is like Exec, but panics if an error occurs. func (rcb *RevisionCreateBulk) ExecX(ctx context.Context) { if err := rcb.Exec(ctx); err != nil { panic(err) } } // OnConflict allows configuring the `ON CONFLICT` / `ON DUPLICATE KEY` clause // of the `INSERT` statement. For example: // // client.Revision.CreateBulk(builders...). // OnConflict( // // Update the row with the new values // // the was proposed for insertion. // sql.ResolveWithNewValues(), // ). // // Override some of the fields with custom // // update values. // Update(func(u *ent.RevisionUpsert) { // SetDescription(v+v). // }). // Exec(ctx) func (rcb *RevisionCreateBulk) OnConflict(opts ...sql.ConflictOption) *RevisionUpsertBulk { rcb.conflict = opts return &RevisionUpsertBulk{ create: rcb, } } // OnConflictColumns calls `OnConflict` and configures the columns // as conflict target. Using this option is equivalent to using: // // client.Revision.Create(). // OnConflict(sql.ConflictColumns(columns...)). // Exec(ctx) func (rcb *RevisionCreateBulk) OnConflictColumns(columns ...string) *RevisionUpsertBulk { rcb.conflict = append(rcb.conflict, sql.ConflictColumns(columns...)) return &RevisionUpsertBulk{ create: rcb, } } // RevisionUpsertBulk is the builder for "upsert"-ing // a bulk of Revision nodes. type RevisionUpsertBulk struct { create *RevisionCreateBulk } // UpdateNewValues updates the mutable fields using the new values that // were set on create. Using this option is equivalent to using: // // client.Revision.Create(). // OnConflict( // sql.ResolveWithNewValues(), // sql.ResolveWith(func(u *sql.UpdateSet) { // u.SetIgnore(revision.FieldID) // }), // ). 
// Exec(ctx) func (u *RevisionUpsertBulk) UpdateNewValues() *RevisionUpsertBulk { u.create.conflict = append(u.create.conflict, sql.ResolveWithNewValues()) u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(s *sql.UpdateSet) { for _, b := range u.create.builders { if _, exists := b.mutation.ID(); exists { s.SetIgnore(revision.FieldID) return } if _, exists := b.mutation.Description(); exists { s.SetIgnore(revision.FieldDescription) } if _, exists := b.mutation.ExecutedAt(); exists { s.SetIgnore(revision.FieldExecutedAt) } } })) return u } // Ignore sets each column to itself in case of conflict. // Using this option is equivalent to using: // // client.Revision.Create(). // OnConflict(sql.ResolveWithIgnore()). // Exec(ctx) func (u *RevisionUpsertBulk) Ignore() *RevisionUpsertBulk { u.create.conflict = append(u.create.conflict, sql.ResolveWithIgnore()) return u } // DoNothing configures the conflict_action to `DO NOTHING`. // Supported only by SQLite and PostgreSQL. func (u *RevisionUpsertBulk) DoNothing() *RevisionUpsertBulk { u.create.conflict = append(u.create.conflict, sql.DoNothing()) return u } // Update allows overriding fields `UPDATE` values. See the RevisionCreateBulk.OnConflict // documentation for more info. func (u *RevisionUpsertBulk) Update(set func(*RevisionUpsert)) *RevisionUpsertBulk { u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(update *sql.UpdateSet) { set(&RevisionUpsert{UpdateSet: update}) })) return u } // SetDescription sets the "description" field. func (u *RevisionUpsertBulk) SetDescription(v string) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.SetDescription(v) }) } // UpdateDescription sets the "description" field to the value that was provided on create. func (u *RevisionUpsertBulk) UpdateDescription() *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.UpdateDescription() }) } // SetType sets the "type" field. 
func (u *RevisionUpsertBulk) SetType(v migrate.RevisionType) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.SetType(v) }) } // AddType adds v to the "type" field. func (u *RevisionUpsertBulk) AddType(v migrate.RevisionType) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.AddType(v) }) } // UpdateType sets the "type" field to the value that was provided on create. func (u *RevisionUpsertBulk) UpdateType() *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.UpdateType() }) } // SetApplied sets the "applied" field. func (u *RevisionUpsertBulk) SetApplied(v int) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.SetApplied(v) }) } // AddApplied adds v to the "applied" field. func (u *RevisionUpsertBulk) AddApplied(v int) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.AddApplied(v) }) } // UpdateApplied sets the "applied" field to the value that was provided on create. func (u *RevisionUpsertBulk) UpdateApplied() *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.UpdateApplied() }) } // SetTotal sets the "total" field. func (u *RevisionUpsertBulk) SetTotal(v int) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.SetTotal(v) }) } // AddTotal adds v to the "total" field. func (u *RevisionUpsertBulk) AddTotal(v int) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.AddTotal(v) }) } // UpdateTotal sets the "total" field to the value that was provided on create. func (u *RevisionUpsertBulk) UpdateTotal() *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.UpdateTotal() }) } // SetExecutedAt sets the "executed_at" field. func (u *RevisionUpsertBulk) SetExecutedAt(v time.Time) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.SetExecutedAt(v) }) } // UpdateExecutedAt sets the "executed_at" field to the value that was provided on create. 
func (u *RevisionUpsertBulk) UpdateExecutedAt() *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.UpdateExecutedAt() }) } // SetExecutionTime sets the "execution_time" field. func (u *RevisionUpsertBulk) SetExecutionTime(v time.Duration) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.SetExecutionTime(v) }) } // AddExecutionTime adds v to the "execution_time" field. func (u *RevisionUpsertBulk) AddExecutionTime(v time.Duration) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.AddExecutionTime(v) }) } // UpdateExecutionTime sets the "execution_time" field to the value that was provided on create. func (u *RevisionUpsertBulk) UpdateExecutionTime() *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.UpdateExecutionTime() }) } // SetError sets the "error" field. func (u *RevisionUpsertBulk) SetError(v string) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.SetError(v) }) } // UpdateError sets the "error" field to the value that was provided on create. func (u *RevisionUpsertBulk) UpdateError() *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.UpdateError() }) } // ClearError clears the value of the "error" field. func (u *RevisionUpsertBulk) ClearError() *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.ClearError() }) } // SetHash sets the "hash" field. func (u *RevisionUpsertBulk) SetHash(v string) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.SetHash(v) }) } // UpdateHash sets the "hash" field to the value that was provided on create. func (u *RevisionUpsertBulk) UpdateHash() *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.UpdateHash() }) } // SetPartialHashes sets the "partial_hashes" field. 
func (u *RevisionUpsertBulk) SetPartialHashes(v []string) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.SetPartialHashes(v) }) } // UpdatePartialHashes sets the "partial_hashes" field to the value that was provided on create. func (u *RevisionUpsertBulk) UpdatePartialHashes() *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.UpdatePartialHashes() }) } // ClearPartialHashes clears the value of the "partial_hashes" field. func (u *RevisionUpsertBulk) ClearPartialHashes() *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.ClearPartialHashes() }) } // SetOperatorVersion sets the "operator_version" field. func (u *RevisionUpsertBulk) SetOperatorVersion(v string) *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.SetOperatorVersion(v) }) } // UpdateOperatorVersion sets the "operator_version" field to the value that was provided on create. func (u *RevisionUpsertBulk) UpdateOperatorVersion() *RevisionUpsertBulk { return u.Update(func(s *RevisionUpsert) { s.UpdateOperatorVersion() }) } // Exec executes the query. func (u *RevisionUpsertBulk) Exec(ctx context.Context) error { for i, b := range u.create.builders { if len(b.conflict) != 0 { return fmt.Errorf("ent: OnConflict was set for builder %d. Set it on the RevisionCreateBulk instead", i) } } if len(u.create.conflict) == 0 { return errors.New("ent: missing options for RevisionCreateBulk.OnConflict") } return u.create.Exec(ctx) } // ExecX is like Exec, but panics if an error occurs. func (u *RevisionUpsertBulk) ExecX(ctx context.Context) { if err := u.create.Exec(ctx); err != nil { panic(err) } } atlas-0.7.2/cmd/atlas/internal/migrate/ent/revision_delete.go000066400000000000000000000061471431455511600242510ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
// Code generated by entc, DO NOT EDIT. package ent import ( "context" "fmt" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/internal" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/predicate" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/revision" "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" ) // RevisionDelete is the builder for deleting a Revision entity. type RevisionDelete struct { config hooks []Hook mutation *RevisionMutation } // Where appends a list predicates to the RevisionDelete builder. func (rd *RevisionDelete) Where(ps ...predicate.Revision) *RevisionDelete { rd.mutation.Where(ps...) return rd } // Exec executes the deletion query and returns how many vertices were deleted. func (rd *RevisionDelete) Exec(ctx context.Context) (int, error) { var ( err error affected int ) if len(rd.hooks) == 0 { affected, err = rd.sqlExec(ctx) } else { var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { mutation, ok := m.(*RevisionMutation) if !ok { return nil, fmt.Errorf("unexpected mutation type %T", m) } rd.mutation = mutation affected, err = rd.sqlExec(ctx) mutation.done = true return affected, err }) for i := len(rd.hooks) - 1; i >= 0; i-- { if rd.hooks[i] == nil { return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") } mut = rd.hooks[i](mut) } if _, err := mut.Mutate(ctx, rd.mutation); err != nil { return 0, err } } return affected, err } // ExecX is like Exec, but panics if an error occurs. 
func (rd *RevisionDelete) ExecX(ctx context.Context) int { n, err := rd.Exec(ctx) if err != nil { panic(err) } return n } func (rd *RevisionDelete) sqlExec(ctx context.Context) (int, error) { _spec := &sqlgraph.DeleteSpec{ Node: &sqlgraph.NodeSpec{ Table: revision.Table, ID: &sqlgraph.FieldSpec{ Type: field.TypeString, Column: revision.FieldID, }, }, } _spec.Node.Schema = rd.schemaConfig.Revision ctx = internal.NewSchemaConfigContext(ctx, rd.schemaConfig) if ps := rd.mutation.predicates; len(ps) > 0 { _spec.Predicate = func(selector *sql.Selector) { for i := range ps { ps[i](selector) } } } affected, err := sqlgraph.DeleteNodes(ctx, rd.driver, _spec) if err != nil && sqlgraph.IsConstraintError(err) { err = &ConstraintError{msg: err.Error(), wrap: err} } return affected, err } // RevisionDeleteOne is the builder for deleting a single Revision entity. type RevisionDeleteOne struct { rd *RevisionDelete } // Exec executes the deletion query. func (rdo *RevisionDeleteOne) Exec(ctx context.Context) error { n, err := rdo.rd.Exec(ctx) switch { case err != nil: return err case n == 0: return &NotFoundError{revision.Label} default: return nil } } // ExecX is like Exec, but panics if an error occurs. func (rdo *RevisionDeleteOne) ExecX(ctx context.Context) { rdo.rd.ExecX(ctx) } atlas-0.7.2/cmd/atlas/internal/migrate/ent/revision_query.go000066400000000000000000000344141431455511600241520ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. 
package ent import ( "context" "fmt" "math" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/internal" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/predicate" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/revision" "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" ) // RevisionQuery is the builder for querying Revision entities. type RevisionQuery struct { config limit *int offset *int unique *bool order []OrderFunc fields []string predicates []predicate.Revision // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) } // Where adds a new predicate for the RevisionQuery builder. func (rq *RevisionQuery) Where(ps ...predicate.Revision) *RevisionQuery { rq.predicates = append(rq.predicates, ps...) return rq } // Limit adds a limit step to the query. func (rq *RevisionQuery) Limit(limit int) *RevisionQuery { rq.limit = &limit return rq } // Offset adds an offset step to the query. func (rq *RevisionQuery) Offset(offset int) *RevisionQuery { rq.offset = &offset return rq } // Unique configures the query builder to filter duplicate records on query. // By default, unique is set to true, and can be disabled using this method. func (rq *RevisionQuery) Unique(unique bool) *RevisionQuery { rq.unique = &unique return rq } // Order adds an order step to the query. func (rq *RevisionQuery) Order(o ...OrderFunc) *RevisionQuery { rq.order = append(rq.order, o...) return rq } // First returns the first Revision entity from the query. // Returns a *NotFoundError when no Revision was found. func (rq *RevisionQuery) First(ctx context.Context) (*Revision, error) { nodes, err := rq.Limit(1).All(ctx) if err != nil { return nil, err } if len(nodes) == 0 { return nil, &NotFoundError{revision.Label} } return nodes[0], nil } // FirstX is like First, but panics if an error occurs. 
func (rq *RevisionQuery) FirstX(ctx context.Context) *Revision { node, err := rq.First(ctx) if err != nil && !IsNotFound(err) { panic(err) } return node } // FirstID returns the first Revision ID from the query. // Returns a *NotFoundError when no Revision ID was found. func (rq *RevisionQuery) FirstID(ctx context.Context) (id string, err error) { var ids []string if ids, err = rq.Limit(1).IDs(ctx); err != nil { return } if len(ids) == 0 { err = &NotFoundError{revision.Label} return } return ids[0], nil } // FirstIDX is like FirstID, but panics if an error occurs. func (rq *RevisionQuery) FirstIDX(ctx context.Context) string { id, err := rq.FirstID(ctx) if err != nil && !IsNotFound(err) { panic(err) } return id } // Only returns a single Revision entity found by the query, ensuring it only returns one. // Returns a *NotSingularError when more than one Revision entity is found. // Returns a *NotFoundError when no Revision entities are found. func (rq *RevisionQuery) Only(ctx context.Context) (*Revision, error) { nodes, err := rq.Limit(2).All(ctx) if err != nil { return nil, err } switch len(nodes) { case 1: return nodes[0], nil case 0: return nil, &NotFoundError{revision.Label} default: return nil, &NotSingularError{revision.Label} } } // OnlyX is like Only, but panics if an error occurs. func (rq *RevisionQuery) OnlyX(ctx context.Context) *Revision { node, err := rq.Only(ctx) if err != nil { panic(err) } return node } // OnlyID is like Only, but returns the only Revision ID in the query. // Returns a *NotSingularError when more than one Revision ID is found. // Returns a *NotFoundError when no entities are found. func (rq *RevisionQuery) OnlyID(ctx context.Context) (id string, err error) { var ids []string if ids, err = rq.Limit(2).IDs(ctx); err != nil { return } switch len(ids) { case 1: id = ids[0] case 0: err = &NotFoundError{revision.Label} default: err = &NotSingularError{revision.Label} } return } // OnlyIDX is like OnlyID, but panics if an error occurs. 
func (rq *RevisionQuery) OnlyIDX(ctx context.Context) string { id, err := rq.OnlyID(ctx) if err != nil { panic(err) } return id } // All executes the query and returns a list of Revisions. func (rq *RevisionQuery) All(ctx context.Context) ([]*Revision, error) { if err := rq.prepareQuery(ctx); err != nil { return nil, err } return rq.sqlAll(ctx) } // AllX is like All, but panics if an error occurs. func (rq *RevisionQuery) AllX(ctx context.Context) []*Revision { nodes, err := rq.All(ctx) if err != nil { panic(err) } return nodes } // IDs executes the query and returns a list of Revision IDs. func (rq *RevisionQuery) IDs(ctx context.Context) ([]string, error) { var ids []string if err := rq.Select(revision.FieldID).Scan(ctx, &ids); err != nil { return nil, err } return ids, nil } // IDsX is like IDs, but panics if an error occurs. func (rq *RevisionQuery) IDsX(ctx context.Context) []string { ids, err := rq.IDs(ctx) if err != nil { panic(err) } return ids } // Count returns the count of the given query. func (rq *RevisionQuery) Count(ctx context.Context) (int, error) { if err := rq.prepareQuery(ctx); err != nil { return 0, err } return rq.sqlCount(ctx) } // CountX is like Count, but panics if an error occurs. func (rq *RevisionQuery) CountX(ctx context.Context) int { count, err := rq.Count(ctx) if err != nil { panic(err) } return count } // Exist returns true if the query has elements in the graph. func (rq *RevisionQuery) Exist(ctx context.Context) (bool, error) { if err := rq.prepareQuery(ctx); err != nil { return false, err } return rq.sqlExist(ctx) } // ExistX is like Exist, but panics if an error occurs. func (rq *RevisionQuery) ExistX(ctx context.Context) bool { exist, err := rq.Exist(ctx) if err != nil { panic(err) } return exist } // Clone returns a duplicate of the RevisionQuery builder, including all associated steps. It can be // used to prepare common query builders and use them differently after the clone is made. 
func (rq *RevisionQuery) Clone() *RevisionQuery { if rq == nil { return nil } return &RevisionQuery{ config: rq.config, limit: rq.limit, offset: rq.offset, order: append([]OrderFunc{}, rq.order...), predicates: append([]predicate.Revision{}, rq.predicates...), // clone intermediate query. sql: rq.sql.Clone(), path: rq.path, unique: rq.unique, } } // GroupBy is used to group vertices by one or more fields/columns. // It is often used with aggregate functions, like: count, max, mean, min, sum. // // Example: // // var v []struct { // Description string `json:"description,omitempty"` // Count int `json:"count,omitempty"` // } // // client.Revision.Query(). // GroupBy(revision.FieldDescription). // Aggregate(ent.Count()). // Scan(ctx, &v) func (rq *RevisionQuery) GroupBy(field string, fields ...string) *RevisionGroupBy { grbuild := &RevisionGroupBy{config: rq.config} grbuild.fields = append([]string{field}, fields...) grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) { if err := rq.prepareQuery(ctx); err != nil { return nil, err } return rq.sqlQuery(ctx), nil } grbuild.label = revision.Label grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan return grbuild } // Select allows the selection one or more fields/columns for the given query, // instead of selecting all fields in the entity. // // Example: // // var v []struct { // Description string `json:"description,omitempty"` // } // // client.Revision.Query(). // Select(revision.FieldDescription). // Scan(ctx, &v) func (rq *RevisionQuery) Select(fields ...string) *RevisionSelect { rq.fields = append(rq.fields, fields...) 
selbuild := &RevisionSelect{RevisionQuery: rq} selbuild.label = revision.Label selbuild.flds, selbuild.scan = &rq.fields, selbuild.Scan return selbuild } func (rq *RevisionQuery) prepareQuery(ctx context.Context) error { for _, f := range rq.fields { if !revision.ValidColumn(f) { return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} } } if rq.path != nil { prev, err := rq.path(ctx) if err != nil { return err } rq.sql = prev } return nil } func (rq *RevisionQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Revision, error) { var ( nodes = []*Revision{} _spec = rq.querySpec() ) _spec.ScanValues = func(columns []string) ([]interface{}, error) { return (*Revision).scanValues(nil, columns) } _spec.Assign = func(columns []string, values []interface{}) error { node := &Revision{config: rq.config} nodes = append(nodes, node) return node.assignValues(columns, values) } _spec.Node.Schema = rq.schemaConfig.Revision ctx = internal.NewSchemaConfigContext(ctx, rq.schemaConfig) for i := range hooks { hooks[i](ctx, _spec) } if err := sqlgraph.QueryNodes(ctx, rq.driver, _spec); err != nil { return nil, err } if len(nodes) == 0 { return nodes, nil } return nodes, nil } func (rq *RevisionQuery) sqlCount(ctx context.Context) (int, error) { _spec := rq.querySpec() _spec.Node.Schema = rq.schemaConfig.Revision ctx = internal.NewSchemaConfigContext(ctx, rq.schemaConfig) _spec.Node.Columns = rq.fields if len(rq.fields) > 0 { _spec.Unique = rq.unique != nil && *rq.unique } return sqlgraph.CountNodes(ctx, rq.driver, _spec) } func (rq *RevisionQuery) sqlExist(ctx context.Context) (bool, error) { n, err := rq.sqlCount(ctx) if err != nil { return false, fmt.Errorf("ent: check existence: %w", err) } return n > 0, nil } func (rq *RevisionQuery) querySpec() *sqlgraph.QuerySpec { _spec := &sqlgraph.QuerySpec{ Node: &sqlgraph.NodeSpec{ Table: revision.Table, Columns: revision.Columns, ID: &sqlgraph.FieldSpec{ Type: field.TypeString, Column: 
revision.FieldID, }, }, From: rq.sql, Unique: true, } if unique := rq.unique; unique != nil { _spec.Unique = *unique } if fields := rq.fields; len(fields) > 0 { _spec.Node.Columns = make([]string, 0, len(fields)) _spec.Node.Columns = append(_spec.Node.Columns, revision.FieldID) for i := range fields { if fields[i] != revision.FieldID { _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) } } } if ps := rq.predicates; len(ps) > 0 { _spec.Predicate = func(selector *sql.Selector) { for i := range ps { ps[i](selector) } } } if limit := rq.limit; limit != nil { _spec.Limit = *limit } if offset := rq.offset; offset != nil { _spec.Offset = *offset } if ps := rq.order; len(ps) > 0 { _spec.Order = func(selector *sql.Selector) { for i := range ps { ps[i](selector) } } } return _spec } func (rq *RevisionQuery) sqlQuery(ctx context.Context) *sql.Selector { builder := sql.Dialect(rq.driver.Dialect()) t1 := builder.Table(revision.Table) columns := rq.fields if len(columns) == 0 { columns = revision.Columns } selector := builder.Select(t1.Columns(columns...)...).From(t1) if rq.sql != nil { selector = rq.sql selector.Select(selector.Columns(columns...)...) } if rq.unique != nil && *rq.unique { selector.Distinct() } t1.Schema(rq.schemaConfig.Revision) ctx = internal.NewSchemaConfigContext(ctx, rq.schemaConfig) selector.WithContext(ctx) for _, p := range rq.predicates { p(selector) } for _, p := range rq.order { p(selector) } if offset := rq.offset; offset != nil { // limit is mandatory for offset clause. We start // with default value, and override it below if needed. selector.Offset(*offset).Limit(math.MaxInt32) } if limit := rq.limit; limit != nil { selector.Limit(*limit) } return selector } // RevisionGroupBy is the group-by builder for Revision entities. type RevisionGroupBy struct { config selector fields []string fns []AggregateFunc // intermediate query (i.e. traversal path). 
sql *sql.Selector path func(context.Context) (*sql.Selector, error) } // Aggregate adds the given aggregation functions to the group-by query. func (rgb *RevisionGroupBy) Aggregate(fns ...AggregateFunc) *RevisionGroupBy { rgb.fns = append(rgb.fns, fns...) return rgb } // Scan applies the group-by query and scans the result into the given value. func (rgb *RevisionGroupBy) Scan(ctx context.Context, v interface{}) error { query, err := rgb.path(ctx) if err != nil { return err } rgb.sql = query return rgb.sqlScan(ctx, v) } func (rgb *RevisionGroupBy) sqlScan(ctx context.Context, v interface{}) error { for _, f := range rgb.fields { if !revision.ValidColumn(f) { return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)} } } selector := rgb.sqlQuery() if err := selector.Err(); err != nil { return err } rows := &sql.Rows{} query, args := selector.Query() if err := rgb.driver.Query(ctx, query, args, rows); err != nil { return err } defer rows.Close() return sql.ScanSlice(rows, v) } func (rgb *RevisionGroupBy) sqlQuery() *sql.Selector { selector := rgb.sql.Select() aggregation := make([]string, 0, len(rgb.fns)) for _, fn := range rgb.fns { aggregation = append(aggregation, fn(selector)) } // If no columns were selected in a custom aggregation function, the default // selection is the fields used for "group-by", and the aggregation functions. if len(selector.SelectedColumns()) == 0 { columns := make([]string, 0, len(rgb.fields)+len(rgb.fns)) for _, f := range rgb.fields { columns = append(columns, selector.C(f)) } columns = append(columns, aggregation...) selector.Select(columns...) } return selector.GroupBy(selector.Columns(rgb.fields...)...) } // RevisionSelect is the builder for selecting fields of Revision entities. type RevisionSelect struct { *RevisionQuery selector // intermediate query (i.e. traversal path). sql *sql.Selector } // Scan applies the selector query and scans the result into the given value. 
func (rs *RevisionSelect) Scan(ctx context.Context, v interface{}) error { if err := rs.prepareQuery(ctx); err != nil { return err } rs.sql = rs.RevisionQuery.sqlQuery(ctx) return rs.sqlScan(ctx, v) } func (rs *RevisionSelect) sqlScan(ctx context.Context, v interface{}) error { rows := &sql.Rows{} query, args := rs.sql.Query() if err := rs.driver.Query(ctx, query, args, rows); err != nil { return err } defer rows.Close() return sql.ScanSlice(rows, v) } atlas-0.7.2/cmd/atlas/internal/migrate/ent/revision_update.go000066400000000000000000000477131431455511600242750ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package ent import ( "context" "errors" "fmt" "time" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/internal" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/predicate" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/revision" "ariga.io/atlas/sql/migrate" "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" ) // RevisionUpdate is the builder for updating Revision entities. type RevisionUpdate struct { config hooks []Hook mutation *RevisionMutation } // Where appends a list predicates to the RevisionUpdate builder. func (ru *RevisionUpdate) Where(ps ...predicate.Revision) *RevisionUpdate { ru.mutation.Where(ps...) return ru } // SetType sets the "type" field. func (ru *RevisionUpdate) SetType(mt migrate.RevisionType) *RevisionUpdate { ru.mutation.ResetType() ru.mutation.SetType(mt) return ru } // SetNillableType sets the "type" field if the given value is not nil. func (ru *RevisionUpdate) SetNillableType(mt *migrate.RevisionType) *RevisionUpdate { if mt != nil { ru.SetType(*mt) } return ru } // AddType adds mt to the "type" field. 
func (ru *RevisionUpdate) AddType(mt migrate.RevisionType) *RevisionUpdate { ru.mutation.AddType(mt) return ru } // SetApplied sets the "applied" field. func (ru *RevisionUpdate) SetApplied(i int) *RevisionUpdate { ru.mutation.ResetApplied() ru.mutation.SetApplied(i) return ru } // SetNillableApplied sets the "applied" field if the given value is not nil. func (ru *RevisionUpdate) SetNillableApplied(i *int) *RevisionUpdate { if i != nil { ru.SetApplied(*i) } return ru } // AddApplied adds i to the "applied" field. func (ru *RevisionUpdate) AddApplied(i int) *RevisionUpdate { ru.mutation.AddApplied(i) return ru } // SetTotal sets the "total" field. func (ru *RevisionUpdate) SetTotal(i int) *RevisionUpdate { ru.mutation.ResetTotal() ru.mutation.SetTotal(i) return ru } // SetNillableTotal sets the "total" field if the given value is not nil. func (ru *RevisionUpdate) SetNillableTotal(i *int) *RevisionUpdate { if i != nil { ru.SetTotal(*i) } return ru } // AddTotal adds i to the "total" field. func (ru *RevisionUpdate) AddTotal(i int) *RevisionUpdate { ru.mutation.AddTotal(i) return ru } // SetExecutionTime sets the "execution_time" field. func (ru *RevisionUpdate) SetExecutionTime(t time.Duration) *RevisionUpdate { ru.mutation.ResetExecutionTime() ru.mutation.SetExecutionTime(t) return ru } // AddExecutionTime adds t to the "execution_time" field. func (ru *RevisionUpdate) AddExecutionTime(t time.Duration) *RevisionUpdate { ru.mutation.AddExecutionTime(t) return ru } // SetError sets the "error" field. func (ru *RevisionUpdate) SetError(s string) *RevisionUpdate { ru.mutation.SetError(s) return ru } // SetNillableError sets the "error" field if the given value is not nil. func (ru *RevisionUpdate) SetNillableError(s *string) *RevisionUpdate { if s != nil { ru.SetError(*s) } return ru } // ClearError clears the value of the "error" field. func (ru *RevisionUpdate) ClearError() *RevisionUpdate { ru.mutation.ClearError() return ru } // SetHash sets the "hash" field. 
func (ru *RevisionUpdate) SetHash(s string) *RevisionUpdate { ru.mutation.SetHash(s) return ru } // SetPartialHashes sets the "partial_hashes" field. func (ru *RevisionUpdate) SetPartialHashes(s []string) *RevisionUpdate { ru.mutation.SetPartialHashes(s) return ru } // ClearPartialHashes clears the value of the "partial_hashes" field. func (ru *RevisionUpdate) ClearPartialHashes() *RevisionUpdate { ru.mutation.ClearPartialHashes() return ru } // SetOperatorVersion sets the "operator_version" field. func (ru *RevisionUpdate) SetOperatorVersion(s string) *RevisionUpdate { ru.mutation.SetOperatorVersion(s) return ru } // Mutation returns the RevisionMutation object of the builder. func (ru *RevisionUpdate) Mutation() *RevisionMutation { return ru.mutation } // Save executes the query and returns the number of nodes affected by the update operation. func (ru *RevisionUpdate) Save(ctx context.Context) (int, error) { var ( err error affected int ) if len(ru.hooks) == 0 { if err = ru.check(); err != nil { return 0, err } affected, err = ru.sqlSave(ctx) } else { var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { mutation, ok := m.(*RevisionMutation) if !ok { return nil, fmt.Errorf("unexpected mutation type %T", m) } if err = ru.check(); err != nil { return 0, err } ru.mutation = mutation affected, err = ru.sqlSave(ctx) mutation.done = true return affected, err }) for i := len(ru.hooks) - 1; i >= 0; i-- { if ru.hooks[i] == nil { return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") } mut = ru.hooks[i](mut) } if _, err := mut.Mutate(ctx, ru.mutation); err != nil { return 0, err } } return affected, err } // SaveX is like Save, but panics if an error occurs. func (ru *RevisionUpdate) SaveX(ctx context.Context) int { affected, err := ru.Save(ctx) if err != nil { panic(err) } return affected } // Exec executes the query. 
func (ru *RevisionUpdate) Exec(ctx context.Context) error { _, err := ru.Save(ctx) return err } // ExecX is like Exec, but panics if an error occurs. func (ru *RevisionUpdate) ExecX(ctx context.Context) { if err := ru.Exec(ctx); err != nil { panic(err) } } // check runs all checks and user-defined validators on the builder. func (ru *RevisionUpdate) check() error { if v, ok := ru.mutation.Applied(); ok { if err := revision.AppliedValidator(v); err != nil { return &ValidationError{Name: "applied", err: fmt.Errorf(`ent: validator failed for field "Revision.applied": %w`, err)} } } if v, ok := ru.mutation.Total(); ok { if err := revision.TotalValidator(v); err != nil { return &ValidationError{Name: "total", err: fmt.Errorf(`ent: validator failed for field "Revision.total": %w`, err)} } } return nil } func (ru *RevisionUpdate) sqlSave(ctx context.Context) (n int, err error) { _spec := &sqlgraph.UpdateSpec{ Node: &sqlgraph.NodeSpec{ Table: revision.Table, Columns: revision.Columns, ID: &sqlgraph.FieldSpec{ Type: field.TypeString, Column: revision.FieldID, }, }, } if ps := ru.mutation.predicates; len(ps) > 0 { _spec.Predicate = func(selector *sql.Selector) { for i := range ps { ps[i](selector) } } } if value, ok := ru.mutation.GetType(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeUint, Value: value, Column: revision.FieldType, }) } if value, ok := ru.mutation.AddedType(); ok { _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ Type: field.TypeUint, Value: value, Column: revision.FieldType, }) } if value, ok := ru.mutation.Applied(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeInt, Value: value, Column: revision.FieldApplied, }) } if value, ok := ru.mutation.AddedApplied(); ok { _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ Type: field.TypeInt, Value: value, Column: revision.FieldApplied, }) } if value, ok := ru.mutation.Total(); ok { _spec.Fields.Set = 
append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeInt, Value: value, Column: revision.FieldTotal, }) } if value, ok := ru.mutation.AddedTotal(); ok { _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ Type: field.TypeInt, Value: value, Column: revision.FieldTotal, }) } if value, ok := ru.mutation.ExecutionTime(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeInt64, Value: value, Column: revision.FieldExecutionTime, }) } if value, ok := ru.mutation.AddedExecutionTime(); ok { _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ Type: field.TypeInt64, Value: value, Column: revision.FieldExecutionTime, }) } if value, ok := ru.mutation.Error(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeString, Value: value, Column: revision.FieldError, }) } if ru.mutation.ErrorCleared() { _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ Type: field.TypeString, Column: revision.FieldError, }) } if value, ok := ru.mutation.Hash(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeString, Value: value, Column: revision.FieldHash, }) } if value, ok := ru.mutation.PartialHashes(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeJSON, Value: value, Column: revision.FieldPartialHashes, }) } if ru.mutation.PartialHashesCleared() { _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ Type: field.TypeJSON, Column: revision.FieldPartialHashes, }) } if value, ok := ru.mutation.OperatorVersion(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeString, Value: value, Column: revision.FieldOperatorVersion, }) } _spec.Node.Schema = ru.schemaConfig.Revision ctx = internal.NewSchemaConfigContext(ctx, ru.schemaConfig) if n, err = sqlgraph.UpdateNodes(ctx, ru.driver, _spec); err != nil { if _, ok := err.(*sqlgraph.NotFoundError); ok { err = 
&NotFoundError{revision.Label} } else if sqlgraph.IsConstraintError(err) { err = &ConstraintError{msg: err.Error(), wrap: err} } return 0, err } return n, nil } // RevisionUpdateOne is the builder for updating a single Revision entity. type RevisionUpdateOne struct { config fields []string hooks []Hook mutation *RevisionMutation } // SetType sets the "type" field. func (ruo *RevisionUpdateOne) SetType(mt migrate.RevisionType) *RevisionUpdateOne { ruo.mutation.ResetType() ruo.mutation.SetType(mt) return ruo } // SetNillableType sets the "type" field if the given value is not nil. func (ruo *RevisionUpdateOne) SetNillableType(mt *migrate.RevisionType) *RevisionUpdateOne { if mt != nil { ruo.SetType(*mt) } return ruo } // AddType adds mt to the "type" field. func (ruo *RevisionUpdateOne) AddType(mt migrate.RevisionType) *RevisionUpdateOne { ruo.mutation.AddType(mt) return ruo } // SetApplied sets the "applied" field. func (ruo *RevisionUpdateOne) SetApplied(i int) *RevisionUpdateOne { ruo.mutation.ResetApplied() ruo.mutation.SetApplied(i) return ruo } // SetNillableApplied sets the "applied" field if the given value is not nil. func (ruo *RevisionUpdateOne) SetNillableApplied(i *int) *RevisionUpdateOne { if i != nil { ruo.SetApplied(*i) } return ruo } // AddApplied adds i to the "applied" field. func (ruo *RevisionUpdateOne) AddApplied(i int) *RevisionUpdateOne { ruo.mutation.AddApplied(i) return ruo } // SetTotal sets the "total" field. func (ruo *RevisionUpdateOne) SetTotal(i int) *RevisionUpdateOne { ruo.mutation.ResetTotal() ruo.mutation.SetTotal(i) return ruo } // SetNillableTotal sets the "total" field if the given value is not nil. func (ruo *RevisionUpdateOne) SetNillableTotal(i *int) *RevisionUpdateOne { if i != nil { ruo.SetTotal(*i) } return ruo } // AddTotal adds i to the "total" field. func (ruo *RevisionUpdateOne) AddTotal(i int) *RevisionUpdateOne { ruo.mutation.AddTotal(i) return ruo } // SetExecutionTime sets the "execution_time" field. 
func (ruo *RevisionUpdateOne) SetExecutionTime(t time.Duration) *RevisionUpdateOne { ruo.mutation.ResetExecutionTime() ruo.mutation.SetExecutionTime(t) return ruo } // AddExecutionTime adds t to the "execution_time" field. func (ruo *RevisionUpdateOne) AddExecutionTime(t time.Duration) *RevisionUpdateOne { ruo.mutation.AddExecutionTime(t) return ruo } // SetError sets the "error" field. func (ruo *RevisionUpdateOne) SetError(s string) *RevisionUpdateOne { ruo.mutation.SetError(s) return ruo } // SetNillableError sets the "error" field if the given value is not nil. func (ruo *RevisionUpdateOne) SetNillableError(s *string) *RevisionUpdateOne { if s != nil { ruo.SetError(*s) } return ruo } // ClearError clears the value of the "error" field. func (ruo *RevisionUpdateOne) ClearError() *RevisionUpdateOne { ruo.mutation.ClearError() return ruo } // SetHash sets the "hash" field. func (ruo *RevisionUpdateOne) SetHash(s string) *RevisionUpdateOne { ruo.mutation.SetHash(s) return ruo } // SetPartialHashes sets the "partial_hashes" field. func (ruo *RevisionUpdateOne) SetPartialHashes(s []string) *RevisionUpdateOne { ruo.mutation.SetPartialHashes(s) return ruo } // ClearPartialHashes clears the value of the "partial_hashes" field. func (ruo *RevisionUpdateOne) ClearPartialHashes() *RevisionUpdateOne { ruo.mutation.ClearPartialHashes() return ruo } // SetOperatorVersion sets the "operator_version" field. func (ruo *RevisionUpdateOne) SetOperatorVersion(s string) *RevisionUpdateOne { ruo.mutation.SetOperatorVersion(s) return ruo } // Mutation returns the RevisionMutation object of the builder. func (ruo *RevisionUpdateOne) Mutation() *RevisionMutation { return ruo.mutation } // Select allows selecting one or more fields (columns) of the returned entity. // The default is selecting all fields defined in the entity schema. func (ruo *RevisionUpdateOne) Select(field string, fields ...string) *RevisionUpdateOne { ruo.fields = append([]string{field}, fields...) 
return ruo } // Save executes the query and returns the updated Revision entity. func (ruo *RevisionUpdateOne) Save(ctx context.Context) (*Revision, error) { var ( err error node *Revision ) if len(ruo.hooks) == 0 { if err = ruo.check(); err != nil { return nil, err } node, err = ruo.sqlSave(ctx) } else { var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { mutation, ok := m.(*RevisionMutation) if !ok { return nil, fmt.Errorf("unexpected mutation type %T", m) } if err = ruo.check(); err != nil { return nil, err } ruo.mutation = mutation node, err = ruo.sqlSave(ctx) mutation.done = true return node, err }) for i := len(ruo.hooks) - 1; i >= 0; i-- { if ruo.hooks[i] == nil { return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") } mut = ruo.hooks[i](mut) } v, err := mut.Mutate(ctx, ruo.mutation) if err != nil { return nil, err } nv, ok := v.(*Revision) if !ok { return nil, fmt.Errorf("unexpected node type %T returned from RevisionMutation", v) } node = nv } return node, err } // SaveX is like Save, but panics if an error occurs. func (ruo *RevisionUpdateOne) SaveX(ctx context.Context) *Revision { node, err := ruo.Save(ctx) if err != nil { panic(err) } return node } // Exec executes the query on the entity. func (ruo *RevisionUpdateOne) Exec(ctx context.Context) error { _, err := ruo.Save(ctx) return err } // ExecX is like Exec, but panics if an error occurs. func (ruo *RevisionUpdateOne) ExecX(ctx context.Context) { if err := ruo.Exec(ctx); err != nil { panic(err) } } // check runs all checks and user-defined validators on the builder. 
func (ruo *RevisionUpdateOne) check() error { if v, ok := ruo.mutation.Applied(); ok { if err := revision.AppliedValidator(v); err != nil { return &ValidationError{Name: "applied", err: fmt.Errorf(`ent: validator failed for field "Revision.applied": %w`, err)} } } if v, ok := ruo.mutation.Total(); ok { if err := revision.TotalValidator(v); err != nil { return &ValidationError{Name: "total", err: fmt.Errorf(`ent: validator failed for field "Revision.total": %w`, err)} } } return nil } func (ruo *RevisionUpdateOne) sqlSave(ctx context.Context) (_node *Revision, err error) { _spec := &sqlgraph.UpdateSpec{ Node: &sqlgraph.NodeSpec{ Table: revision.Table, Columns: revision.Columns, ID: &sqlgraph.FieldSpec{ Type: field.TypeString, Column: revision.FieldID, }, }, } id, ok := ruo.mutation.ID() if !ok { return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Revision.id" for update`)} } _spec.Node.ID.Value = id if fields := ruo.fields; len(fields) > 0 { _spec.Node.Columns = make([]string, 0, len(fields)) _spec.Node.Columns = append(_spec.Node.Columns, revision.FieldID) for _, f := range fields { if !revision.ValidColumn(f) { return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} } if f != revision.FieldID { _spec.Node.Columns = append(_spec.Node.Columns, f) } } } if ps := ruo.mutation.predicates; len(ps) > 0 { _spec.Predicate = func(selector *sql.Selector) { for i := range ps { ps[i](selector) } } } if value, ok := ruo.mutation.GetType(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeUint, Value: value, Column: revision.FieldType, }) } if value, ok := ruo.mutation.AddedType(); ok { _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ Type: field.TypeUint, Value: value, Column: revision.FieldType, }) } if value, ok := ruo.mutation.Applied(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeInt, Value: value, Column: 
revision.FieldApplied, }) } if value, ok := ruo.mutation.AddedApplied(); ok { _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ Type: field.TypeInt, Value: value, Column: revision.FieldApplied, }) } if value, ok := ruo.mutation.Total(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeInt, Value: value, Column: revision.FieldTotal, }) } if value, ok := ruo.mutation.AddedTotal(); ok { _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ Type: field.TypeInt, Value: value, Column: revision.FieldTotal, }) } if value, ok := ruo.mutation.ExecutionTime(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeInt64, Value: value, Column: revision.FieldExecutionTime, }) } if value, ok := ruo.mutation.AddedExecutionTime(); ok { _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ Type: field.TypeInt64, Value: value, Column: revision.FieldExecutionTime, }) } if value, ok := ruo.mutation.Error(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeString, Value: value, Column: revision.FieldError, }) } if ruo.mutation.ErrorCleared() { _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ Type: field.TypeString, Column: revision.FieldError, }) } if value, ok := ruo.mutation.Hash(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeString, Value: value, Column: revision.FieldHash, }) } if value, ok := ruo.mutation.PartialHashes(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeJSON, Value: value, Column: revision.FieldPartialHashes, }) } if ruo.mutation.PartialHashesCleared() { _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ Type: field.TypeJSON, Column: revision.FieldPartialHashes, }) } if value, ok := ruo.mutation.OperatorVersion(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeString, 
Value: value, Column: revision.FieldOperatorVersion, }) } _spec.Node.Schema = ruo.schemaConfig.Revision ctx = internal.NewSchemaConfigContext(ctx, ruo.schemaConfig) _node = &Revision{config: ruo.config} _spec.Assign = _node.assignValues _spec.ScanValues = _node.scanValues if err = sqlgraph.UpdateNode(ctx, ruo.driver, _spec); err != nil { if _, ok := err.(*sqlgraph.NotFoundError); ok { err = &NotFoundError{revision.Label} } else if sqlgraph.IsConstraintError(err) { err = &ConstraintError{msg: err.Error(), wrap: err} } return nil, err } return _node, nil } atlas-0.7.2/cmd/atlas/internal/migrate/ent/runtime.go000066400000000000000000000034561431455511600225540ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package ent import ( "ariga.io/atlas/cmd/atlas/internal/migrate/ent/revision" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/schema" "ariga.io/atlas/sql/migrate" ) // The init function reads all schema descriptors with runtime code // (default values, validators, hooks and policies) and stitches it // to their package variables. func init() { revisionFields := schema.Revision{}.Fields() _ = revisionFields // revisionDescType is the schema descriptor for type field. revisionDescType := revisionFields[2].Descriptor() // revision.DefaultType holds the default value on creation for the type field. revision.DefaultType = migrate.RevisionType(revisionDescType.Default.(uint)) // revisionDescApplied is the schema descriptor for applied field. revisionDescApplied := revisionFields[3].Descriptor() // revision.DefaultApplied holds the default value on creation for the applied field. revision.DefaultApplied = revisionDescApplied.Default.(int) // revision.AppliedValidator is a validator for the "applied" field. It is called by the builders before save. 
revision.AppliedValidator = revisionDescApplied.Validators[0].(func(int) error) // revisionDescTotal is the schema descriptor for total field. revisionDescTotal := revisionFields[4].Descriptor() // revision.DefaultTotal holds the default value on creation for the total field. revision.DefaultTotal = revisionDescTotal.Default.(int) // revision.TotalValidator is a validator for the "total" field. It is called by the builders before save. revision.TotalValidator = revisionDescTotal.Validators[0].(func(int) error) } atlas-0.7.2/cmd/atlas/internal/migrate/ent/runtime/000077500000000000000000000000001431455511600222155ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/migrate/ent/runtime/runtime.go000066400000000000000000000010401431455511600242220ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package runtime // The schema-stitching logic is generated in ariga.io/atlas/cmd/atlas/internal/migrate/ent/runtime.go const ( Version = "v0.11.1" // Version of ent codegen. Sum = "h1:im67R+2W3Nee2bNS2YnoYz8oAF0Qz4AOlIvKRIAEISY=" // Sum of ent codegen. ) atlas-0.7.2/cmd/atlas/internal/migrate/ent/schema/000077500000000000000000000000001431455511600217725ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/migrate/ent/schema/revision.go000066400000000000000000000026471431455511600241700ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package schema import ( "time" "ariga.io/atlas/sql/migrate" "entgo.io/ent" "entgo.io/ent/dialect/entsql" "entgo.io/ent/schema" "entgo.io/ent/schema/field" ) // DefaultRevisionSchema is the default schema for storing revisions table. 
const DefaultRevisionSchema = "atlas_schema_revisions" // Revision holds the schema definition for the Revision entity. type Revision struct { ent.Schema } // Fields of the Revision. func (Revision) Fields() []ent.Field { return []ent.Field{ field.String("id"). StorageKey("version"). Immutable(), field.String("description"). Immutable(), field.Uint("type"). GoType(migrate.RevisionType(0)). Default(uint(migrate.RevisionTypeExecute)), field.Int("applied"). NonNegative(). Default(0), field.Int("total"). NonNegative(). Default(0), field.Time("executed_at"). Immutable(), field.Int64("execution_time"). GoType(time.Duration(0)), field.Text("error"). Optional(), field.String("hash"), field.Strings("partial_hashes"). Optional(), field.String("operator_version"), } } // Annotations of the Revision. func (Revision) Annotations() []schema.Annotation { return []schema.Annotation{ entsql.Annotation{Table: DefaultRevisionSchema}, } } atlas-0.7.2/cmd/atlas/internal/migrate/ent/template/000077500000000000000000000000001431455511600223455ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/migrate/ent/template/convert.tmpl000066400000000000000000000022201431455511600247170ustar00rootroot00000000000000{{/* gotype: entgo.io/ent/entc/gen.Graph */}} {{ define "convert" }} {{ $pkg := base $.Config.Package }} {{ template "header" $ }} import "ariga.io/atlas/sql/migrate" {{ range $n := $.Nodes }} {{ if eq $n.Name "Revision" }} {{ $builder := $n.CreateName }} {{ $receiver := receiver $builder }} // SetRevision takes the values for each field from the given migrate.Revision. func ({{ $receiver }} *{{ $builder }}) SetRevision(rev *migrate.Revision) *{{ $builder }} { {{ $receiver }}.SetID(rev.Version) {{- range $f := $n.Fields }} {{ $receiver }}.Set{{ $f.StructField }}(rev.{{ $f.StructField }}) {{- end }} return {{ $receiver }} } // AtlasRevision returns an migrate.Revision from the current Revision. 
func({{ $n.Receiver}} *Revision) AtlasRevision() *migrate.Revision { return &migrate.Revision{ Version: r.ID, {{- range $f := $n.Fields }} {{ $f.StructField }}: r.{{ $f.StructField }}, {{- end }} } } {{ end }} {{ end }} {{ end }} atlas-0.7.2/cmd/atlas/internal/migrate/ent/tx.go000066400000000000000000000163301431455511600215170ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Code generated by entc, DO NOT EDIT. package ent import ( "context" stdsql "database/sql" "fmt" "sync" "entgo.io/ent/dialect" ) // Tx is a transactional client that is created by calling Client.Tx(). type Tx struct { config // Revision is the client for interacting with the Revision builders. Revision *RevisionClient // lazily loaded. client *Client clientOnce sync.Once // completion callbacks. mu sync.Mutex onCommit []CommitHook onRollback []RollbackHook // ctx lives for the life of the transaction. It is // the same context used by the underlying connection. ctx context.Context } type ( // Committer is the interface that wraps the Commit method. Committer interface { Commit(context.Context, *Tx) error } // The CommitFunc type is an adapter to allow the use of ordinary // function as a Committer. If f is a function with the appropriate // signature, CommitFunc(f) is a Committer that calls f. CommitFunc func(context.Context, *Tx) error // CommitHook defines the "commit middleware". A function that gets a Committer // and returns a Committer. For example: // // hook := func(next ent.Committer) ent.Committer { // return ent.CommitFunc(func(ctx context.Context, tx *ent.Tx) error { // // Do some stuff before. // if err := next.Commit(ctx, tx); err != nil { // return err // } // // Do some stuff after. // return nil // }) // } // CommitHook func(Committer) Committer ) // Commit calls f(ctx, m). 
func (f CommitFunc) Commit(ctx context.Context, tx *Tx) error { return f(ctx, tx) } // Commit commits the transaction. func (tx *Tx) Commit() error { txDriver := tx.config.driver.(*txDriver) var fn Committer = CommitFunc(func(context.Context, *Tx) error { return txDriver.tx.Commit() }) tx.mu.Lock() hooks := append([]CommitHook(nil), tx.onCommit...) tx.mu.Unlock() for i := len(hooks) - 1; i >= 0; i-- { fn = hooks[i](fn) } return fn.Commit(tx.ctx, tx) } // OnCommit adds a hook to call on commit. func (tx *Tx) OnCommit(f CommitHook) { tx.mu.Lock() defer tx.mu.Unlock() tx.onCommit = append(tx.onCommit, f) } type ( // Rollbacker is the interface that wraps the Rollback method. Rollbacker interface { Rollback(context.Context, *Tx) error } // The RollbackFunc type is an adapter to allow the use of ordinary // function as a Rollbacker. If f is a function with the appropriate // signature, RollbackFunc(f) is a Rollbacker that calls f. RollbackFunc func(context.Context, *Tx) error // RollbackHook defines the "rollback middleware". A function that gets a Rollbacker // and returns a Rollbacker. For example: // // hook := func(next ent.Rollbacker) ent.Rollbacker { // return ent.RollbackFunc(func(ctx context.Context, tx *ent.Tx) error { // // Do some stuff before. // if err := next.Rollback(ctx, tx); err != nil { // return err // } // // Do some stuff after. // return nil // }) // } // RollbackHook func(Rollbacker) Rollbacker ) // Rollback calls f(ctx, m). func (f RollbackFunc) Rollback(ctx context.Context, tx *Tx) error { return f(ctx, tx) } // Rollback rollbacks the transaction. func (tx *Tx) Rollback() error { txDriver := tx.config.driver.(*txDriver) var fn Rollbacker = RollbackFunc(func(context.Context, *Tx) error { return txDriver.tx.Rollback() }) tx.mu.Lock() hooks := append([]RollbackHook(nil), tx.onRollback...) tx.mu.Unlock() for i := len(hooks) - 1; i >= 0; i-- { fn = hooks[i](fn) } return fn.Rollback(tx.ctx, tx) } // OnRollback adds a hook to call on rollback. 
func (tx *Tx) OnRollback(f RollbackHook) { tx.mu.Lock() defer tx.mu.Unlock() tx.onRollback = append(tx.onRollback, f) } // Client returns a Client that binds to current transaction. func (tx *Tx) Client() *Client { tx.clientOnce.Do(func() { tx.client = &Client{config: tx.config} tx.client.init() }) return tx.client } func (tx *Tx) init() { tx.Revision = NewRevisionClient(tx.config) } // txDriver wraps the given dialect.Tx with a nop dialect.Driver implementation. // The idea is to support transactions without adding any extra code to the builders. // When a builder calls to driver.Tx(), it gets the same dialect.Tx instance. // Commit and Rollback are nop for the internal builders and the user must call one // of them in order to commit or rollback the transaction. // // If a closed transaction is embedded in one of the generated entities, and the entity // applies a query, for example: Revision.QueryXXX(), the query will be executed // through the driver which created this transaction. // // Note that txDriver is not goroutine safe. type txDriver struct { // the driver we started the transaction from. drv dialect.Driver // tx is the underlying transaction. tx dialect.Tx } // newTx creates a new transactional driver. func newTx(ctx context.Context, drv dialect.Driver) (*txDriver, error) { tx, err := drv.Tx(ctx) if err != nil { return nil, err } return &txDriver{tx: tx, drv: drv}, nil } // Tx returns the transaction wrapper (txDriver) to avoid Commit or Rollback calls // from the internal builders. Should be called only by the internal builders. func (tx *txDriver) Tx(context.Context) (dialect.Tx, error) { return tx, nil } // Dialect returns the dialect of the driver we started the transaction from. func (tx *txDriver) Dialect() string { return tx.drv.Dialect() } // Close is a nop close. func (*txDriver) Close() error { return nil } // Commit is a nop commit for the internal builders. // User must call `Tx.Commit` in order to commit the transaction. 
func (*txDriver) Commit() error { return nil } // Rollback is a nop rollback for the internal builders. // User must call `Tx.Rollback` in order to rollback the transaction. func (*txDriver) Rollback() error { return nil } // Exec calls tx.Exec. func (tx *txDriver) Exec(ctx context.Context, query string, args, v interface{}) error { return tx.tx.Exec(ctx, query, args, v) } // Query calls tx.Query. func (tx *txDriver) Query(ctx context.Context, query string, args, v interface{}) error { return tx.tx.Query(ctx, query, args, v) } var _ dialect.Driver = (*txDriver)(nil) // ExecContext allows calling the underlying ExecContext method of the transaction if it is supported by it. // See, database/sql#Tx.ExecContext for more information. func (tx *txDriver) ExecContext(ctx context.Context, query string, args ...interface{}) (stdsql.Result, error) { ex, ok := tx.tx.(interface { ExecContext(context.Context, string, ...interface{}) (stdsql.Result, error) }) if !ok { return nil, fmt.Errorf("Tx.ExecContext is not supported") } return ex.ExecContext(ctx, query, args...) } // QueryContext allows calling the underlying QueryContext method of the transaction if it is supported by it. // See, database/sql#Tx.QueryContext for more information. func (tx *txDriver) QueryContext(ctx context.Context, query string, args ...interface{}) (*stdsql.Rows, error) { q, ok := tx.tx.(interface { QueryContext(context.Context, string, ...interface{}) (*stdsql.Rows, error) }) if !ok { return nil, fmt.Errorf("Tx.QueryContext is not supported") } return q.QueryContext(ctx, query, args...) } atlas-0.7.2/cmd/atlas/internal/migrate/migrate.go000066400000000000000000000114331431455511600217250ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package migrate import ( "context" "ariga.io/atlas/cmd/atlas/internal/migrate/ent" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/revision" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlclient" "entgo.io/ent/dialect" "entgo.io/ent/dialect/sql" entschema "entgo.io/ent/dialect/sql/schema" ) type ( // EntRevisions provides implementation for the migrate.RevisionReadWriter interface. EntRevisions struct { ac *sqlclient.Client // underlying Atlas client ec *ent.Client // underlying Ent client schema string // name of the schema the revision table resides in } // Option allows to configure EntRevisions by using functional arguments. Option func(*EntRevisions) error ) // NewEntRevisions creates a new EntRevisions with the given sqlclient.Client. func NewEntRevisions(ctx context.Context, ac *sqlclient.Client, opts ...Option) (*EntRevisions, error) { r := &EntRevisions{ac: ac} for _, opt := range opts { if err := opt(r); err != nil { return nil, err } } // Create the connection with the underlying migrate.Driver to have it inside a possible transaction. entopts := []ent.Option{ent.Driver(sql.NewDriver(r.ac.Name, sql.Conn{ExecQuerier: r.ac.Driver}))} // SQLite does not support multiple schema, therefore schema-config is only needed for other dialects. if r.ac.Name != dialect.SQLite { // Make sure the schema to store the revisions table in does exist. _, err := r.ac.InspectSchema(ctx, r.schema, &schema.InspectOptions{Mode: schema.InspectSchemas}) if err != nil && !schema.IsNotExistError(err) { return nil, err } if schema.IsNotExistError(err) { if err := r.ac.ApplyChanges(ctx, []schema.Change{ &schema.AddSchema{S: &schema.Schema{Name: r.schema}}, }); err != nil { return nil, err } } // Tell Ent to operate on a given schema. if r.schema != "" { entopts = append(entopts, ent.AlternateSchema(ent.SchemaConfig{Revision: r.schema})) } } // Instantiate the Ent client and migrate the revision schema. r.ec = ent.NewClient(entopts...) 
return r, nil } // WithSchema configures the schema to use for the revision table. func WithSchema(s string) Option { return func(r *EntRevisions) error { r.schema = s return nil } } // Ident returns the table identifier. func (r *EntRevisions) Ident() *migrate.TableIdent { return &migrate.TableIdent{Name: revision.Table, Schema: r.schema} } // ReadRevision reads a revision from the revisions table. // // ReadRevision will not return results only saved in cache. func (r *EntRevisions) ReadRevision(ctx context.Context, v string) (*migrate.Revision, error) { rev, err := r.ec.Revision.Get(ctx, v) if err != nil && !ent.IsNotFound(err) { return nil, err } if ent.IsNotFound(err) { return nil, migrate.ErrRevisionNotExist } return rev.AtlasRevision(), nil } // ReadRevisions reads the revisions from the revisions table. // // ReadRevisions will not return results only saved to cache. func (r *EntRevisions) ReadRevisions(ctx context.Context) ([]*migrate.Revision, error) { revs, err := r.ec.Revision.Query().Order(ent.Asc(revision.FieldID)).All(ctx) if err != nil { return nil, err } ret := make([]*migrate.Revision, len(revs)) for i, rev := range revs { ret[i] = rev.AtlasRevision() } return ret, nil } // WriteRevision writes a revision to the revisions table. func (r *EntRevisions) WriteRevision(ctx context.Context, rev *migrate.Revision) error { return r.ec.Revision.Create(). SetRevision(rev). OnConflict(sql.ConflictColumns(revision.FieldID)). UpdateNewValues(). Exec(ctx) } // DeleteRevision deletes a revision from the revisions table. func (r *EntRevisions) DeleteRevision(ctx context.Context, v string) error { return r.ec.Revision.DeleteOneID(v).Exec(ctx) } // Migrate attempts to create / update the revisions table. This is separated since Ent attempts to wrap the migration // execution in a transaction and assumes the underlying connection is of type *sql.DB, which is not true for actually // reading and writing revisions. 
func (r *EntRevisions) Migrate(ctx context.Context) error { c := ent.NewClient(ent.Driver(sql.OpenDB(r.ac.Name, r.ac.DB))) // Ensure the ent client is bound to the requested revision schema. Open a new connection, if not. if r.ac.Name != dialect.SQLite && r.ac.URL.Schema != r.schema { sc, err := sqlclient.OpenURL(ctx, r.ac.URL.URL, sqlclient.OpenSchema(r.schema)) if err != nil { return err } defer sc.Close() c = ent.NewClient(ent.Driver(sql.OpenDB(sc.Name, sc.DB))) } return c.Schema.Create(ctx, entschema.WithDropColumn(true)) } var _ migrate.RevisionReadWriter = (*EntRevisions)(nil) atlas-0.7.2/cmd/atlas/internal/migrate/migrate_test.go000066400000000000000000000017111431455511600227620ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package migrate import ( "context" "fmt" "path/filepath" "testing" "ariga.io/atlas/cmd/atlas/internal/migrate/ent/revision" "ariga.io/atlas/sql/sqlclient" _ "github.com/mattn/go-sqlite3" "github.com/stretchr/testify/require" ) func TestNewEntRevisions(t *testing.T) { c, err := sqlclient.Open( context.Background(), fmt.Sprintf("sqlite://%s?cache=shared&mode=memory&_fk=true", filepath.Join(t.TempDir(), "revision")), ) require.NoError(t, err) r, err := NewEntRevisions(context.Background(), c) require.NoError(t, err) require.NoError(t, r.Migrate(context.Background())) s, err := c.Driver.InspectSchema(context.Background(), "", nil) require.NoError(t, err) require.Len(t, s.Tables, 1) _, ok := s.Table(revision.Table) require.True(t, ok) } 
atlas-0.7.2/cmd/atlas/internal/sqlparse/000077500000000000000000000000001431455511600201465ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/sqlparse/myparse/000077500000000000000000000000001431455511600216265ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/sqlparse/myparse/myparse.go000066400000000000000000000120741431455511600236410ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package myparse import ( "fmt" "ariga.io/atlas/cmd/atlas/internal/sqlparse/parseutil" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/pingcap/tidb/parser" "github.com/pingcap/tidb/parser/ast" "github.com/pingcap/tidb/parser/mysql" _ "github.com/pingcap/tidb/parser/test_driver" "golang.org/x/exp/slices" ) // Parser implements the sqlparse.Parser type Parser struct{} // ColumnFilledBefore checks if the column was filled before the given position. func (p *Parser) ColumnFilledBefore(f migrate.File, t *schema.Table, c *schema.Column, pos int) (bool, error) { return parseutil.MatchStmtBefore(f, pos, func(s *migrate.Stmt) (bool, error) { stmt, err := parser.New().ParseOneStmt(s.Text, "", "") if err != nil { return false, err } u, ok := stmt.(*ast.UpdateStmt) // Ensure the table was updated. if !ok || !tableUpdated(u, t) { return false, nil } // Accept UPDATE that fills all rows or those with NULL values as we cannot // determine if NULL values were filled in case there is a custom filtering. 
affectC := func() bool { if u.Where == nil { return true } is, ok := u.Where.(*ast.IsNullExpr) if !ok || is.Not { return false } n, ok := is.Expr.(*ast.ColumnNameExpr) return ok && n.Name.Name.O == c.Name }() idx := slices.IndexFunc(u.List, func(a *ast.Assignment) bool { return a.Column.Name.String() == c.Name && a.Expr != nil && a.Expr.GetType().GetType() != mysql.TypeNull }) // Ensure the column was filled. return affectC && idx != -1, nil }) } // FixChange fixes the changes according to the given statement. func (p *Parser) FixChange(d migrate.Driver, s string, changes schema.Changes) (schema.Changes, error) { stmt, err := parser.New().ParseOneStmt(s, "", "") if err != nil { return nil, err } if len(changes) == 0 { return changes, nil } switch stmt := stmt.(type) { case *ast.AlterTableStmt: if changes, err = renameTable(d, stmt, changes); err != nil { return nil, err } modify, ok := changes[0].(*schema.ModifyTable) if !ok { return nil, fmt.Errorf("expected modify-table change for alter-table statement, but got: %T", changes[0]) } for _, r := range renameColumns(stmt) { parseutil.RenameColumn(modify, r) } for _, r := range renameIndexes(stmt) { parseutil.RenameIndex(modify, r) } case *ast.RenameTableStmt: for _, t := range stmt.TableToTables { changes = parseutil.RenameTable( changes, &parseutil.Rename{ From: t.OldTable.Name.O, To: t.NewTable.Name.O, }) } } return changes, nil } // renameColumns returns all renamed columns that exist in the statement. func renameColumns(stmt *ast.AlterTableStmt) (rename []*parseutil.Rename) { for _, s := range stmt.Specs { if s.Tp == ast.AlterTableRenameColumn { rename = append(rename, &parseutil.Rename{ From: s.OldColumnName.Name.O, To: s.NewColumnName.Name.O, }) } } return } // renameIndexes returns all renamed indexes that exist in the statement. 
func renameIndexes(stmt *ast.AlterTableStmt) (rename []*parseutil.Rename) { for _, s := range stmt.Specs { if s.Tp == ast.AlterTableRenameIndex { rename = append(rename, &parseutil.Rename{ From: s.FromKey.O, To: s.ToKey.O, }) } } return } // renameTable fixes the changes from ALTER command with RENAME into ModifyTable and RenameTable. func renameTable(drv migrate.Driver, stmt *ast.AlterTableStmt, changes schema.Changes) (schema.Changes, error) { var r *ast.AlterTableSpec for _, s := range stmt.Specs { if s.Tp == ast.AlterTableRenameTable { r = s break } } if r == nil { return changes, nil } if len(changes) != 2 { return nil, fmt.Errorf("unexected number fo changes for ALTER command with RENAME clause: %d", len(changes)) } i, j := changes.IndexDropTable(stmt.Table.Name.O), changes.IndexAddTable(r.NewTable.Name.O) if i == -1 { return nil, fmt.Errorf("DropTable %q change was not found in changes", stmt.Table.Name) } if j == -1 { return nil, fmt.Errorf("AddTable %q change was not found in changes", r.NewTable.Name) } fromT, toT := changes[0].(*schema.DropTable).T, changes[1].(*schema.AddTable).T fromT.Name = toT.Name diff, err := drv.TableDiff(fromT, toT) if err != nil { return nil, err } changeT := *toT changeT.Name = stmt.Table.Name.O return schema.Changes{ // Modify the table first. &schema.ModifyTable{T: &changeT, Changes: diff}, // Then, apply the RENAME. &schema.RenameTable{From: &changeT, To: toT}, }, nil } // tableUpdated checks if the table was updated in the statement. 
func tableUpdated(u *ast.UpdateStmt, t *schema.Table) bool { if u.TableRefs == nil || u.TableRefs.TableRefs == nil || u.TableRefs.TableRefs.Left == nil { return false } ts, ok := u.TableRefs.TableRefs.Left.(*ast.TableSource) if !ok { return false } n, ok := ts.Source.(*ast.TableName) return ok && n.Name.O == t.Name && (n.Schema.O == "" || n.Schema.O == t.Schema.Name) } atlas-0.7.2/cmd/atlas/internal/sqlparse/myparse/myparse_test.go000066400000000000000000000137231431455511600247020ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package myparse_test import ( "strconv" "testing" "ariga.io/atlas/cmd/atlas/internal/sqlparse/myparse" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestFixChange_RenameColumns(t *testing.T) { var p myparse.Parser _, err := p.FixChange( nil, "ALTER TABLE t RENAME COLUMN c1 TO c2", schema.Changes{&schema.AddTable{}}, ) require.Error(t, err) changes, err := p.FixChange( nil, "ALTER TABLE t RENAME COLUMN c1 TO c2", schema.Changes{ &schema.ModifyTable{ Changes: schema.Changes{ &schema.DropColumn{C: schema.NewColumn("c1")}, &schema.AddColumn{C: schema.NewColumn("c2")}, }, }, }, ) require.NoError(t, err) require.Equal( t, schema.Changes{ &schema.ModifyTable{ Changes: schema.Changes{ &schema.RenameColumn{From: schema.NewColumn("c1"), To: schema.NewColumn("c2")}, }, }, }, changes, ) changes, err = p.FixChange( nil, "ALTER TABLE t ADD INDEX i(id), RENAME COLUMN c1 TO c2, ADD COLUMN c3 int, DROP COLUMN c4", schema.Changes{ &schema.ModifyTable{ Changes: schema.Changes{ &schema.AddIndex{I: schema.NewIndex("i").AddColumns(schema.NewColumn("id"))}, &schema.DropColumn{C: schema.NewColumn("c1")}, &schema.AddColumn{C: schema.NewColumn("c2")}, &schema.AddColumn{C: schema.NewColumn("c3")}, &schema.AddColumn{C: 
schema.NewColumn("c4")}, }, }, }, ) require.NoError(t, err) require.Equal( t, schema.Changes{ &schema.ModifyTable{ Changes: schema.Changes{ &schema.AddIndex{I: schema.NewIndex("i").AddColumns(schema.NewColumn("id"))}, &schema.RenameColumn{From: schema.NewColumn("c1"), To: schema.NewColumn("c2")}, &schema.AddColumn{C: schema.NewColumn("c3")}, &schema.AddColumn{C: schema.NewColumn("c4")}, }, }, }, changes, ) } func TestFixChange_RenameIndexes(t *testing.T) { var p myparse.Parser changes, err := p.FixChange( nil, "ALTER TABLE t RENAME Index i1 TO i2", schema.Changes{ &schema.ModifyTable{ Changes: schema.Changes{ &schema.DropIndex{I: schema.NewIndex("i1")}, &schema.AddIndex{I: schema.NewIndex("i2")}, }, }, }, ) require.NoError(t, err) require.Equal( t, schema.Changes{ &schema.ModifyTable{ Changes: schema.Changes{ &schema.RenameIndex{From: schema.NewIndex("i1"), To: schema.NewIndex("i2")}, }, }, }, changes, ) } func TestFixChange_RenameTable(t *testing.T) { var p myparse.Parser changes, err := p.FixChange( nil, "RENAME TABLE t1 TO t2", schema.Changes{ &schema.DropTable{T: schema.NewTable("t1")}, &schema.AddTable{T: schema.NewTable("t2")}, }, ) require.NoError(t, err) require.Equal( t, schema.Changes{ &schema.RenameTable{From: schema.NewTable("t1"), To: schema.NewTable("t2")}, }, changes, ) changes, err = p.FixChange( nil, "RENAME TABLE t1 TO t2, t3 TO t4", schema.Changes{ &schema.DropTable{T: schema.NewTable("t1")}, &schema.AddTable{T: schema.NewTable("t2")}, &schema.DropTable{T: schema.NewTable("t3")}, &schema.AddTable{T: schema.NewTable("t4")}, }, ) require.NoError(t, err) require.Equal( t, schema.Changes{ &schema.RenameTable{From: schema.NewTable("t1"), To: schema.NewTable("t2")}, &schema.RenameTable{From: schema.NewTable("t3"), To: schema.NewTable("t4")}, }, changes, ) } func TestFixChange_AlterAndRename(t *testing.T) { var ( p myparse.Parser drv = &mockDriver{} ) drv.changes = append(drv.changes, &schema.AddColumn{C: schema.NewIntColumn("c2", "int")}) changes, err 
:= p.FixChange( drv, "ALTER TABLE t1 RENAME TO t2, ADD COLUMN c2 int", schema.Changes{ &schema.DropTable{T: schema.NewTable("t1").AddColumns(schema.NewIntColumn("c1", "int"))}, &schema.AddTable{T: schema.NewTable("t2").AddColumns(schema.NewIntColumn("c1", "int"), schema.NewIntColumn("c2", "int"))}, }, ) require.NoError(t, err) require.Equal( t, schema.Changes{ &schema.ModifyTable{ T: schema.NewTable("t1").AddColumns(schema.NewIntColumn("c1", "int"), schema.NewIntColumn("c2", "int")), Changes: schema.Changes{ &schema.AddColumn{C: schema.NewIntColumn("c2", "int")}, }, }, &schema.RenameTable{ From: schema.NewTable("t1").AddColumns(schema.NewIntColumn("c1", "int"), schema.NewIntColumn("c2", "int")), To: schema.NewTable("t2").AddColumns(schema.NewIntColumn("c1", "int"), schema.NewIntColumn("c2", "int")), }, }, changes, ) } func TestColumnFilledBefore(t *testing.T) { for i, tt := range []struct { file string pos int wantFilled bool wantErr bool }{ { file: `UPDATE t SET c = NULL;`, pos: 100, }, { file: `UPDATE t SET c = 2;`, pos: 100, wantFilled: true, }, { file: `UPDATE t SET c = 2 WHERE c IS NULL;`, pos: 100, wantFilled: true, }, { file: `UPDATE t SET c = 2 WHERE c IS NOT NULL;`, pos: 100, wantFilled: false, }, { file: `UPDATE t SET c = 2 WHERE c <> NULL`, pos: 100, wantFilled: false, }, { file: ` ALTER TABLE t MODIFY COLUMN c INT NOT NULL; UPDATE t SET c = 2 WHERE c IS NULL; `, pos: 2, wantFilled: false, }, { file: ` UPDATE t SET c = 2 WHERE c IS NULL; ALTER TABLE t MODIFY COLUMN c INT NOT NULL; `, pos: 30, wantFilled: true, }, } { t.Run(strconv.Itoa(i), func(t *testing.T) { var ( p myparse.Parser f = migrate.NewLocalFile("file", []byte(tt.file)) ) filled, err := p.ColumnFilledBefore(f, schema.NewTable("t"), schema.NewColumn("c"), tt.pos) require.Equal(t, err != nil, tt.wantErr, err) require.Equal(t, filled, tt.wantFilled) }) } } type mockDriver struct { migrate.Driver changes schema.Changes } func (d mockDriver) TableDiff(_, _ *schema.Table) ([]schema.Change, error) { 
return d.changes, nil } atlas-0.7.2/cmd/atlas/internal/sqlparse/parseutil/000077500000000000000000000000001431455511600221565ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/sqlparse/parseutil/parseutil.go000066400000000000000000000046631431455511600245260ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Package parseutil exposes shared functions used by the different parsers. package parseutil import ( "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "golang.org/x/exp/slices" ) // Rename describes rename of a resource. type Rename struct { From, To string } // RenameColumn patches DROP/ADD column commands to RENAME. func RenameColumn(modify *schema.ModifyTable, r *Rename) { changes := schema.Changes(modify.Changes) i := changes.IndexDropColumn(r.From) j := changes.IndexAddColumn(r.To) if i != -1 && j != -1 { changes[max(i, j)] = &schema.RenameColumn{ From: changes[i].(*schema.DropColumn).C, To: changes[j].(*schema.AddColumn).C, } changes.RemoveIndex(min(i, j)) modify.Changes = changes } } // RenameIndex patches DROP/ADD index commands to RENAME. func RenameIndex(modify *schema.ModifyTable, r *Rename) { changes := schema.Changes(modify.Changes) i := changes.IndexDropIndex(r.From) j := changes.IndexAddIndex(r.To) if i != -1 && j != -1 { changes[max(i, j)] = &schema.RenameIndex{ From: changes[i].(*schema.DropIndex).I, To: changes[j].(*schema.AddIndex).I, } changes.RemoveIndex(min(i, j)) modify.Changes = changes } } // RenameTable patches DROP/ADD table commands to RENAME. 
func RenameTable(changes schema.Changes, r *Rename) schema.Changes { i := changes.IndexDropTable(r.From) j := changes.IndexAddTable(r.To) if i != -1 && j != -1 { changes[max(i, j)] = &schema.RenameTable{ From: changes[i].(*schema.DropTable).T, To: changes[j].(*schema.AddTable).T, } changes.RemoveIndex(min(i, j)) } return changes } // MatchStmtBefore reports if the file contains any statement that matches the predicate before the given position. func MatchStmtBefore(f migrate.File, pos int, p func(*migrate.Stmt) (bool, error)) (bool, error) { stmts, err := f.StmtDecls() if err != nil { return false, err } i := slices.IndexFunc(stmts, func(s *migrate.Stmt) bool { return s.Pos >= pos }) if i != -1 { stmts = stmts[:i] } for _, s := range stmts { m, err := p(s) if err != nil { return false, err } if m { return true, nil } } return false, nil } func max(i, j int) int { if i > j { return i } return j } func min(i, j int) int { if i < j { return i } return j } atlas-0.7.2/cmd/atlas/internal/sqlparse/pgparse/000077500000000000000000000000001431455511600216075ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/sqlparse/pgparse/pgparse.go000066400000000000000000000070001431455511600235740ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package pgparse import ( "fmt" "ariga.io/atlas/cmd/atlas/internal/sqlparse/parseutil" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/auxten/postgresql-parser/pkg/sql/parser" "github.com/auxten/postgresql-parser/pkg/sql/sem/tree" "golang.org/x/exp/slices" ) // Parser implements the sqlparse.Parser type Parser struct{} // ColumnFilledBefore checks if the column was filled before the given position. 
func (p *Parser) ColumnFilledBefore(f migrate.File, t *schema.Table, c *schema.Column, pos int) (bool, error) { return parseutil.MatchStmtBefore(f, pos, func(s *migrate.Stmt) (bool, error) { stmt, err := parser.ParseOne(s.Text) if err != nil { return false, err } u, ok := stmt.AST.(*tree.Update) if !ok || !tableUpdated(u, t) { return false, nil } // Accept UPDATE that fills all rows or those with NULL values as we cannot // determine if NULL values were filled in case there is a custom filtering. affectC := func() bool { if u.Where == nil { return true } x, ok := u.Where.Expr.(*tree.ComparisonExpr) if !ok || x.Operator != tree.IsNotDistinctFrom || x.SubOperator != tree.EQ { return false } return x.Left.String() == c.Name && x.Right == tree.DNull }() idx := slices.IndexFunc(u.Exprs, func(x *tree.UpdateExpr) bool { return slices.Contains(x.Names, tree.Name(c.Name)) && x.Expr != tree.DNull }) // Ensure the column was filled. return affectC && idx != -1, nil }) } // FixChange fixes the changes according to the given statement. func (p *Parser) FixChange(_ migrate.Driver, s string, changes schema.Changes) (schema.Changes, error) { stmt, err := parser.ParseOne(s) if err != nil { return nil, err } switch stmt := stmt.AST.(type) { case *tree.AlterTable: if r, ok := renameColumn(stmt); ok { modify, err := expectModify(changes) if err != nil { return nil, err } parseutil.RenameColumn(modify, r) } case *tree.RenameIndex: modify, err := expectModify(changes) if err != nil { return nil, err } parseutil.RenameIndex(modify, &parseutil.Rename{ From: stmt.Index.String(), To: stmt.NewName.String(), }) case *tree.RenameTable: changes = parseutil.RenameTable(changes, &parseutil.Rename{ From: stmt.Name.String(), To: stmt.NewName.String(), }) } return changes, nil } // renameColumn returns the renamed column exists in the statement, is any. 
func renameColumn(stmt *tree.AlterTable) (*parseutil.Rename, bool) { for _, c := range stmt.Cmds { if r, ok := c.(*tree.AlterTableRenameColumn); ok { return &parseutil.Rename{ From: r.Column.String(), To: r.NewName.String(), }, true } } return nil, false } func expectModify(changes schema.Changes) (*schema.ModifyTable, error) { if len(changes) != 1 { return nil, fmt.Errorf("unexected number fo changes: %d", len(changes)) } modify, ok := changes[0].(*schema.ModifyTable) if !ok { return nil, fmt.Errorf("expected modify-table change for alter-table statement, but got: %T", changes[0]) } return modify, nil } // tableUpdated checks if the table was updated in the statement. func tableUpdated(u *tree.Update, t *schema.Table) bool { at, ok := u.Table.(*tree.AliasedTableExpr) if !ok { return false } n, ok := at.Expr.(*tree.TableName) return ok && n.Table() == t.Name && (n.Schema() == "" || n.Schema() == t.Schema.Name) } atlas-0.7.2/cmd/atlas/internal/sqlparse/pgparse/pgparse_test.go000066400000000000000000000071201431455511600246360ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package pgparse_test import ( "strconv" "testing" "ariga.io/atlas/cmd/atlas/internal/sqlparse/pgparse" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestFixChange_RenameColumns(t *testing.T) { var p pgparse.Parser _, err := p.FixChange( nil, "ALTER TABLE t RENAME COLUMN c1 TO c2", nil, ) require.Error(t, err) _, err = p.FixChange( nil, "ALTER TABLE t RENAME COLUMN c1 TO c2", schema.Changes{&schema.AddTable{}}, ) require.Error(t, err) changes, err := p.FixChange( nil, "ALTER TABLE t RENAME COLUMN c1 TO c2", schema.Changes{ &schema.ModifyTable{ Changes: schema.Changes{ &schema.DropColumn{C: schema.NewColumn("c1")}, &schema.AddColumn{C: schema.NewColumn("c2")}, }, }, }, ) require.NoError(t, err) require.Equal( t, schema.Changes{ &schema.ModifyTable{ Changes: schema.Changes{ &schema.RenameColumn{From: schema.NewColumn("c1"), To: schema.NewColumn("c2")}, }, }, }, changes, ) } func TestFixChange_RenameIndexes(t *testing.T) { var p pgparse.Parser changes, err := p.FixChange( nil, "ALTER INDEX IF EXISTS i1 RENAME TO i2", schema.Changes{ &schema.ModifyTable{ Changes: schema.Changes{ &schema.DropIndex{I: schema.NewIndex("i1")}, &schema.AddIndex{I: schema.NewIndex("i2")}, }, }, }, ) require.NoError(t, err) require.Equal( t, schema.Changes{ &schema.ModifyTable{ Changes: schema.Changes{ &schema.RenameIndex{From: schema.NewIndex("i1"), To: schema.NewIndex("i2")}, }, }, }, changes, ) } func TestFixChange_RenameTable(t *testing.T) { var p pgparse.Parser changes, err := p.FixChange( nil, "ALTER TABLE t1 RENAME TO t2", schema.Changes{ &schema.DropTable{T: schema.NewTable("t1")}, &schema.AddTable{T: schema.NewTable("t2")}, &schema.AddTable{T: schema.NewTable("t3")}, }, ) require.NoError(t, err) require.Equal( t, schema.Changes{ &schema.RenameTable{From: schema.NewTable("t1"), To: schema.NewTable("t2")}, &schema.AddTable{T: schema.NewTable("t3")}, }, changes, ) } func TestColumnFilledBefore(t *testing.T) { for i, tt := range 
[]struct { file string pos int wantFilled bool wantErr bool }{ { file: `UPDATE t SET c = NULL;`, pos: 100, }, { file: `UPDATE t SET c = 2;`, }, { file: `UPDATE t SET c = 2;`, }, { file: `UPDATE t SET c = 2;`, pos: 100, wantFilled: true, }, { file: `UPDATE t SET c = 2 WHERE c IS NULL;`, pos: 100, wantFilled: true, }, { file: `UPDATE t SET c = 2 WHERE c IS NOT NULL;`, pos: 100, wantFilled: false, }, { file: `UPDATE t SET c = 2 WHERE c <> NULL`, pos: 100, wantFilled: false, }, { file: ` ALTER TABLE t MODIFY COLUMN c INT NOT NULL; UPDATE t SET c = 2 WHERE c IS NULL; `, pos: 2, wantFilled: false, }, { file: ` UPDATE t SET c = 2 WHERE c IS NULL; ALTER TABLE t MODIFY COLUMN c INT NOT NULL; `, pos: 30, wantFilled: true, }, } { t.Run(strconv.Itoa(i), func(t *testing.T) { var ( p pgparse.Parser f = migrate.NewLocalFile("file", []byte(tt.file)) ) filled, err := p.ColumnFilledBefore(f, schema.NewTable("t"), schema.NewColumn("c"), tt.pos) require.Equal(t, err != nil, tt.wantErr, err) require.Equal(t, filled, tt.wantFilled) }) } } atlas-0.7.2/cmd/atlas/internal/sqlparse/sqliteparse/000077500000000000000000000000001431455511600225025ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/sqlparse/sqliteparse/Lexer.g4000066400000000000000000000163451431455511600240260ustar00rootroot00000000000000/* * The MIT License (MIT) * * Copyright (c) 2020 by Martin Mirchev * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and * associated documentation files (the "Software"), to deal in the Software without restriction, * including without limitation the rights to use, copy, modify, merge, publish, distribute, * sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or * substantial portions of the Software. 
* * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT * NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * * Project : sqlite-parser; an ANTLR4 grammar for SQLite https://github.com/bkiers/sqlite-parser * Developed by : Bart Kiers, bart@big-o.nl */ // $antlr-format alignTrailingComments on, columnLimit 150, maxEmptyLinesToKeep 1, reflowComments off, useTab off // $antlr-format allowShortRulesOnASingleLine on, alignSemicolons ownLine lexer grammar Lexer; options { caseInsensitive = true; } SCOL: ';'; DOT: '.'; OPEN_PAR: '('; CLOSE_PAR: ')'; COMMA: ','; ASSIGN: '='; STAR: '*'; PLUS: '+'; MINUS: '-'; TILDE: '~'; PIPE2: '||'; DIV: '/'; MOD: '%'; LT2: '<<'; GT2: '>>'; AMP: '&'; PIPE: '|'; LT: '<'; LT_EQ: '<='; GT: '>'; GT_EQ: '>='; EQ: '=='; NOT_EQ1: '!='; NOT_EQ2: '<>'; // http://www.sqlite.org/lang_keywords.html ABORT_: 'ABORT'; ACTION_: 'ACTION'; ADD_: 'ADD'; AFTER_: 'AFTER'; ALL_: 'ALL'; ALTER_: 'ALTER'; ANALYZE_: 'ANALYZE'; AND_: 'AND'; AS_: 'AS'; ASC_: 'ASC'; ATTACH_: 'ATTACH'; AUTOINCREMENT_: 'AUTOINCREMENT'; BEFORE_: 'BEFORE'; BEGIN_: 'BEGIN'; BETWEEN_: 'BETWEEN'; BY_: 'BY'; CASCADE_: 'CASCADE'; CASE_: 'CASE'; CAST_: 'CAST'; CHECK_: 'CHECK'; COLLATE_: 'COLLATE'; COLUMN_: 'COLUMN'; COMMIT_: 'COMMIT'; CONFLICT_: 'CONFLICT'; CONSTRAINT_: 'CONSTRAINT'; CREATE_: 'CREATE'; CROSS_: 'CROSS'; CURRENT_DATE_: 'CURRENT_DATE'; CURRENT_TIME_: 'CURRENT_TIME'; CURRENT_TIMESTAMP_: 'CURRENT_TIMESTAMP'; DATABASE_: 'DATABASE'; DEFAULT_: 'DEFAULT'; DEFERRABLE_: 'DEFERRABLE'; DEFERRED_: 'DEFERRED'; DELETE_: 'DELETE'; DESC_: 'DESC'; DETACH_: 'DETACH'; DISTINCT_: 'DISTINCT'; DROP_: 'DROP'; EACH_: 'EACH'; ELSE_: 'ELSE'; END_: 
'END'; ESCAPE_: 'ESCAPE'; EXCEPT_: 'EXCEPT'; EXCLUSIVE_: 'EXCLUSIVE'; EXISTS_: 'EXISTS'; EXPLAIN_: 'EXPLAIN'; FAIL_: 'FAIL'; FOR_: 'FOR'; FOREIGN_: 'FOREIGN'; FROM_: 'FROM'; FULL_: 'FULL'; GLOB_: 'GLOB'; GROUP_: 'GROUP'; HAVING_: 'HAVING'; IF_: 'IF'; IGNORE_: 'IGNORE'; IMMEDIATE_: 'IMMEDIATE'; IN_: 'IN'; INDEX_: 'INDEX'; INDEXED_: 'INDEXED'; INITIALLY_: 'INITIALLY'; INNER_: 'INNER'; INSERT_: 'INSERT'; INSTEAD_: 'INSTEAD'; INTERSECT_: 'INTERSECT'; INTO_: 'INTO'; IS_: 'IS'; ISNULL_: 'ISNULL'; JOIN_: 'JOIN'; KEY_: 'KEY'; LEFT_: 'LEFT'; LIKE_: 'LIKE'; LIMIT_: 'LIMIT'; MATCH_: 'MATCH'; NATURAL_: 'NATURAL'; NO_: 'NO'; NOT_: 'NOT'; NOTNULL_: 'NOTNULL'; NULL_: 'NULL'; OF_: 'OF'; OFFSET_: 'OFFSET'; ON_: 'ON'; OR_: 'OR'; ORDER_: 'ORDER'; OUTER_: 'OUTER'; PLAN_: 'PLAN'; PRAGMA_: 'PRAGMA'; PRIMARY_: 'PRIMARY'; QUERY_: 'QUERY'; RAISE_: 'RAISE'; RECURSIVE_: 'RECURSIVE'; REFERENCES_: 'REFERENCES'; REGEXP_: 'REGEXP'; REINDEX_: 'REINDEX'; RELEASE_: 'RELEASE'; RENAME_: 'RENAME'; REPLACE_: 'REPLACE'; RESTRICT_: 'RESTRICT'; RETURNING_: 'RETURNING'; RIGHT_: 'RIGHT'; ROLLBACK_: 'ROLLBACK'; ROW_: 'ROW'; ROWS_: 'ROWS'; SAVEPOINT_: 'SAVEPOINT'; SELECT_: 'SELECT'; SET_: 'SET'; TABLE_: 'TABLE'; TEMP_: 'TEMP'; TEMPORARY_: 'TEMPORARY'; THEN_: 'THEN'; TO_: 'TO'; TRANSACTION_: 'TRANSACTION'; TRIGGER_: 'TRIGGER'; UNION_: 'UNION'; UNIQUE_: 'UNIQUE'; UPDATE_: 'UPDATE'; USING_: 'USING'; VACUUM_: 'VACUUM'; VALUES_: 'VALUES'; VIEW_: 'VIEW'; VIRTUAL_: 'VIRTUAL'; WHEN_: 'WHEN'; WHERE_: 'WHERE'; WITH_: 'WITH'; WITHOUT_: 'WITHOUT'; FIRST_VALUE_: 'FIRST_VALUE'; OVER_: 'OVER'; PARTITION_: 'PARTITION'; RANGE_: 'RANGE'; PRECEDING_: 'PRECEDING'; UNBOUNDED_: 'UNBOUNDED'; CURRENT_: 'CURRENT'; FOLLOWING_: 'FOLLOWING'; CUME_DIST_: 'CUME_DIST'; DENSE_RANK_: 'DENSE_RANK'; LAG_: 'LAG'; LAST_VALUE_: 'LAST_VALUE'; LEAD_: 'LEAD'; NTH_VALUE_: 'NTH_VALUE'; NTILE_: 'NTILE'; PERCENT_RANK_: 'PERCENT_RANK'; RANK_: 'RANK'; ROW_NUMBER_: 'ROW_NUMBER'; GENERATED_: 'GENERATED'; ALWAYS_: 'ALWAYS'; STORED_: 'STORED'; TRUE_: 'TRUE'; 
FALSE_: 'FALSE'; WINDOW_: 'WINDOW'; NULLS_: 'NULLS'; FIRST_: 'FIRST'; LAST_: 'LAST'; FILTER_: 'FILTER'; GROUPS_: 'GROUPS'; EXCLUDE_: 'EXCLUDE'; TIES_: 'TIES'; OTHERS_: 'OTHERS'; DO_: 'DO'; NOTHING_: 'NOTHING'; IDENTIFIER: '"' (~'"' | '""')* '"' | '`' (~'`' | '``')* '`' | '[' ~']'* ']' | [A-Z_] [A-Z_0-9]* ; // TODO check: needs more chars in set NUMERIC_LITERAL: ((DIGIT+ ('.' DIGIT*)?) | ('.' DIGIT+)) ('E' [-+]? DIGIT+)? | '0x' HEX_DIGIT+; BIND_PARAMETER: '?' DIGIT* | [:@$] IDENTIFIER; STRING_LITERAL: '\'' ( ~'\'' | '\'\'')* '\''; BLOB_LITERAL: 'X' STRING_LITERAL; SINGLE_LINE_COMMENT: '--' ~[\r\n]* (('\r'? '\n') | EOF) -> channel(HIDDEN); MULTILINE_COMMENT: '/*' .*? '*/' -> channel(HIDDEN); SPACES: [ \u000B\t\r\n] -> channel(HIDDEN); UNEXPECTED_CHAR: .; fragment HEX_DIGIT: [0-9A-F]; fragment DIGIT: [0-9]; atlas-0.7.2/cmd/atlas/internal/sqlparse/sqliteparse/Parser.g4000066400000000000000000000454171431455511600242050ustar00rootroot00000000000000/* * The MIT License (MIT) * * Copyright (c) 2014 by Bart Kiers * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and * associated documentation files (the "Software"), to deal in the Software without restriction, * including without limitation the rights to use, copy, modify, merge, publish, distribute, * sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or * substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT * NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * * Project : sqlite-parser; an ANTLR4 grammar for SQLite https://github.com/bkiers/sqlite-parser * Developed by: * Bart Kiers, bart@big-o.nl * Martin Mirchev, marti_2203@abv.bg * Mike Lische, mike@lischke-online.de */ // $antlr-format alignTrailingComments on, columnLimit 130, minEmptyLines 1, maxEmptyLinesToKeep 1, reflowComments off // $antlr-format useTab off, allowShortRulesOnASingleLine off, allowShortBlocksOnASingleLine on, alignSemicolons ownLine parser grammar Parser; options { tokenVocab = Lexer; } parse: (sql_stmt_list)* EOF ; sql_stmt_list: SCOL* sql_stmt (SCOL+ sql_stmt)* SCOL* ; sql_stmt: (EXPLAIN_ (QUERY_ PLAN_)?)? ( alter_table_stmt | analyze_stmt | attach_stmt | begin_stmt | commit_stmt | create_index_stmt | create_table_stmt | create_trigger_stmt | create_view_stmt | create_virtual_table_stmt | delete_stmt | delete_stmt_limited | detach_stmt | drop_stmt | insert_stmt | pragma_stmt | reindex_stmt | release_stmt | rollback_stmt | savepoint_stmt | select_stmt | update_stmt | update_stmt_limited | vacuum_stmt ) ; alter_table_stmt: ALTER_ TABLE_ (schema_name DOT)? table_name ( RENAME_ ( TO_ new_table_name = table_name | COLUMN_? old_column_name = column_name TO_ new_column_name = column_name ) | ADD_ COLUMN_? column_def | DROP_ COLUMN_? column_name ) ; analyze_stmt: ANALYZE_ (schema_name | (schema_name DOT)? table_or_index_name)? ; attach_stmt: ATTACH_ DATABASE_? expr AS_ schema_name ; begin_stmt: BEGIN_ (DEFERRED_ | IMMEDIATE_ | EXCLUSIVE_)? ( TRANSACTION_ transaction_name? )? ; commit_stmt: (COMMIT_ | END_) TRANSACTION_? ; rollback_stmt: ROLLBACK_ TRANSACTION_? (TO_ SAVEPOINT_? savepoint_name)? ; savepoint_stmt: SAVEPOINT_ savepoint_name ; release_stmt: RELEASE_ SAVEPOINT_? 
savepoint_name ; create_index_stmt: CREATE_ UNIQUE_? INDEX_ (IF_ NOT_ EXISTS_)? (schema_name DOT)? index_name ON_ table_name OPEN_PAR indexed_column (COMMA indexed_column)* CLOSE_PAR (WHERE_ expr)? ; indexed_column: (column_name | expr) (COLLATE_ collation_name)? asc_desc? ; create_table_stmt: CREATE_ (TEMP_ | TEMPORARY_)? TABLE_ (IF_ NOT_ EXISTS_)? ( schema_name DOT )? table_name ( OPEN_PAR column_def (COMMA column_def)*? (COMMA table_constraint)* CLOSE_PAR ( WITHOUT_ row_ROW_ID = IDENTIFIER )? | AS_ select_stmt ) ; column_def: column_name type_name? column_constraint* ; type_name: name+? ( OPEN_PAR signed_number CLOSE_PAR | OPEN_PAR signed_number COMMA signed_number CLOSE_PAR )? ; column_constraint: (CONSTRAINT_ name)? ( (PRIMARY_ KEY_ asc_desc? conflict_clause? AUTOINCREMENT_?) | (NOT_ NULL_ | UNIQUE_) conflict_clause? | CHECK_ OPEN_PAR expr CLOSE_PAR | DEFAULT_ (signed_number | literal_value | OPEN_PAR expr CLOSE_PAR) | COLLATE_ collation_name | foreign_key_clause | (GENERATED_ ALWAYS_)? AS_ OPEN_PAR expr CLOSE_PAR ( STORED_ | VIRTUAL_ )? ) ; signed_number: (PLUS | MINUS)? NUMERIC_LITERAL ; table_constraint: (CONSTRAINT_ name)? ( (PRIMARY_ KEY_ | UNIQUE_) OPEN_PAR indexed_column ( COMMA indexed_column )* CLOSE_PAR conflict_clause? | CHECK_ OPEN_PAR expr CLOSE_PAR | FOREIGN_ KEY_ OPEN_PAR column_name (COMMA column_name)* CLOSE_PAR foreign_key_clause ) ; foreign_key_clause: REFERENCES_ foreign_table ( OPEN_PAR column_name (COMMA column_name)* CLOSE_PAR )? ( ON_ (DELETE_ | UPDATE_) ( SET_ (NULL_ | DEFAULT_) | CASCADE_ | RESTRICT_ | NO_ ACTION_ ) | MATCH_ name )* (NOT_? DEFERRABLE_ (INITIALLY_ (DEFERRED_ | IMMEDIATE_))?)? ; conflict_clause: ON_ CONFLICT_ ( ROLLBACK_ | ABORT_ | FAIL_ | IGNORE_ | REPLACE_ ) ; create_trigger_stmt: CREATE_ (TEMP_ | TEMPORARY_)? TRIGGER_ (IF_ NOT_ EXISTS_)? ( schema_name DOT )? trigger_name (BEFORE_ | AFTER_ | INSTEAD_ OF_)? ( DELETE_ | INSERT_ | UPDATE_ (OF_ column_name ( COMMA column_name)*)? ) ON_ table_name (FOR_ EACH_ ROW_)? 
(WHEN_ expr)? BEGIN_ ( (update_stmt | insert_stmt | delete_stmt | select_stmt) SCOL )+ END_ ; create_view_stmt: CREATE_ (TEMP_ | TEMPORARY_)? VIEW_ (IF_ NOT_ EXISTS_)? ( schema_name DOT )? view_name (OPEN_PAR column_name (COMMA column_name)* CLOSE_PAR)? AS_ select_stmt ; create_virtual_table_stmt: CREATE_ VIRTUAL_ TABLE_ (IF_ NOT_ EXISTS_)? (schema_name DOT)? table_name USING_ module_name ( OPEN_PAR module_argument (COMMA module_argument)* CLOSE_PAR )? ; with_clause: WITH_ RECURSIVE_? cte_table_name AS_ OPEN_PAR select_stmt CLOSE_PAR ( COMMA cte_table_name AS_ OPEN_PAR select_stmt CLOSE_PAR )* ; cte_table_name: table_name (OPEN_PAR column_name ( COMMA column_name)* CLOSE_PAR)? ; recursive_cte: cte_table_name AS_ OPEN_PAR initial_select UNION_ ALL_? recursive_select CLOSE_PAR ; common_table_expression: table_name (OPEN_PAR column_name ( COMMA column_name)* CLOSE_PAR)? AS_ OPEN_PAR select_stmt CLOSE_PAR ; delete_stmt: with_clause? DELETE_ FROM_ qualified_table_name (WHERE_ expr)? returning_clause? ; delete_stmt_limited: with_clause? DELETE_ FROM_ qualified_table_name (WHERE_ expr)? returning_clause? ( order_by_stmt? limit_stmt )? ; detach_stmt: DETACH_ DATABASE_? schema_name ; drop_stmt: DROP_ object = (INDEX_ | TABLE_ | TRIGGER_ | VIEW_) ( IF_ EXISTS_ )? (schema_name DOT)? any_name ; /* SQLite understands the following binary operators, in order from highest to lowest precedence: || * / % + - << >> & | < <= > >= = == != <> IS IS NOT IN LIKE GLOB MATCH REGEXP AND OR */ expr: literal_value | BIND_PARAMETER | ((schema_name DOT)? table_name DOT)? column_name | unary_operator expr | expr PIPE2 expr | expr ( STAR | DIV | MOD) expr | expr ( PLUS | MINUS) expr | expr ( LT2 | GT2 | AMP | PIPE) expr | expr ( LT | LT_EQ | GT | GT_EQ) expr | expr ( ASSIGN | EQ | NOT_EQ1 | NOT_EQ2 | IS_ | IS_ NOT_ | IN_ | LIKE_ | GLOB_ | MATCH_ | REGEXP_ ) expr | expr AND_ expr | expr OR_ expr | function_name OPEN_PAR ((DISTINCT_? expr ( COMMA expr)*) | STAR)? CLOSE_PAR filter_clause? 
over_clause? | OPEN_PAR expr (COMMA expr)* CLOSE_PAR | CAST_ OPEN_PAR expr AS_ type_name CLOSE_PAR | expr COLLATE_ collation_name | expr NOT_? (LIKE_ | GLOB_ | REGEXP_ | MATCH_) expr ( ESCAPE_ expr )? | expr ( ISNULL_ | NOTNULL_ | NOT_ NULL_) | expr IS_ NOT_? expr | expr NOT_? BETWEEN_ expr AND_ expr | expr NOT_? IN_ ( OPEN_PAR (select_stmt | expr ( COMMA expr)*)? CLOSE_PAR | ( schema_name DOT)? table_name | (schema_name DOT)? table_function_name OPEN_PAR (expr (COMMA expr)*)? CLOSE_PAR ) | ((NOT_)? EXISTS_)? OPEN_PAR select_stmt CLOSE_PAR | CASE_ expr? (WHEN_ expr THEN_ expr)+ (ELSE_ expr)? END_ | raise_function ; raise_function: RAISE_ OPEN_PAR ( IGNORE_ | (ROLLBACK_ | ABORT_ | FAIL_) COMMA error_message ) CLOSE_PAR ; literal_value: NUMERIC_LITERAL | STRING_LITERAL | BLOB_LITERAL | NULL_ | TRUE_ | FALSE_ | CURRENT_TIME_ | CURRENT_DATE_ | CURRENT_TIMESTAMP_ ; insert_stmt: with_clause? ( INSERT_ | REPLACE_ | INSERT_ OR_ ( REPLACE_ | ROLLBACK_ | ABORT_ | FAIL_ | IGNORE_ ) ) INTO_ (schema_name DOT)? table_name (AS_ table_alias)? ( OPEN_PAR column_name ( COMMA column_name)* CLOSE_PAR )? ( ( ( VALUES_ OPEN_PAR expr (COMMA expr)* CLOSE_PAR ( COMMA OPEN_PAR expr ( COMMA expr)* CLOSE_PAR )* | select_stmt ) upsert_clause? ) | DEFAULT_ VALUES_ ) returning_clause? ; returning_clause: RETURNING_ result_column (COMMA result_column)* ; upsert_clause: ON_ CONFLICT_ ( OPEN_PAR indexed_column (COMMA indexed_column)* CLOSE_PAR (WHERE_ expr)? )? DO_ ( NOTHING_ | UPDATE_ SET_ ( (column_name | column_name_list) ASSIGN expr ( COMMA (column_name | column_name_list) ASSIGN expr )* (WHERE_ expr)? ) ) ; pragma_stmt: PRAGMA_ (schema_name DOT)? pragma_name ( ASSIGN pragma_value | OPEN_PAR pragma_value CLOSE_PAR )? ; pragma_value: signed_number | name | STRING_LITERAL ; reindex_stmt: REINDEX_ (collation_name | (schema_name DOT)? (table_name | index_name))? ; select_stmt: common_table_stmt? select_core (compound_operator select_core)* order_by_stmt? limit_stmt? 
; join_clause: table_or_subquery (join_operator table_or_subquery join_constraint?)* ; select_core: ( SELECT_ (DISTINCT_ | ALL_)? result_column (COMMA result_column)* ( FROM_ (table_or_subquery (COMMA table_or_subquery)* | join_clause) )? (WHERE_ expr)? (GROUP_ BY_ expr (COMMA expr)* (HAVING_ expr)?)? ( WINDOW_ window_name AS_ window_defn ( COMMA window_name AS_ window_defn )* )? ) | VALUES_ OPEN_PAR expr (COMMA expr)* CLOSE_PAR ( COMMA OPEN_PAR expr ( COMMA expr)* CLOSE_PAR )* ; factored_select_stmt: select_stmt ; simple_select_stmt: common_table_stmt? select_core order_by_stmt? limit_stmt? ; compound_select_stmt: common_table_stmt? select_core ( (UNION_ ALL_? | INTERSECT_ | EXCEPT_) select_core )+ order_by_stmt? limit_stmt? ; table_or_subquery: ( (schema_name DOT)? table_name (AS_? table_alias)? ( INDEXED_ BY_ index_name | NOT_ INDEXED_ )? ) | (schema_name DOT)? table_function_name OPEN_PAR expr (COMMA expr)* CLOSE_PAR ( AS_? table_alias )? | OPEN_PAR (table_or_subquery (COMMA table_or_subquery)* | join_clause) CLOSE_PAR | OPEN_PAR select_stmt CLOSE_PAR (AS_? table_alias)? ; result_column: STAR | table_name DOT STAR | expr ( AS_? column_alias)? ; join_operator: COMMA | NATURAL_? (LEFT_ OUTER_? | INNER_ | CROSS_)? JOIN_ ; join_constraint: ON_ expr | USING_ OPEN_PAR column_name ( COMMA column_name)* CLOSE_PAR ; compound_operator: UNION_ ALL_? | INTERSECT_ | EXCEPT_ ; update_stmt: with_clause? UPDATE_ ( OR_ (ROLLBACK_ | ABORT_ | REPLACE_ | FAIL_ | IGNORE_) )? qualified_table_name SET_ assignment_list ( FROM_ (table_or_subquery (COMMA table_or_subquery)* | join_clause) )? (WHERE_ where = expr)? returning_clause? ; assignment_list: assignment (COMMA assignment)* ; assignment: (column_name | column_name_list) ASSIGN expr ; column_name_list: OPEN_PAR column_name (COMMA column_name)* CLOSE_PAR ; update_stmt_limited: with_clause? UPDATE_ ( OR_ (ROLLBACK_ | ABORT_ | REPLACE_ | FAIL_ | IGNORE_) )? 
qualified_table_name SET_ (column_name | column_name_list) ASSIGN expr ( COMMA (column_name | column_name_list) ASSIGN expr )* (WHERE_ expr)? returning_clause? (order_by_stmt? limit_stmt)? ; qualified_table_name: (schema_name DOT)? table_name (AS_ alias)? ( INDEXED_ BY_ index_name | NOT_ INDEXED_ )? ; vacuum_stmt: VACUUM_ schema_name? (INTO_ filename)? ; filter_clause: FILTER_ OPEN_PAR WHERE_ expr CLOSE_PAR ; window_defn: OPEN_PAR base_window_name? (PARTITION_ BY_ expr (COMMA expr)*)? ( ORDER_ BY_ ordering_term (COMMA ordering_term)* ) frame_spec? CLOSE_PAR ; over_clause: OVER_ ( window_name | OPEN_PAR base_window_name? (PARTITION_ BY_ expr (COMMA expr)*)? ( ORDER_ BY_ ordering_term (COMMA ordering_term)* )? frame_spec? CLOSE_PAR ) ; frame_spec: frame_clause ( EXCLUDE_ (NO_ OTHERS_) | CURRENT_ ROW_ | GROUP_ | TIES_ )? ; frame_clause: (RANGE_ | ROWS_ | GROUPS_) ( frame_single | BETWEEN_ frame_left AND_ frame_right ) ; simple_function_invocation: simple_func OPEN_PAR (expr (COMMA expr)* | STAR) CLOSE_PAR ; aggregate_function_invocation: aggregate_func OPEN_PAR (DISTINCT_? expr (COMMA expr)* | STAR)? CLOSE_PAR filter_clause? ; window_function_invocation: window_function OPEN_PAR (expr (COMMA expr)* | STAR)? CLOSE_PAR filter_clause? OVER_ ( window_defn | window_name ) ; common_table_stmt: //additional structures WITH_ RECURSIVE_? common_table_expression (COMMA common_table_expression)* ; order_by_stmt: ORDER_ BY_ ordering_term (COMMA ordering_term)* ; limit_stmt: LIMIT_ expr ((OFFSET_ | COMMA) expr)? ; ordering_term: expr (COLLATE_ collation_name)? asc_desc? (NULLS_ (FIRST_ | LAST_))? ; asc_desc: ASC_ | DESC_ ; frame_left: expr PRECEDING_ | expr FOLLOWING_ | CURRENT_ ROW_ | UNBOUNDED_ PRECEDING_ ; frame_right: expr PRECEDING_ | expr FOLLOWING_ | CURRENT_ ROW_ | UNBOUNDED_ FOLLOWING_ ; frame_single: expr PRECEDING_ | UNBOUNDED_ PRECEDING_ | CURRENT_ ROW_ ; // unknown window_function: (FIRST_VALUE_ | LAST_VALUE_) OPEN_PAR expr CLOSE_PAR OVER_ OPEN_PAR partition_by? 
order_by_expr_asc_desc frame_clause ? CLOSE_PAR | (CUME_DIST_ | PERCENT_RANK_) OPEN_PAR CLOSE_PAR OVER_ OPEN_PAR partition_by? order_by_expr? CLOSE_PAR | (DENSE_RANK_ | RANK_ | ROW_NUMBER_) OPEN_PAR CLOSE_PAR OVER_ OPEN_PAR partition_by? order_by_expr_asc_desc CLOSE_PAR | (LAG_ | LEAD_) OPEN_PAR expr offset? default_value? CLOSE_PAR OVER_ OPEN_PAR partition_by? order_by_expr_asc_desc CLOSE_PAR | NTH_VALUE_ OPEN_PAR expr COMMA signed_number CLOSE_PAR OVER_ OPEN_PAR partition_by? order_by_expr_asc_desc frame_clause? CLOSE_PAR | NTILE_ OPEN_PAR expr CLOSE_PAR OVER_ OPEN_PAR partition_by? order_by_expr_asc_desc CLOSE_PAR ; offset: COMMA signed_number ; default_value: COMMA signed_number ; partition_by: PARTITION_ BY_ expr+ ; order_by_expr: ORDER_ BY_ expr+ ; order_by_expr_asc_desc: ORDER_ BY_ expr_asc_desc ; expr_asc_desc: expr asc_desc? (COMMA expr asc_desc?)* ; //TODO BOTH OF THESE HAVE TO BE REWORKED TO FOLLOW THE SPEC initial_select: select_stmt ; recursive_select: select_stmt ; unary_operator: MINUS | PLUS | TILDE | NOT_ ; error_message: STRING_LITERAL ; module_argument: // TODO check what exactly is permitted here expr | column_def ; column_alias: IDENTIFIER | STRING_LITERAL ; keyword: ABORT_ | ACTION_ | ADD_ | AFTER_ | ALL_ | ALTER_ | ANALYZE_ | AND_ | AS_ | ASC_ | ATTACH_ | AUTOINCREMENT_ | BEFORE_ | BEGIN_ | BETWEEN_ | BY_ | CASCADE_ | CASE_ | CAST_ | CHECK_ | COLLATE_ | COLUMN_ | COMMIT_ | CONFLICT_ | CONSTRAINT_ | CREATE_ | CROSS_ | CURRENT_DATE_ | CURRENT_TIME_ | CURRENT_TIMESTAMP_ | DATABASE_ | DEFAULT_ | DEFERRABLE_ | DEFERRED_ | DELETE_ | DESC_ | DETACH_ | DISTINCT_ | DROP_ | EACH_ | ELSE_ | END_ | ESCAPE_ | EXCEPT_ | EXCLUSIVE_ | EXISTS_ | EXPLAIN_ | FAIL_ | FOR_ | FOREIGN_ | FROM_ | FULL_ | GLOB_ | GROUP_ | HAVING_ | IF_ | IGNORE_ | IMMEDIATE_ | IN_ | INDEX_ | INDEXED_ | INITIALLY_ | INNER_ | INSERT_ | INSTEAD_ | INTERSECT_ | INTO_ | IS_ | ISNULL_ | JOIN_ | KEY_ | LEFT_ | LIKE_ | LIMIT_ | MATCH_ | NATURAL_ | NO_ | NOT_ | NOTNULL_ | NULL_ | OF_ | 
OFFSET_ | ON_ | OR_ | ORDER_ | OUTER_ | PLAN_ | PRAGMA_ | PRIMARY_ | QUERY_ | RAISE_ | RECURSIVE_ | REFERENCES_ | REGEXP_ | REINDEX_ | RELEASE_ | RENAME_ | REPLACE_ | RESTRICT_ | RIGHT_ | ROLLBACK_ | ROW_ | ROWS_ | SAVEPOINT_ | SELECT_ | SET_ | TABLE_ | TEMP_ | TEMPORARY_ | THEN_ | TO_ | TRANSACTION_ | TRIGGER_ | UNION_ | UNIQUE_ | UPDATE_ | USING_ | VACUUM_ | VALUES_ | VIEW_ | VIRTUAL_ | WHEN_ | WHERE_ | WITH_ | WITHOUT_ | FIRST_VALUE_ | OVER_ | PARTITION_ | RANGE_ | PRECEDING_ | UNBOUNDED_ | CURRENT_ | FOLLOWING_ | CUME_DIST_ | DENSE_RANK_ | LAG_ | LAST_VALUE_ | LEAD_ | NTH_VALUE_ | NTILE_ | PERCENT_RANK_ | RANK_ | ROW_NUMBER_ | GENERATED_ | ALWAYS_ | STORED_ | TRUE_ | FALSE_ | WINDOW_ | NULLS_ | FIRST_ | LAST_ | FILTER_ | GROUPS_ | EXCLUDE_ ; // TODO: check all names below name: any_name ; function_name: any_name ; schema_name: any_name ; table_name: any_name ; table_or_index_name: any_name ; column_name: any_name ; collation_name: any_name ; foreign_table: any_name ; index_name: any_name ; trigger_name: any_name ; view_name: any_name ; module_name: any_name ; pragma_name: any_name ; savepoint_name: any_name ; table_alias: any_name ; transaction_name: any_name ; window_name: any_name ; alias: any_name ; filename: any_name ; base_window_name: any_name ; simple_func: any_name ; aggregate_func: any_name ; table_function_name: any_name ; any_name: IDENTIFIER | keyword | STRING_LITERAL | OPEN_PAR any_name CLOSE_PAR ; atlas-0.7.2/cmd/atlas/internal/sqlparse/sqliteparse/README.md000066400000000000000000000007361431455511600237670ustar00rootroot00000000000000### SQLite parser based on ANTLR4 #### Resources 1. SQLite syntax: https://www.sqlite.org/syntaxdiagrams.html 2. Grammar file: https://github.com/antlr/grammars-v4/tree/master/sql/sqlite #### Run codegen 1. Install `antlr4`: https://github.com/antlr/antlr4/blob/master/doc/getting-started.md#unix 2. 
Run: ```bash antlr4 -Dlanguage=Go -package sqliteparse -visitor Lexer.g4 Parser.g4 \ && mv _lexer.go lexer.go \ && mv _parser.go parser.go \ && rm *.interp *.tokens ```atlas-0.7.2/cmd/atlas/internal/sqlparse/sqliteparse/lexer.go000066400000000000000000002146401431455511600241570ustar00rootroot00000000000000// Code generated from Lexer.g4 by ANTLR 4.10.1. DO NOT EDIT. package sqliteparse import ( "fmt" "sync" "unicode" "github.com/antlr/antlr4/runtime/Go/antlr" ) // Suppress unused import error var _ = fmt.Printf var _ = sync.Once{} var _ = unicode.IsLetter type Lexer struct { *antlr.BaseLexer channelNames []string modeNames []string // TODO: EOF string } var lexerLexerStaticData struct { once sync.Once serializedATN []int32 channelNames []string modeNames []string literalNames []string symbolicNames []string ruleNames []string predictionContextCache *antlr.PredictionContextCache atn *antlr.ATN decisionToDFA []*antlr.DFA } func lexerLexerInit() { staticData := &lexerLexerStaticData staticData.channelNames = []string{ "DEFAULT_TOKEN_CHANNEL", "HIDDEN", } staticData.modeNames = []string{ "DEFAULT_MODE", } staticData.literalNames = []string{ "", "';'", "'.'", "'('", "')'", "','", "'='", "'*'", "'+'", "'-'", "'~'", "'||'", "'/'", "'%'", "'<<'", "'>>'", "'&'", "'|'", "'<'", "'<='", "'>'", "'>='", "'=='", "'!='", "'<>'", "'ABORT'", "'ACTION'", "'ADD'", "'AFTER'", "'ALL'", "'ALTER'", "'ANALYZE'", "'AND'", "'AS'", "'ASC'", "'ATTACH'", "'AUTOINCREMENT'", "'BEFORE'", "'BEGIN'", "'BETWEEN'", "'BY'", "'CASCADE'", "'CASE'", "'CAST'", "'CHECK'", "'COLLATE'", "'COLUMN'", "'COMMIT'", "'CONFLICT'", "'CONSTRAINT'", "'CREATE'", "'CROSS'", "'CURRENT_DATE'", "'CURRENT_TIME'", "'CURRENT_TIMESTAMP'", "'DATABASE'", "'DEFAULT'", "'DEFERRABLE'", "'DEFERRED'", "'DELETE'", "'DESC'", "'DETACH'", "'DISTINCT'", "'DROP'", "'EACH'", "'ELSE'", "'END'", "'ESCAPE'", "'EXCEPT'", "'EXCLUSIVE'", "'EXISTS'", "'EXPLAIN'", "'FAIL'", "'FOR'", "'FOREIGN'", "'FROM'", "'FULL'", "'GLOB'", "'GROUP'", "'HAVING'", 
"'IF'", "'IGNORE'", "'IMMEDIATE'", "'IN'", "'INDEX'", "'INDEXED'", "'INITIALLY'", "'INNER'", "'INSERT'", "'INSTEAD'", "'INTERSECT'", "'INTO'", "'IS'", "'ISNULL'", "'JOIN'", "'KEY'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MATCH'", "'NATURAL'", "'NO'", "'NOT'", "'NOTNULL'", "'NULL'", "'OF'", "'OFFSET'", "'ON'", "'OR'", "'ORDER'", "'OUTER'", "'PLAN'", "'PRAGMA'", "'PRIMARY'", "'QUERY'", "'RAISE'", "'RECURSIVE'", "'REFERENCES'", "'REGEXP'", "'REINDEX'", "'RELEASE'", "'RENAME'", "'REPLACE'", "'RESTRICT'", "'RETURNING'", "'RIGHT'", "'ROLLBACK'", "'ROW'", "'ROWS'", "'SAVEPOINT'", "'SELECT'", "'SET'", "'TABLE'", "'TEMP'", "'TEMPORARY'", "'THEN'", "'TO'", "'TRANSACTION'", "'TRIGGER'", "'UNION'", "'UNIQUE'", "'UPDATE'", "'USING'", "'VACUUM'", "'VALUES'", "'VIEW'", "'VIRTUAL'", "'WHEN'", "'WHERE'", "'WITH'", "'WITHOUT'", "'FIRST_VALUE'", "'OVER'", "'PARTITION'", "'RANGE'", "'PRECEDING'", "'UNBOUNDED'", "'CURRENT'", "'FOLLOWING'", "'CUME_DIST'", "'DENSE_RANK'", "'LAG'", "'LAST_VALUE'", "'LEAD'", "'NTH_VALUE'", "'NTILE'", "'PERCENT_RANK'", "'RANK'", "'ROW_NUMBER'", "'GENERATED'", "'ALWAYS'", "'STORED'", "'TRUE'", "'FALSE'", "'WINDOW'", "'NULLS'", "'FIRST'", "'LAST'", "'FILTER'", "'GROUPS'", "'EXCLUDE'", "'TIES'", "'OTHERS'", "'DO'", "'NOTHING'", } staticData.symbolicNames = []string{ "", "SCOL", "DOT", "OPEN_PAR", "CLOSE_PAR", "COMMA", "ASSIGN", "STAR", "PLUS", "MINUS", "TILDE", "PIPE2", "DIV", "MOD", "LT2", "GT2", "AMP", "PIPE", "LT", "LT_EQ", "GT", "GT_EQ", "EQ", "NOT_EQ1", "NOT_EQ2", "ABORT_", "ACTION_", "ADD_", "AFTER_", "ALL_", "ALTER_", "ANALYZE_", "AND_", "AS_", "ASC_", "ATTACH_", "AUTOINCREMENT_", "BEFORE_", "BEGIN_", "BETWEEN_", "BY_", "CASCADE_", "CASE_", "CAST_", "CHECK_", "COLLATE_", "COLUMN_", "COMMIT_", "CONFLICT_", "CONSTRAINT_", "CREATE_", "CROSS_", "CURRENT_DATE_", "CURRENT_TIME_", "CURRENT_TIMESTAMP_", "DATABASE_", "DEFAULT_", "DEFERRABLE_", "DEFERRED_", "DELETE_", "DESC_", "DETACH_", "DISTINCT_", "DROP_", "EACH_", "ELSE_", "END_", "ESCAPE_", "EXCEPT_", 
"EXCLUSIVE_", "EXISTS_", "EXPLAIN_", "FAIL_", "FOR_", "FOREIGN_", "FROM_", "FULL_", "GLOB_", "GROUP_", "HAVING_", "IF_", "IGNORE_", "IMMEDIATE_", "IN_", "INDEX_", "INDEXED_", "INITIALLY_", "INNER_", "INSERT_", "INSTEAD_", "INTERSECT_", "INTO_", "IS_", "ISNULL_", "JOIN_", "KEY_", "LEFT_", "LIKE_", "LIMIT_", "MATCH_", "NATURAL_", "NO_", "NOT_", "NOTNULL_", "NULL_", "OF_", "OFFSET_", "ON_", "OR_", "ORDER_", "OUTER_", "PLAN_", "PRAGMA_", "PRIMARY_", "QUERY_", "RAISE_", "RECURSIVE_", "REFERENCES_", "REGEXP_", "REINDEX_", "RELEASE_", "RENAME_", "REPLACE_", "RESTRICT_", "RETURNING_", "RIGHT_", "ROLLBACK_", "ROW_", "ROWS_", "SAVEPOINT_", "SELECT_", "SET_", "TABLE_", "TEMP_", "TEMPORARY_", "THEN_", "TO_", "TRANSACTION_", "TRIGGER_", "UNION_", "UNIQUE_", "UPDATE_", "USING_", "VACUUM_", "VALUES_", "VIEW_", "VIRTUAL_", "WHEN_", "WHERE_", "WITH_", "WITHOUT_", "FIRST_VALUE_", "OVER_", "PARTITION_", "RANGE_", "PRECEDING_", "UNBOUNDED_", "CURRENT_", "FOLLOWING_", "CUME_DIST_", "DENSE_RANK_", "LAG_", "LAST_VALUE_", "LEAD_", "NTH_VALUE_", "NTILE_", "PERCENT_RANK_", "RANK_", "ROW_NUMBER_", "GENERATED_", "ALWAYS_", "STORED_", "TRUE_", "FALSE_", "WINDOW_", "NULLS_", "FIRST_", "LAST_", "FILTER_", "GROUPS_", "EXCLUDE_", "TIES_", "OTHERS_", "DO_", "NOTHING_", "IDENTIFIER", "NUMERIC_LITERAL", "BIND_PARAMETER", "STRING_LITERAL", "BLOB_LITERAL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "UNEXPECTED_CHAR", } staticData.ruleNames = []string{ "SCOL", "DOT", "OPEN_PAR", "CLOSE_PAR", "COMMA", "ASSIGN", "STAR", "PLUS", "MINUS", "TILDE", "PIPE2", "DIV", "MOD", "LT2", "GT2", "AMP", "PIPE", "LT", "LT_EQ", "GT", "GT_EQ", "EQ", "NOT_EQ1", "NOT_EQ2", "ABORT_", "ACTION_", "ADD_", "AFTER_", "ALL_", "ALTER_", "ANALYZE_", "AND_", "AS_", "ASC_", "ATTACH_", "AUTOINCREMENT_", "BEFORE_", "BEGIN_", "BETWEEN_", "BY_", "CASCADE_", "CASE_", "CAST_", "CHECK_", "COLLATE_", "COLUMN_", "COMMIT_", "CONFLICT_", "CONSTRAINT_", "CREATE_", "CROSS_", "CURRENT_DATE_", "CURRENT_TIME_", "CURRENT_TIMESTAMP_", 
"DATABASE_", "DEFAULT_", "DEFERRABLE_", "DEFERRED_", "DELETE_", "DESC_", "DETACH_", "DISTINCT_", "DROP_", "EACH_", "ELSE_", "END_", "ESCAPE_", "EXCEPT_", "EXCLUSIVE_", "EXISTS_", "EXPLAIN_", "FAIL_", "FOR_", "FOREIGN_", "FROM_", "FULL_", "GLOB_", "GROUP_", "HAVING_", "IF_", "IGNORE_", "IMMEDIATE_", "IN_", "INDEX_", "INDEXED_", "INITIALLY_", "INNER_", "INSERT_", "INSTEAD_", "INTERSECT_", "INTO_", "IS_", "ISNULL_", "JOIN_", "KEY_", "LEFT_", "LIKE_", "LIMIT_", "MATCH_", "NATURAL_", "NO_", "NOT_", "NOTNULL_", "NULL_", "OF_", "OFFSET_", "ON_", "OR_", "ORDER_", "OUTER_", "PLAN_", "PRAGMA_", "PRIMARY_", "QUERY_", "RAISE_", "RECURSIVE_", "REFERENCES_", "REGEXP_", "REINDEX_", "RELEASE_", "RENAME_", "REPLACE_", "RESTRICT_", "RETURNING_", "RIGHT_", "ROLLBACK_", "ROW_", "ROWS_", "SAVEPOINT_", "SELECT_", "SET_", "TABLE_", "TEMP_", "TEMPORARY_", "THEN_", "TO_", "TRANSACTION_", "TRIGGER_", "UNION_", "UNIQUE_", "UPDATE_", "USING_", "VACUUM_", "VALUES_", "VIEW_", "VIRTUAL_", "WHEN_", "WHERE_", "WITH_", "WITHOUT_", "FIRST_VALUE_", "OVER_", "PARTITION_", "RANGE_", "PRECEDING_", "UNBOUNDED_", "CURRENT_", "FOLLOWING_", "CUME_DIST_", "DENSE_RANK_", "LAG_", "LAST_VALUE_", "LEAD_", "NTH_VALUE_", "NTILE_", "PERCENT_RANK_", "RANK_", "ROW_NUMBER_", "GENERATED_", "ALWAYS_", "STORED_", "TRUE_", "FALSE_", "WINDOW_", "NULLS_", "FIRST_", "LAST_", "FILTER_", "GROUPS_", "EXCLUDE_", "TIES_", "OTHERS_", "DO_", "NOTHING_", "IDENTIFIER", "NUMERIC_LITERAL", "BIND_PARAMETER", "STRING_LITERAL", "BLOB_LITERAL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "UNEXPECTED_CHAR", "HEX_DIGIT", "DIGIT", } staticData.predictionContextCache = antlr.NewPredictionContextCache() staticData.serializedATN = []int32{ 4, 0, 193, 1704, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 
21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 
2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 
47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 
1, 85, 1, 85, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 
125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 152, 1, 152, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 158, 1, 158, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 159, 1, 159, 1, 159, 1, 159, 1, 159, 1, 159, 1, 
159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 5, 184, 1562, 8, 184, 10, 184, 12, 184, 1565, 9, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 5, 184, 1572, 8, 184, 10, 184, 12, 184, 1575, 9, 184, 1, 184, 1, 184, 1, 184, 5, 184, 1580, 8, 184, 10, 184, 12, 184, 1583, 9, 184, 1, 184, 1, 184, 1, 184, 5, 184, 1588, 8, 184, 10, 184, 12, 184, 1591, 9, 184, 3, 184, 1593, 8, 184, 1, 185, 4, 185, 1596, 8, 185, 11, 185, 12, 185, 1597, 1, 185, 1, 185, 5, 185, 1602, 8, 185, 10, 185, 12, 185, 1605, 9, 185, 3, 185, 1607, 8, 185, 1, 185, 1, 185, 4, 185, 1611, 8, 185, 11, 185, 12, 185, 1612, 3, 185, 1615, 8, 185, 1, 185, 1, 185, 3, 185, 1619, 8, 185, 1, 185, 4, 185, 
1622, 8, 185, 11, 185, 12, 185, 1623, 3, 185, 1626, 8, 185, 1, 185, 1, 185, 1, 185, 1, 185, 4, 185, 1632, 8, 185, 11, 185, 12, 185, 1633, 3, 185, 1636, 8, 185, 1, 186, 1, 186, 5, 186, 1640, 8, 186, 10, 186, 12, 186, 1643, 9, 186, 1, 186, 1, 186, 3, 186, 1647, 8, 186, 1, 187, 1, 187, 1, 187, 1, 187, 5, 187, 1653, 8, 187, 10, 187, 12, 187, 1656, 9, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 5, 189, 1667, 8, 189, 10, 189, 12, 189, 1670, 9, 189, 1, 189, 3, 189, 1673, 8, 189, 1, 189, 1, 189, 3, 189, 1677, 8, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 5, 190, 1685, 8, 190, 10, 190, 12, 190, 1688, 9, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 193, 1, 193, 1, 194, 1, 194, 1, 1686, 0, 195, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33, 67, 34, 69, 35, 71, 36, 73, 37, 75, 38, 77, 39, 79, 40, 81, 41, 83, 42, 85, 43, 87, 44, 89, 45, 91, 46, 93, 47, 95, 48, 97, 49, 99, 50, 101, 51, 103, 52, 105, 53, 107, 54, 109, 55, 111, 56, 113, 57, 115, 58, 117, 59, 119, 60, 121, 61, 123, 62, 125, 63, 127, 64, 129, 65, 131, 66, 133, 67, 135, 68, 137, 69, 139, 70, 141, 71, 143, 72, 145, 73, 147, 74, 149, 75, 151, 76, 153, 77, 155, 78, 157, 79, 159, 80, 161, 81, 163, 82, 165, 83, 167, 84, 169, 85, 171, 86, 173, 87, 175, 88, 177, 89, 179, 90, 181, 91, 183, 92, 185, 93, 187, 94, 189, 95, 191, 96, 193, 97, 195, 98, 197, 99, 199, 100, 201, 101, 203, 102, 205, 103, 207, 104, 209, 105, 211, 106, 213, 107, 215, 108, 217, 109, 219, 110, 221, 111, 223, 112, 225, 113, 227, 114, 229, 115, 231, 116, 233, 117, 235, 118, 237, 119, 239, 120, 241, 121, 243, 122, 245, 123, 247, 124, 249, 125, 251, 126, 253, 127, 255, 128, 257, 129, 259, 130, 261, 131, 263, 132, 265, 133, 267, 134, 269, 135, 271, 136, 273, 137, 275, 
138, 277, 139, 279, 140, 281, 141, 283, 142, 285, 143, 287, 144, 289, 145, 291, 146, 293, 147, 295, 148, 297, 149, 299, 150, 301, 151, 303, 152, 305, 153, 307, 154, 309, 155, 311, 156, 313, 157, 315, 158, 317, 159, 319, 160, 321, 161, 323, 162, 325, 163, 327, 164, 329, 165, 331, 166, 333, 167, 335, 168, 337, 169, 339, 170, 341, 171, 343, 172, 345, 173, 347, 174, 349, 175, 351, 176, 353, 177, 355, 178, 357, 179, 359, 180, 361, 181, 363, 182, 365, 183, 367, 184, 369, 185, 371, 186, 373, 187, 375, 188, 377, 189, 379, 190, 381, 191, 383, 192, 385, 193, 387, 0, 389, 0, 1, 0, 38, 2, 0, 65, 65, 97, 97, 2, 0, 66, 66, 98, 98, 2, 0, 79, 79, 111, 111, 2, 0, 82, 82, 114, 114, 2, 0, 84, 84, 116, 116, 2, 0, 67, 67, 99, 99, 2, 0, 73, 73, 105, 105, 2, 0, 78, 78, 110, 110, 2, 0, 68, 68, 100, 100, 2, 0, 70, 70, 102, 102, 2, 0, 69, 69, 101, 101, 2, 0, 76, 76, 108, 108, 2, 0, 89, 89, 121, 121, 2, 0, 90, 90, 122, 122, 2, 0, 83, 83, 115, 115, 2, 0, 72, 72, 104, 104, 2, 0, 85, 85, 117, 117, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 87, 87, 119, 119, 2, 0, 75, 75, 107, 107, 2, 0, 80, 80, 112, 112, 2, 0, 88, 88, 120, 120, 2, 0, 86, 86, 118, 118, 2, 0, 74, 74, 106, 106, 2, 0, 81, 81, 113, 113, 1, 0, 34, 34, 1, 0, 96, 96, 1, 0, 93, 93, 3, 0, 65, 90, 95, 95, 97, 122, 4, 0, 48, 57, 65, 90, 95, 95, 97, 122, 2, 0, 43, 43, 45, 45, 3, 0, 36, 36, 58, 58, 64, 64, 1, 0, 39, 39, 2, 0, 10, 10, 13, 13, 3, 0, 9, 11, 13, 13, 32, 32, 3, 0, 48, 57, 65, 70, 97, 102, 1, 0, 48, 57, 1728, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 
0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 0, 77, 1, 0, 0, 0, 0, 79, 1, 0, 0, 0, 0, 81, 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1, 0, 0, 0, 0, 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 0, 95, 1, 0, 0, 0, 0, 97, 1, 0, 0, 0, 0, 99, 1, 0, 0, 0, 0, 101, 1, 0, 0, 0, 0, 103, 1, 0, 0, 0, 0, 105, 1, 0, 0, 0, 0, 107, 1, 0, 0, 0, 0, 109, 1, 0, 0, 0, 0, 111, 1, 0, 0, 0, 0, 113, 1, 0, 0, 0, 0, 115, 1, 0, 0, 0, 0, 117, 1, 0, 0, 0, 0, 119, 1, 0, 0, 0, 0, 121, 1, 0, 0, 0, 0, 123, 1, 0, 0, 0, 0, 125, 1, 0, 0, 0, 0, 127, 1, 0, 0, 0, 0, 129, 1, 0, 0, 0, 0, 131, 1, 0, 0, 0, 0, 133, 1, 0, 0, 0, 0, 135, 1, 0, 0, 0, 0, 137, 1, 0, 0, 0, 0, 139, 1, 0, 0, 0, 0, 141, 1, 0, 0, 0, 0, 143, 1, 0, 0, 0, 0, 145, 1, 0, 0, 0, 0, 147, 1, 0, 0, 0, 0, 149, 1, 0, 0, 0, 0, 151, 1, 0, 0, 0, 0, 153, 1, 0, 0, 0, 0, 155, 1, 0, 0, 0, 0, 157, 1, 0, 0, 0, 0, 159, 1, 0, 0, 0, 0, 161, 1, 0, 0, 0, 0, 163, 1, 0, 0, 0, 0, 165, 1, 0, 0, 0, 0, 167, 1, 0, 0, 0, 0, 169, 1, 0, 0, 0, 0, 171, 1, 0, 0, 0, 0, 173, 1, 0, 0, 0, 0, 175, 1, 0, 0, 0, 0, 177, 1, 0, 0, 0, 0, 179, 1, 0, 0, 0, 0, 181, 1, 0, 0, 0, 0, 183, 1, 0, 0, 0, 0, 185, 1, 0, 0, 0, 0, 187, 1, 0, 0, 0, 0, 189, 1, 0, 0, 0, 0, 191, 1, 0, 0, 0, 0, 193, 1, 0, 0, 0, 0, 195, 1, 0, 0, 0, 0, 197, 1, 0, 0, 0, 0, 199, 1, 0, 0, 0, 0, 201, 1, 0, 0, 0, 0, 203, 1, 0, 0, 0, 0, 205, 1, 0, 0, 0, 0, 207, 1, 0, 0, 0, 0, 209, 1, 0, 0, 0, 0, 211, 1, 0, 0, 0, 0, 213, 1, 0, 0, 0, 0, 215, 1, 0, 0, 0, 0, 217, 1, 0, 0, 0, 0, 219, 1, 0, 0, 0, 0, 221, 1, 0, 0, 0, 0, 223, 1, 0, 0, 0, 0, 225, 1, 0, 0, 0, 0, 227, 1, 0, 0, 0, 0, 229, 1, 0, 0, 0, 0, 231, 1, 0, 0, 0, 0, 233, 1, 0, 0, 0, 0, 235, 1, 0, 0, 0, 0, 237, 1, 0, 0, 0, 0, 239, 1, 0, 0, 0, 0, 241, 1, 0, 0, 0, 0, 243, 1, 0, 0, 0, 0, 245, 1, 0, 0, 0, 0, 247, 1, 0, 0, 0, 0, 249, 1, 0, 0, 0, 0, 251, 1, 0, 0, 0, 0, 253, 1, 0, 0, 0, 0, 255, 1, 0, 0, 0, 0, 257, 1, 0, 
0, 0, 0, 259, 1, 0, 0, 0, 0, 261, 1, 0, 0, 0, 0, 263, 1, 0, 0, 0, 0, 265, 1, 0, 0, 0, 0, 267, 1, 0, 0, 0, 0, 269, 1, 0, 0, 0, 0, 271, 1, 0, 0, 0, 0, 273, 1, 0, 0, 0, 0, 275, 1, 0, 0, 0, 0, 277, 1, 0, 0, 0, 0, 279, 1, 0, 0, 0, 0, 281, 1, 0, 0, 0, 0, 283, 1, 0, 0, 0, 0, 285, 1, 0, 0, 0, 0, 287, 1, 0, 0, 0, 0, 289, 1, 0, 0, 0, 0, 291, 1, 0, 0, 0, 0, 293, 1, 0, 0, 0, 0, 295, 1, 0, 0, 0, 0, 297, 1, 0, 0, 0, 0, 299, 1, 0, 0, 0, 0, 301, 1, 0, 0, 0, 0, 303, 1, 0, 0, 0, 0, 305, 1, 0, 0, 0, 0, 307, 1, 0, 0, 0, 0, 309, 1, 0, 0, 0, 0, 311, 1, 0, 0, 0, 0, 313, 1, 0, 0, 0, 0, 315, 1, 0, 0, 0, 0, 317, 1, 0, 0, 0, 0, 319, 1, 0, 0, 0, 0, 321, 1, 0, 0, 0, 0, 323, 1, 0, 0, 0, 0, 325, 1, 0, 0, 0, 0, 327, 1, 0, 0, 0, 0, 329, 1, 0, 0, 0, 0, 331, 1, 0, 0, 0, 0, 333, 1, 0, 0, 0, 0, 335, 1, 0, 0, 0, 0, 337, 1, 0, 0, 0, 0, 339, 1, 0, 0, 0, 0, 341, 1, 0, 0, 0, 0, 343, 1, 0, 0, 0, 0, 345, 1, 0, 0, 0, 0, 347, 1, 0, 0, 0, 0, 349, 1, 0, 0, 0, 0, 351, 1, 0, 0, 0, 0, 353, 1, 0, 0, 0, 0, 355, 1, 0, 0, 0, 0, 357, 1, 0, 0, 0, 0, 359, 1, 0, 0, 0, 0, 361, 1, 0, 0, 0, 0, 363, 1, 0, 0, 0, 0, 365, 1, 0, 0, 0, 0, 367, 1, 0, 0, 0, 0, 369, 1, 0, 0, 0, 0, 371, 1, 0, 0, 0, 0, 373, 1, 0, 0, 0, 0, 375, 1, 0, 0, 0, 0, 377, 1, 0, 0, 0, 0, 379, 1, 0, 0, 0, 0, 381, 1, 0, 0, 0, 0, 383, 1, 0, 0, 0, 0, 385, 1, 0, 0, 0, 1, 391, 1, 0, 0, 0, 3, 393, 1, 0, 0, 0, 5, 395, 1, 0, 0, 0, 7, 397, 1, 0, 0, 0, 9, 399, 1, 0, 0, 0, 11, 401, 1, 0, 0, 0, 13, 403, 1, 0, 0, 0, 15, 405, 1, 0, 0, 0, 17, 407, 1, 0, 0, 0, 19, 409, 1, 0, 0, 0, 21, 411, 1, 0, 0, 0, 23, 414, 1, 0, 0, 0, 25, 416, 1, 0, 0, 0, 27, 418, 1, 0, 0, 0, 29, 421, 1, 0, 0, 0, 31, 424, 1, 0, 0, 0, 33, 426, 1, 0, 0, 0, 35, 428, 1, 0, 0, 0, 37, 430, 1, 0, 0, 0, 39, 433, 1, 0, 0, 0, 41, 435, 1, 0, 0, 0, 43, 438, 1, 0, 0, 0, 45, 441, 1, 0, 0, 0, 47, 444, 1, 0, 0, 0, 49, 447, 1, 0, 0, 0, 51, 453, 1, 0, 0, 0, 53, 460, 1, 0, 0, 0, 55, 464, 1, 0, 0, 0, 57, 470, 1, 0, 0, 0, 59, 474, 1, 0, 0, 0, 61, 480, 1, 0, 0, 0, 63, 488, 1, 0, 0, 0, 65, 492, 1, 0, 0, 0, 67, 495, 1, 0, 0, 0, 69, 
499, 1, 0, 0, 0, 71, 506, 1, 0, 0, 0, 73, 520, 1, 0, 0, 0, 75, 527, 1, 0, 0, 0, 77, 533, 1, 0, 0, 0, 79, 541, 1, 0, 0, 0, 81, 544, 1, 0, 0, 0, 83, 552, 1, 0, 0, 0, 85, 557, 1, 0, 0, 0, 87, 562, 1, 0, 0, 0, 89, 568, 1, 0, 0, 0, 91, 576, 1, 0, 0, 0, 93, 583, 1, 0, 0, 0, 95, 590, 1, 0, 0, 0, 97, 599, 1, 0, 0, 0, 99, 610, 1, 0, 0, 0, 101, 617, 1, 0, 0, 0, 103, 623, 1, 0, 0, 0, 105, 636, 1, 0, 0, 0, 107, 649, 1, 0, 0, 0, 109, 667, 1, 0, 0, 0, 111, 676, 1, 0, 0, 0, 113, 684, 1, 0, 0, 0, 115, 695, 1, 0, 0, 0, 117, 704, 1, 0, 0, 0, 119, 711, 1, 0, 0, 0, 121, 716, 1, 0, 0, 0, 123, 723, 1, 0, 0, 0, 125, 732, 1, 0, 0, 0, 127, 737, 1, 0, 0, 0, 129, 742, 1, 0, 0, 0, 131, 747, 1, 0, 0, 0, 133, 751, 1, 0, 0, 0, 135, 758, 1, 0, 0, 0, 137, 765, 1, 0, 0, 0, 139, 775, 1, 0, 0, 0, 141, 782, 1, 0, 0, 0, 143, 790, 1, 0, 0, 0, 145, 795, 1, 0, 0, 0, 147, 799, 1, 0, 0, 0, 149, 807, 1, 0, 0, 0, 151, 812, 1, 0, 0, 0, 153, 817, 1, 0, 0, 0, 155, 822, 1, 0, 0, 0, 157, 828, 1, 0, 0, 0, 159, 835, 1, 0, 0, 0, 161, 838, 1, 0, 0, 0, 163, 845, 1, 0, 0, 0, 165, 855, 1, 0, 0, 0, 167, 858, 1, 0, 0, 0, 169, 864, 1, 0, 0, 0, 171, 872, 1, 0, 0, 0, 173, 882, 1, 0, 0, 0, 175, 888, 1, 0, 0, 0, 177, 895, 1, 0, 0, 0, 179, 903, 1, 0, 0, 0, 181, 913, 1, 0, 0, 0, 183, 918, 1, 0, 0, 0, 185, 921, 1, 0, 0, 0, 187, 928, 1, 0, 0, 0, 189, 933, 1, 0, 0, 0, 191, 937, 1, 0, 0, 0, 193, 942, 1, 0, 0, 0, 195, 947, 1, 0, 0, 0, 197, 953, 1, 0, 0, 0, 199, 959, 1, 0, 0, 0, 201, 967, 1, 0, 0, 0, 203, 970, 1, 0, 0, 0, 205, 974, 1, 0, 0, 0, 207, 982, 1, 0, 0, 0, 209, 987, 1, 0, 0, 0, 211, 990, 1, 0, 0, 0, 213, 997, 1, 0, 0, 0, 215, 1000, 1, 0, 0, 0, 217, 1003, 1, 0, 0, 0, 219, 1009, 1, 0, 0, 0, 221, 1015, 1, 0, 0, 0, 223, 1020, 1, 0, 0, 0, 225, 1027, 1, 0, 0, 0, 227, 1035, 1, 0, 0, 0, 229, 1041, 1, 0, 0, 0, 231, 1047, 1, 0, 0, 0, 233, 1057, 1, 0, 0, 0, 235, 1068, 1, 0, 0, 0, 237, 1075, 1, 0, 0, 0, 239, 1083, 1, 0, 0, 0, 241, 1091, 1, 0, 0, 0, 243, 1098, 1, 0, 0, 0, 245, 1106, 1, 0, 0, 0, 247, 1115, 1, 0, 0, 0, 249, 1125, 1, 0, 0, 0, 
251, 1131, 1, 0, 0, 0, 253, 1140, 1, 0, 0, 0, 255, 1144, 1, 0, 0, 0, 257, 1149, 1, 0, 0, 0, 259, 1159, 1, 0, 0, 0, 261, 1166, 1, 0, 0, 0, 263, 1170, 1, 0, 0, 0, 265, 1176, 1, 0, 0, 0, 267, 1181, 1, 0, 0, 0, 269, 1191, 1, 0, 0, 0, 271, 1196, 1, 0, 0, 0, 273, 1199, 1, 0, 0, 0, 275, 1211, 1, 0, 0, 0, 277, 1219, 1, 0, 0, 0, 279, 1225, 1, 0, 0, 0, 281, 1232, 1, 0, 0, 0, 283, 1239, 1, 0, 0, 0, 285, 1245, 1, 0, 0, 0, 287, 1252, 1, 0, 0, 0, 289, 1259, 1, 0, 0, 0, 291, 1264, 1, 0, 0, 0, 293, 1272, 1, 0, 0, 0, 295, 1277, 1, 0, 0, 0, 297, 1283, 1, 0, 0, 0, 299, 1288, 1, 0, 0, 0, 301, 1296, 1, 0, 0, 0, 303, 1308, 1, 0, 0, 0, 305, 1313, 1, 0, 0, 0, 307, 1323, 1, 0, 0, 0, 309, 1329, 1, 0, 0, 0, 311, 1339, 1, 0, 0, 0, 313, 1349, 1, 0, 0, 0, 315, 1357, 1, 0, 0, 0, 317, 1367, 1, 0, 0, 0, 319, 1377, 1, 0, 0, 0, 321, 1388, 1, 0, 0, 0, 323, 1392, 1, 0, 0, 0, 325, 1403, 1, 0, 0, 0, 327, 1408, 1, 0, 0, 0, 329, 1418, 1, 0, 0, 0, 331, 1424, 1, 0, 0, 0, 333, 1437, 1, 0, 0, 0, 335, 1442, 1, 0, 0, 0, 337, 1453, 1, 0, 0, 0, 339, 1463, 1, 0, 0, 0, 341, 1470, 1, 0, 0, 0, 343, 1477, 1, 0, 0, 0, 345, 1482, 1, 0, 0, 0, 347, 1488, 1, 0, 0, 0, 349, 1495, 1, 0, 0, 0, 351, 1501, 1, 0, 0, 0, 353, 1507, 1, 0, 0, 0, 355, 1512, 1, 0, 0, 0, 357, 1519, 1, 0, 0, 0, 359, 1526, 1, 0, 0, 0, 361, 1534, 1, 0, 0, 0, 363, 1539, 1, 0, 0, 0, 365, 1546, 1, 0, 0, 0, 367, 1549, 1, 0, 0, 0, 369, 1592, 1, 0, 0, 0, 371, 1635, 1, 0, 0, 0, 373, 1646, 1, 0, 0, 0, 375, 1648, 1, 0, 0, 0, 377, 1659, 1, 0, 0, 0, 379, 1662, 1, 0, 0, 0, 381, 1680, 1, 0, 0, 0, 383, 1694, 1, 0, 0, 0, 385, 1698, 1, 0, 0, 0, 387, 1700, 1, 0, 0, 0, 389, 1702, 1, 0, 0, 0, 391, 392, 5, 59, 0, 0, 392, 2, 1, 0, 0, 0, 393, 394, 5, 46, 0, 0, 394, 4, 1, 0, 0, 0, 395, 396, 5, 40, 0, 0, 396, 6, 1, 0, 0, 0, 397, 398, 5, 41, 0, 0, 398, 8, 1, 0, 0, 0, 399, 400, 5, 44, 0, 0, 400, 10, 1, 0, 0, 0, 401, 402, 5, 61, 0, 0, 402, 12, 1, 0, 0, 0, 403, 404, 5, 42, 0, 0, 404, 14, 1, 0, 0, 0, 405, 406, 5, 43, 0, 0, 406, 16, 1, 0, 0, 0, 407, 408, 5, 45, 0, 0, 408, 18, 1, 0, 0, 
0, 409, 410, 5, 126, 0, 0, 410, 20, 1, 0, 0, 0, 411, 412, 5, 124, 0, 0, 412, 413, 5, 124, 0, 0, 413, 22, 1, 0, 0, 0, 414, 415, 5, 47, 0, 0, 415, 24, 1, 0, 0, 0, 416, 417, 5, 37, 0, 0, 417, 26, 1, 0, 0, 0, 418, 419, 5, 60, 0, 0, 419, 420, 5, 60, 0, 0, 420, 28, 1, 0, 0, 0, 421, 422, 5, 62, 0, 0, 422, 423, 5, 62, 0, 0, 423, 30, 1, 0, 0, 0, 424, 425, 5, 38, 0, 0, 425, 32, 1, 0, 0, 0, 426, 427, 5, 124, 0, 0, 427, 34, 1, 0, 0, 0, 428, 429, 5, 60, 0, 0, 429, 36, 1, 0, 0, 0, 430, 431, 5, 60, 0, 0, 431, 432, 5, 61, 0, 0, 432, 38, 1, 0, 0, 0, 433, 434, 5, 62, 0, 0, 434, 40, 1, 0, 0, 0, 435, 436, 5, 62, 0, 0, 436, 437, 5, 61, 0, 0, 437, 42, 1, 0, 0, 0, 438, 439, 5, 61, 0, 0, 439, 440, 5, 61, 0, 0, 440, 44, 1, 0, 0, 0, 441, 442, 5, 33, 0, 0, 442, 443, 5, 61, 0, 0, 443, 46, 1, 0, 0, 0, 444, 445, 5, 60, 0, 0, 445, 446, 5, 62, 0, 0, 446, 48, 1, 0, 0, 0, 447, 448, 7, 0, 0, 0, 448, 449, 7, 1, 0, 0, 449, 450, 7, 2, 0, 0, 450, 451, 7, 3, 0, 0, 451, 452, 7, 4, 0, 0, 452, 50, 1, 0, 0, 0, 453, 454, 7, 0, 0, 0, 454, 455, 7, 5, 0, 0, 455, 456, 7, 4, 0, 0, 456, 457, 7, 6, 0, 0, 457, 458, 7, 2, 0, 0, 458, 459, 7, 7, 0, 0, 459, 52, 1, 0, 0, 0, 460, 461, 7, 0, 0, 0, 461, 462, 7, 8, 0, 0, 462, 463, 7, 8, 0, 0, 463, 54, 1, 0, 0, 0, 464, 465, 7, 0, 0, 0, 465, 466, 7, 9, 0, 0, 466, 467, 7, 4, 0, 0, 467, 468, 7, 10, 0, 0, 468, 469, 7, 3, 0, 0, 469, 56, 1, 0, 0, 0, 470, 471, 7, 0, 0, 0, 471, 472, 7, 11, 0, 0, 472, 473, 7, 11, 0, 0, 473, 58, 1, 0, 0, 0, 474, 475, 7, 0, 0, 0, 475, 476, 7, 11, 0, 0, 476, 477, 7, 4, 0, 0, 477, 478, 7, 10, 0, 0, 478, 479, 7, 3, 0, 0, 479, 60, 1, 0, 0, 0, 480, 481, 7, 0, 0, 0, 481, 482, 7, 7, 0, 0, 482, 483, 7, 0, 0, 0, 483, 484, 7, 11, 0, 0, 484, 485, 7, 12, 0, 0, 485, 486, 7, 13, 0, 0, 486, 487, 7, 10, 0, 0, 487, 62, 1, 0, 0, 0, 488, 489, 7, 0, 0, 0, 489, 490, 7, 7, 0, 0, 490, 491, 7, 8, 0, 0, 491, 64, 1, 0, 0, 0, 492, 493, 7, 0, 0, 0, 493, 494, 7, 14, 0, 0, 494, 66, 1, 0, 0, 0, 495, 496, 7, 0, 0, 0, 496, 497, 7, 14, 0, 0, 497, 498, 7, 5, 0, 0, 498, 68, 1, 0, 0, 0, 
499, 500, 7, 0, 0, 0, 500, 501, 7, 4, 0, 0, 501, 502, 7, 4, 0, 0, 502, 503, 7, 0, 0, 0, 503, 504, 7, 5, 0, 0, 504, 505, 7, 15, 0, 0, 505, 70, 1, 0, 0, 0, 506, 507, 7, 0, 0, 0, 507, 508, 7, 16, 0, 0, 508, 509, 7, 4, 0, 0, 509, 510, 7, 2, 0, 0, 510, 511, 7, 6, 0, 0, 511, 512, 7, 7, 0, 0, 512, 513, 7, 5, 0, 0, 513, 514, 7, 3, 0, 0, 514, 515, 7, 10, 0, 0, 515, 516, 7, 17, 0, 0, 516, 517, 7, 10, 0, 0, 517, 518, 7, 7, 0, 0, 518, 519, 7, 4, 0, 0, 519, 72, 1, 0, 0, 0, 520, 521, 7, 1, 0, 0, 521, 522, 7, 10, 0, 0, 522, 523, 7, 9, 0, 0, 523, 524, 7, 2, 0, 0, 524, 525, 7, 3, 0, 0, 525, 526, 7, 10, 0, 0, 526, 74, 1, 0, 0, 0, 527, 528, 7, 1, 0, 0, 528, 529, 7, 10, 0, 0, 529, 530, 7, 18, 0, 0, 530, 531, 7, 6, 0, 0, 531, 532, 7, 7, 0, 0, 532, 76, 1, 0, 0, 0, 533, 534, 7, 1, 0, 0, 534, 535, 7, 10, 0, 0, 535, 536, 7, 4, 0, 0, 536, 537, 7, 19, 0, 0, 537, 538, 7, 10, 0, 0, 538, 539, 7, 10, 0, 0, 539, 540, 7, 7, 0, 0, 540, 78, 1, 0, 0, 0, 541, 542, 7, 1, 0, 0, 542, 543, 7, 12, 0, 0, 543, 80, 1, 0, 0, 0, 544, 545, 7, 5, 0, 0, 545, 546, 7, 0, 0, 0, 546, 547, 7, 14, 0, 0, 547, 548, 7, 5, 0, 0, 548, 549, 7, 0, 0, 0, 549, 550, 7, 8, 0, 0, 550, 551, 7, 10, 0, 0, 551, 82, 1, 0, 0, 0, 552, 553, 7, 5, 0, 0, 553, 554, 7, 0, 0, 0, 554, 555, 7, 14, 0, 0, 555, 556, 7, 10, 0, 0, 556, 84, 1, 0, 0, 0, 557, 558, 7, 5, 0, 0, 558, 559, 7, 0, 0, 0, 559, 560, 7, 14, 0, 0, 560, 561, 7, 4, 0, 0, 561, 86, 1, 0, 0, 0, 562, 563, 7, 5, 0, 0, 563, 564, 7, 15, 0, 0, 564, 565, 7, 10, 0, 0, 565, 566, 7, 5, 0, 0, 566, 567, 7, 20, 0, 0, 567, 88, 1, 0, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 2, 0, 0, 570, 571, 7, 11, 0, 0, 571, 572, 7, 11, 0, 0, 572, 573, 7, 0, 0, 0, 573, 574, 7, 4, 0, 0, 574, 575, 7, 10, 0, 0, 575, 90, 1, 0, 0, 0, 576, 577, 7, 5, 0, 0, 577, 578, 7, 2, 0, 0, 578, 579, 7, 11, 0, 0, 579, 580, 7, 16, 0, 0, 580, 581, 7, 17, 0, 0, 581, 582, 7, 7, 0, 0, 582, 92, 1, 0, 0, 0, 583, 584, 7, 5, 0, 0, 584, 585, 7, 2, 0, 0, 585, 586, 7, 17, 0, 0, 586, 587, 7, 17, 0, 0, 587, 588, 7, 6, 0, 0, 588, 589, 7, 4, 0, 0, 
589, 94, 1, 0, 0, 0, 590, 591, 7, 5, 0, 0, 591, 592, 7, 2, 0, 0, 592, 593, 7, 7, 0, 0, 593, 594, 7, 9, 0, 0, 594, 595, 7, 11, 0, 0, 595, 596, 7, 6, 0, 0, 596, 597, 7, 5, 0, 0, 597, 598, 7, 4, 0, 0, 598, 96, 1, 0, 0, 0, 599, 600, 7, 5, 0, 0, 600, 601, 7, 2, 0, 0, 601, 602, 7, 7, 0, 0, 602, 603, 7, 14, 0, 0, 603, 604, 7, 4, 0, 0, 604, 605, 7, 3, 0, 0, 605, 606, 7, 0, 0, 0, 606, 607, 7, 6, 0, 0, 607, 608, 7, 7, 0, 0, 608, 609, 7, 4, 0, 0, 609, 98, 1, 0, 0, 0, 610, 611, 7, 5, 0, 0, 611, 612, 7, 3, 0, 0, 612, 613, 7, 10, 0, 0, 613, 614, 7, 0, 0, 0, 614, 615, 7, 4, 0, 0, 615, 616, 7, 10, 0, 0, 616, 100, 1, 0, 0, 0, 617, 618, 7, 5, 0, 0, 618, 619, 7, 3, 0, 0, 619, 620, 7, 2, 0, 0, 620, 621, 7, 14, 0, 0, 621, 622, 7, 14, 0, 0, 622, 102, 1, 0, 0, 0, 623, 624, 7, 5, 0, 0, 624, 625, 7, 16, 0, 0, 625, 626, 7, 3, 0, 0, 626, 627, 7, 3, 0, 0, 627, 628, 7, 10, 0, 0, 628, 629, 7, 7, 0, 0, 629, 630, 7, 4, 0, 0, 630, 631, 5, 95, 0, 0, 631, 632, 7, 8, 0, 0, 632, 633, 7, 0, 0, 0, 633, 634, 7, 4, 0, 0, 634, 635, 7, 10, 0, 0, 635, 104, 1, 0, 0, 0, 636, 637, 7, 5, 0, 0, 637, 638, 7, 16, 0, 0, 638, 639, 7, 3, 0, 0, 639, 640, 7, 3, 0, 0, 640, 641, 7, 10, 0, 0, 641, 642, 7, 7, 0, 0, 642, 643, 7, 4, 0, 0, 643, 644, 5, 95, 0, 0, 644, 645, 7, 4, 0, 0, 645, 646, 7, 6, 0, 0, 646, 647, 7, 17, 0, 0, 647, 648, 7, 10, 0, 0, 648, 106, 1, 0, 0, 0, 649, 650, 7, 5, 0, 0, 650, 651, 7, 16, 0, 0, 651, 652, 7, 3, 0, 0, 652, 653, 7, 3, 0, 0, 653, 654, 7, 10, 0, 0, 654, 655, 7, 7, 0, 0, 655, 656, 7, 4, 0, 0, 656, 657, 5, 95, 0, 0, 657, 658, 7, 4, 0, 0, 658, 659, 7, 6, 0, 0, 659, 660, 7, 17, 0, 0, 660, 661, 7, 10, 0, 0, 661, 662, 7, 14, 0, 0, 662, 663, 7, 4, 0, 0, 663, 664, 7, 0, 0, 0, 664, 665, 7, 17, 0, 0, 665, 666, 7, 21, 0, 0, 666, 108, 1, 0, 0, 0, 667, 668, 7, 8, 0, 0, 668, 669, 7, 0, 0, 0, 669, 670, 7, 4, 0, 0, 670, 671, 7, 0, 0, 0, 671, 672, 7, 1, 0, 0, 672, 673, 7, 0, 0, 0, 673, 674, 7, 14, 0, 0, 674, 675, 7, 10, 0, 0, 675, 110, 1, 0, 0, 0, 676, 677, 7, 8, 0, 0, 677, 678, 7, 10, 0, 0, 678, 679, 7, 9, 0, 
0, 679, 680, 7, 0, 0, 0, 680, 681, 7, 16, 0, 0, 681, 682, 7, 11, 0, 0, 682, 683, 7, 4, 0, 0, 683, 112, 1, 0, 0, 0, 684, 685, 7, 8, 0, 0, 685, 686, 7, 10, 0, 0, 686, 687, 7, 9, 0, 0, 687, 688, 7, 10, 0, 0, 688, 689, 7, 3, 0, 0, 689, 690, 7, 3, 0, 0, 690, 691, 7, 0, 0, 0, 691, 692, 7, 1, 0, 0, 692, 693, 7, 11, 0, 0, 693, 694, 7, 10, 0, 0, 694, 114, 1, 0, 0, 0, 695, 696, 7, 8, 0, 0, 696, 697, 7, 10, 0, 0, 697, 698, 7, 9, 0, 0, 698, 699, 7, 10, 0, 0, 699, 700, 7, 3, 0, 0, 700, 701, 7, 3, 0, 0, 701, 702, 7, 10, 0, 0, 702, 703, 7, 8, 0, 0, 703, 116, 1, 0, 0, 0, 704, 705, 7, 8, 0, 0, 705, 706, 7, 10, 0, 0, 706, 707, 7, 11, 0, 0, 707, 708, 7, 10, 0, 0, 708, 709, 7, 4, 0, 0, 709, 710, 7, 10, 0, 0, 710, 118, 1, 0, 0, 0, 711, 712, 7, 8, 0, 0, 712, 713, 7, 10, 0, 0, 713, 714, 7, 14, 0, 0, 714, 715, 7, 5, 0, 0, 715, 120, 1, 0, 0, 0, 716, 717, 7, 8, 0, 0, 717, 718, 7, 10, 0, 0, 718, 719, 7, 4, 0, 0, 719, 720, 7, 0, 0, 0, 720, 721, 7, 5, 0, 0, 721, 722, 7, 15, 0, 0, 722, 122, 1, 0, 0, 0, 723, 724, 7, 8, 0, 0, 724, 725, 7, 6, 0, 0, 725, 726, 7, 14, 0, 0, 726, 727, 7, 4, 0, 0, 727, 728, 7, 6, 0, 0, 728, 729, 7, 7, 0, 0, 729, 730, 7, 5, 0, 0, 730, 731, 7, 4, 0, 0, 731, 124, 1, 0, 0, 0, 732, 733, 7, 8, 0, 0, 733, 734, 7, 3, 0, 0, 734, 735, 7, 2, 0, 0, 735, 736, 7, 21, 0, 0, 736, 126, 1, 0, 0, 0, 737, 738, 7, 10, 0, 0, 738, 739, 7, 0, 0, 0, 739, 740, 7, 5, 0, 0, 740, 741, 7, 15, 0, 0, 741, 128, 1, 0, 0, 0, 742, 743, 7, 10, 0, 0, 743, 744, 7, 11, 0, 0, 744, 745, 7, 14, 0, 0, 745, 746, 7, 10, 0, 0, 746, 130, 1, 0, 0, 0, 747, 748, 7, 10, 0, 0, 748, 749, 7, 7, 0, 0, 749, 750, 7, 8, 0, 0, 750, 132, 1, 0, 0, 0, 751, 752, 7, 10, 0, 0, 752, 753, 7, 14, 0, 0, 753, 754, 7, 5, 0, 0, 754, 755, 7, 0, 0, 0, 755, 756, 7, 21, 0, 0, 756, 757, 7, 10, 0, 0, 757, 134, 1, 0, 0, 0, 758, 759, 7, 10, 0, 0, 759, 760, 7, 22, 0, 0, 760, 761, 7, 5, 0, 0, 761, 762, 7, 10, 0, 0, 762, 763, 7, 21, 0, 0, 763, 764, 7, 4, 0, 0, 764, 136, 1, 0, 0, 0, 765, 766, 7, 10, 0, 0, 766, 767, 7, 22, 0, 0, 767, 768, 7, 5, 0, 0, 
768, 769, 7, 11, 0, 0, 769, 770, 7, 16, 0, 0, 770, 771, 7, 14, 0, 0, 771, 772, 7, 6, 0, 0, 772, 773, 7, 23, 0, 0, 773, 774, 7, 10, 0, 0, 774, 138, 1, 0, 0, 0, 775, 776, 7, 10, 0, 0, 776, 777, 7, 22, 0, 0, 777, 778, 7, 6, 0, 0, 778, 779, 7, 14, 0, 0, 779, 780, 7, 4, 0, 0, 780, 781, 7, 14, 0, 0, 781, 140, 1, 0, 0, 0, 782, 783, 7, 10, 0, 0, 783, 784, 7, 22, 0, 0, 784, 785, 7, 21, 0, 0, 785, 786, 7, 11, 0, 0, 786, 787, 7, 0, 0, 0, 787, 788, 7, 6, 0, 0, 788, 789, 7, 7, 0, 0, 789, 142, 1, 0, 0, 0, 790, 791, 7, 9, 0, 0, 791, 792, 7, 0, 0, 0, 792, 793, 7, 6, 0, 0, 793, 794, 7, 11, 0, 0, 794, 144, 1, 0, 0, 0, 795, 796, 7, 9, 0, 0, 796, 797, 7, 2, 0, 0, 797, 798, 7, 3, 0, 0, 798, 146, 1, 0, 0, 0, 799, 800, 7, 9, 0, 0, 800, 801, 7, 2, 0, 0, 801, 802, 7, 3, 0, 0, 802, 803, 7, 10, 0, 0, 803, 804, 7, 6, 0, 0, 804, 805, 7, 18, 0, 0, 805, 806, 7, 7, 0, 0, 806, 148, 1, 0, 0, 0, 807, 808, 7, 9, 0, 0, 808, 809, 7, 3, 0, 0, 809, 810, 7, 2, 0, 0, 810, 811, 7, 17, 0, 0, 811, 150, 1, 0, 0, 0, 812, 813, 7, 9, 0, 0, 813, 814, 7, 16, 0, 0, 814, 815, 7, 11, 0, 0, 815, 816, 7, 11, 0, 0, 816, 152, 1, 0, 0, 0, 817, 818, 7, 18, 0, 0, 818, 819, 7, 11, 0, 0, 819, 820, 7, 2, 0, 0, 820, 821, 7, 1, 0, 0, 821, 154, 1, 0, 0, 0, 822, 823, 7, 18, 0, 0, 823, 824, 7, 3, 0, 0, 824, 825, 7, 2, 0, 0, 825, 826, 7, 16, 0, 0, 826, 827, 7, 21, 0, 0, 827, 156, 1, 0, 0, 0, 828, 829, 7, 15, 0, 0, 829, 830, 7, 0, 0, 0, 830, 831, 7, 23, 0, 0, 831, 832, 7, 6, 0, 0, 832, 833, 7, 7, 0, 0, 833, 834, 7, 18, 0, 0, 834, 158, 1, 0, 0, 0, 835, 836, 7, 6, 0, 0, 836, 837, 7, 9, 0, 0, 837, 160, 1, 0, 0, 0, 838, 839, 7, 6, 0, 0, 839, 840, 7, 18, 0, 0, 840, 841, 7, 7, 0, 0, 841, 842, 7, 2, 0, 0, 842, 843, 7, 3, 0, 0, 843, 844, 7, 10, 0, 0, 844, 162, 1, 0, 0, 0, 845, 846, 7, 6, 0, 0, 846, 847, 7, 17, 0, 0, 847, 848, 7, 17, 0, 0, 848, 849, 7, 10, 0, 0, 849, 850, 7, 8, 0, 0, 850, 851, 7, 6, 0, 0, 851, 852, 7, 0, 0, 0, 852, 853, 7, 4, 0, 0, 853, 854, 7, 10, 0, 0, 854, 164, 1, 0, 0, 0, 855, 856, 7, 6, 0, 0, 856, 857, 7, 7, 0, 0, 857, 
166, 1, 0, 0, 0, 858, 859, 7, 6, 0, 0, 859, 860, 7, 7, 0, 0, 860, 861, 7, 8, 0, 0, 861, 862, 7, 10, 0, 0, 862, 863, 7, 22, 0, 0, 863, 168, 1, 0, 0, 0, 864, 865, 7, 6, 0, 0, 865, 866, 7, 7, 0, 0, 866, 867, 7, 8, 0, 0, 867, 868, 7, 10, 0, 0, 868, 869, 7, 22, 0, 0, 869, 870, 7, 10, 0, 0, 870, 871, 7, 8, 0, 0, 871, 170, 1, 0, 0, 0, 872, 873, 7, 6, 0, 0, 873, 874, 7, 7, 0, 0, 874, 875, 7, 6, 0, 0, 875, 876, 7, 4, 0, 0, 876, 877, 7, 6, 0, 0, 877, 878, 7, 0, 0, 0, 878, 879, 7, 11, 0, 0, 879, 880, 7, 11, 0, 0, 880, 881, 7, 12, 0, 0, 881, 172, 1, 0, 0, 0, 882, 883, 7, 6, 0, 0, 883, 884, 7, 7, 0, 0, 884, 885, 7, 7, 0, 0, 885, 886, 7, 10, 0, 0, 886, 887, 7, 3, 0, 0, 887, 174, 1, 0, 0, 0, 888, 889, 7, 6, 0, 0, 889, 890, 7, 7, 0, 0, 890, 891, 7, 14, 0, 0, 891, 892, 7, 10, 0, 0, 892, 893, 7, 3, 0, 0, 893, 894, 7, 4, 0, 0, 894, 176, 1, 0, 0, 0, 895, 896, 7, 6, 0, 0, 896, 897, 7, 7, 0, 0, 897, 898, 7, 14, 0, 0, 898, 899, 7, 4, 0, 0, 899, 900, 7, 10, 0, 0, 900, 901, 7, 0, 0, 0, 901, 902, 7, 8, 0, 0, 902, 178, 1, 0, 0, 0, 903, 904, 7, 6, 0, 0, 904, 905, 7, 7, 0, 0, 905, 906, 7, 4, 0, 0, 906, 907, 7, 10, 0, 0, 907, 908, 7, 3, 0, 0, 908, 909, 7, 14, 0, 0, 909, 910, 7, 10, 0, 0, 910, 911, 7, 5, 0, 0, 911, 912, 7, 4, 0, 0, 912, 180, 1, 0, 0, 0, 913, 914, 7, 6, 0, 0, 914, 915, 7, 7, 0, 0, 915, 916, 7, 4, 0, 0, 916, 917, 7, 2, 0, 0, 917, 182, 1, 0, 0, 0, 918, 919, 7, 6, 0, 0, 919, 920, 7, 14, 0, 0, 920, 184, 1, 0, 0, 0, 921, 922, 7, 6, 0, 0, 922, 923, 7, 14, 0, 0, 923, 924, 7, 7, 0, 0, 924, 925, 7, 16, 0, 0, 925, 926, 7, 11, 0, 0, 926, 927, 7, 11, 0, 0, 927, 186, 1, 0, 0, 0, 928, 929, 7, 24, 0, 0, 929, 930, 7, 2, 0, 0, 930, 931, 7, 6, 0, 0, 931, 932, 7, 7, 0, 0, 932, 188, 1, 0, 0, 0, 933, 934, 7, 20, 0, 0, 934, 935, 7, 10, 0, 0, 935, 936, 7, 12, 0, 0, 936, 190, 1, 0, 0, 0, 937, 938, 7, 11, 0, 0, 938, 939, 7, 10, 0, 0, 939, 940, 7, 9, 0, 0, 940, 941, 7, 4, 0, 0, 941, 192, 1, 0, 0, 0, 942, 943, 7, 11, 0, 0, 943, 944, 7, 6, 0, 0, 944, 945, 7, 20, 0, 0, 945, 946, 7, 10, 0, 0, 946, 194, 1, 0, 
0, 0, 947, 948, 7, 11, 0, 0, 948, 949, 7, 6, 0, 0, 949, 950, 7, 17, 0, 0, 950, 951, 7, 6, 0, 0, 951, 952, 7, 4, 0, 0, 952, 196, 1, 0, 0, 0, 953, 954, 7, 17, 0, 0, 954, 955, 7, 0, 0, 0, 955, 956, 7, 4, 0, 0, 956, 957, 7, 5, 0, 0, 957, 958, 7, 15, 0, 0, 958, 198, 1, 0, 0, 0, 959, 960, 7, 7, 0, 0, 960, 961, 7, 0, 0, 0, 961, 962, 7, 4, 0, 0, 962, 963, 7, 16, 0, 0, 963, 964, 7, 3, 0, 0, 964, 965, 7, 0, 0, 0, 965, 966, 7, 11, 0, 0, 966, 200, 1, 0, 0, 0, 967, 968, 7, 7, 0, 0, 968, 969, 7, 2, 0, 0, 969, 202, 1, 0, 0, 0, 970, 971, 7, 7, 0, 0, 971, 972, 7, 2, 0, 0, 972, 973, 7, 4, 0, 0, 973, 204, 1, 0, 0, 0, 974, 975, 7, 7, 0, 0, 975, 976, 7, 2, 0, 0, 976, 977, 7, 4, 0, 0, 977, 978, 7, 7, 0, 0, 978, 979, 7, 16, 0, 0, 979, 980, 7, 11, 0, 0, 980, 981, 7, 11, 0, 0, 981, 206, 1, 0, 0, 0, 982, 983, 7, 7, 0, 0, 983, 984, 7, 16, 0, 0, 984, 985, 7, 11, 0, 0, 985, 986, 7, 11, 0, 0, 986, 208, 1, 0, 0, 0, 987, 988, 7, 2, 0, 0, 988, 989, 7, 9, 0, 0, 989, 210, 1, 0, 0, 0, 990, 991, 7, 2, 0, 0, 991, 992, 7, 9, 0, 0, 992, 993, 7, 9, 0, 0, 993, 994, 7, 14, 0, 0, 994, 995, 7, 10, 0, 0, 995, 996, 7, 4, 0, 0, 996, 212, 1, 0, 0, 0, 997, 998, 7, 2, 0, 0, 998, 999, 7, 7, 0, 0, 999, 214, 1, 0, 0, 0, 1000, 1001, 7, 2, 0, 0, 1001, 1002, 7, 3, 0, 0, 1002, 216, 1, 0, 0, 0, 1003, 1004, 7, 2, 0, 0, 1004, 1005, 7, 3, 0, 0, 1005, 1006, 7, 8, 0, 0, 1006, 1007, 7, 10, 0, 0, 1007, 1008, 7, 3, 0, 0, 1008, 218, 1, 0, 0, 0, 1009, 1010, 7, 2, 0, 0, 1010, 1011, 7, 16, 0, 0, 1011, 1012, 7, 4, 0, 0, 1012, 1013, 7, 10, 0, 0, 1013, 1014, 7, 3, 0, 0, 1014, 220, 1, 0, 0, 0, 1015, 1016, 7, 21, 0, 0, 1016, 1017, 7, 11, 0, 0, 1017, 1018, 7, 0, 0, 0, 1018, 1019, 7, 7, 0, 0, 1019, 222, 1, 0, 0, 0, 1020, 1021, 7, 21, 0, 0, 1021, 1022, 7, 3, 0, 0, 1022, 1023, 7, 0, 0, 0, 1023, 1024, 7, 18, 0, 0, 1024, 1025, 7, 17, 0, 0, 1025, 1026, 7, 0, 0, 0, 1026, 224, 1, 0, 0, 0, 1027, 1028, 7, 21, 0, 0, 1028, 1029, 7, 3, 0, 0, 1029, 1030, 7, 6, 0, 0, 1030, 1031, 7, 17, 0, 0, 1031, 1032, 7, 0, 0, 0, 1032, 1033, 7, 3, 0, 0, 1033, 1034, 7, 
12, 0, 0, 1034, 226, 1, 0, 0, 0, 1035, 1036, 7, 25, 0, 0, 1036, 1037, 7, 16, 0, 0, 1037, 1038, 7, 10, 0, 0, 1038, 1039, 7, 3, 0, 0, 1039, 1040, 7, 12, 0, 0, 1040, 228, 1, 0, 0, 0, 1041, 1042, 7, 3, 0, 0, 1042, 1043, 7, 0, 0, 0, 1043, 1044, 7, 6, 0, 0, 1044, 1045, 7, 14, 0, 0, 1045, 1046, 7, 10, 0, 0, 1046, 230, 1, 0, 0, 0, 1047, 1048, 7, 3, 0, 0, 1048, 1049, 7, 10, 0, 0, 1049, 1050, 7, 5, 0, 0, 1050, 1051, 7, 16, 0, 0, 1051, 1052, 7, 3, 0, 0, 1052, 1053, 7, 14, 0, 0, 1053, 1054, 7, 6, 0, 0, 1054, 1055, 7, 23, 0, 0, 1055, 1056, 7, 10, 0, 0, 1056, 232, 1, 0, 0, 0, 1057, 1058, 7, 3, 0, 0, 1058, 1059, 7, 10, 0, 0, 1059, 1060, 7, 9, 0, 0, 1060, 1061, 7, 10, 0, 0, 1061, 1062, 7, 3, 0, 0, 1062, 1063, 7, 10, 0, 0, 1063, 1064, 7, 7, 0, 0, 1064, 1065, 7, 5, 0, 0, 1065, 1066, 7, 10, 0, 0, 1066, 1067, 7, 14, 0, 0, 1067, 234, 1, 0, 0, 0, 1068, 1069, 7, 3, 0, 0, 1069, 1070, 7, 10, 0, 0, 1070, 1071, 7, 18, 0, 0, 1071, 1072, 7, 10, 0, 0, 1072, 1073, 7, 22, 0, 0, 1073, 1074, 7, 21, 0, 0, 1074, 236, 1, 0, 0, 0, 1075, 1076, 7, 3, 0, 0, 1076, 1077, 7, 10, 0, 0, 1077, 1078, 7, 6, 0, 0, 1078, 1079, 7, 7, 0, 0, 1079, 1080, 7, 8, 0, 0, 1080, 1081, 7, 10, 0, 0, 1081, 1082, 7, 22, 0, 0, 1082, 238, 1, 0, 0, 0, 1083, 1084, 7, 3, 0, 0, 1084, 1085, 7, 10, 0, 0, 1085, 1086, 7, 11, 0, 0, 1086, 1087, 7, 10, 0, 0, 1087, 1088, 7, 0, 0, 0, 1088, 1089, 7, 14, 0, 0, 1089, 1090, 7, 10, 0, 0, 1090, 240, 1, 0, 0, 0, 1091, 1092, 7, 3, 0, 0, 1092, 1093, 7, 10, 0, 0, 1093, 1094, 7, 7, 0, 0, 1094, 1095, 7, 0, 0, 0, 1095, 1096, 7, 17, 0, 0, 1096, 1097, 7, 10, 0, 0, 1097, 242, 1, 0, 0, 0, 1098, 1099, 7, 3, 0, 0, 1099, 1100, 7, 10, 0, 0, 1100, 1101, 7, 21, 0, 0, 1101, 1102, 7, 11, 0, 0, 1102, 1103, 7, 0, 0, 0, 1103, 1104, 7, 5, 0, 0, 1104, 1105, 7, 10, 0, 0, 1105, 244, 1, 0, 0, 0, 1106, 1107, 7, 3, 0, 0, 1107, 1108, 7, 10, 0, 0, 1108, 1109, 7, 14, 0, 0, 1109, 1110, 7, 4, 0, 0, 1110, 1111, 7, 3, 0, 0, 1111, 1112, 7, 6, 0, 0, 1112, 1113, 7, 5, 0, 0, 1113, 1114, 7, 4, 0, 0, 1114, 246, 1, 0, 0, 0, 1115, 1116, 7, 3, 
0, 0, 1116, 1117, 7, 10, 0, 0, 1117, 1118, 7, 4, 0, 0, 1118, 1119, 7, 16, 0, 0, 1119, 1120, 7, 3, 0, 0, 1120, 1121, 7, 7, 0, 0, 1121, 1122, 7, 6, 0, 0, 1122, 1123, 7, 7, 0, 0, 1123, 1124, 7, 18, 0, 0, 1124, 248, 1, 0, 0, 0, 1125, 1126, 7, 3, 0, 0, 1126, 1127, 7, 6, 0, 0, 1127, 1128, 7, 18, 0, 0, 1128, 1129, 7, 15, 0, 0, 1129, 1130, 7, 4, 0, 0, 1130, 250, 1, 0, 0, 0, 1131, 1132, 7, 3, 0, 0, 1132, 1133, 7, 2, 0, 0, 1133, 1134, 7, 11, 0, 0, 1134, 1135, 7, 11, 0, 0, 1135, 1136, 7, 1, 0, 0, 1136, 1137, 7, 0, 0, 0, 1137, 1138, 7, 5, 0, 0, 1138, 1139, 7, 20, 0, 0, 1139, 252, 1, 0, 0, 0, 1140, 1141, 7, 3, 0, 0, 1141, 1142, 7, 2, 0, 0, 1142, 1143, 7, 19, 0, 0, 1143, 254, 1, 0, 0, 0, 1144, 1145, 7, 3, 0, 0, 1145, 1146, 7, 2, 0, 0, 1146, 1147, 7, 19, 0, 0, 1147, 1148, 7, 14, 0, 0, 1148, 256, 1, 0, 0, 0, 1149, 1150, 7, 14, 0, 0, 1150, 1151, 7, 0, 0, 0, 1151, 1152, 7, 23, 0, 0, 1152, 1153, 7, 10, 0, 0, 1153, 1154, 7, 21, 0, 0, 1154, 1155, 7, 2, 0, 0, 1155, 1156, 7, 6, 0, 0, 1156, 1157, 7, 7, 0, 0, 1157, 1158, 7, 4, 0, 0, 1158, 258, 1, 0, 0, 0, 1159, 1160, 7, 14, 0, 0, 1160, 1161, 7, 10, 0, 0, 1161, 1162, 7, 11, 0, 0, 1162, 1163, 7, 10, 0, 0, 1163, 1164, 7, 5, 0, 0, 1164, 1165, 7, 4, 0, 0, 1165, 260, 1, 0, 0, 0, 1166, 1167, 7, 14, 0, 0, 1167, 1168, 7, 10, 0, 0, 1168, 1169, 7, 4, 0, 0, 1169, 262, 1, 0, 0, 0, 1170, 1171, 7, 4, 0, 0, 1171, 1172, 7, 0, 0, 0, 1172, 1173, 7, 1, 0, 0, 1173, 1174, 7, 11, 0, 0, 1174, 1175, 7, 10, 0, 0, 1175, 264, 1, 0, 0, 0, 1176, 1177, 7, 4, 0, 0, 1177, 1178, 7, 10, 0, 0, 1178, 1179, 7, 17, 0, 0, 1179, 1180, 7, 21, 0, 0, 1180, 266, 1, 0, 0, 0, 1181, 1182, 7, 4, 0, 0, 1182, 1183, 7, 10, 0, 0, 1183, 1184, 7, 17, 0, 0, 1184, 1185, 7, 21, 0, 0, 1185, 1186, 7, 2, 0, 0, 1186, 1187, 7, 3, 0, 0, 1187, 1188, 7, 0, 0, 0, 1188, 1189, 7, 3, 0, 0, 1189, 1190, 7, 12, 0, 0, 1190, 268, 1, 0, 0, 0, 1191, 1192, 7, 4, 0, 0, 1192, 1193, 7, 15, 0, 0, 1193, 1194, 7, 10, 0, 0, 1194, 1195, 7, 7, 0, 0, 1195, 270, 1, 0, 0, 0, 1196, 1197, 7, 4, 0, 0, 1197, 1198, 7, 2, 0, 0, 1198, 
272, 1, 0, 0, 0, 1199, 1200, 7, 4, 0, 0, 1200, 1201, 7, 3, 0, 0, 1201, 1202, 7, 0, 0, 0, 1202, 1203, 7, 7, 0, 0, 1203, 1204, 7, 14, 0, 0, 1204, 1205, 7, 0, 0, 0, 1205, 1206, 7, 5, 0, 0, 1206, 1207, 7, 4, 0, 0, 1207, 1208, 7, 6, 0, 0, 1208, 1209, 7, 2, 0, 0, 1209, 1210, 7, 7, 0, 0, 1210, 274, 1, 0, 0, 0, 1211, 1212, 7, 4, 0, 0, 1212, 1213, 7, 3, 0, 0, 1213, 1214, 7, 6, 0, 0, 1214, 1215, 7, 18, 0, 0, 1215, 1216, 7, 18, 0, 0, 1216, 1217, 7, 10, 0, 0, 1217, 1218, 7, 3, 0, 0, 1218, 276, 1, 0, 0, 0, 1219, 1220, 7, 16, 0, 0, 1220, 1221, 7, 7, 0, 0, 1221, 1222, 7, 6, 0, 0, 1222, 1223, 7, 2, 0, 0, 1223, 1224, 7, 7, 0, 0, 1224, 278, 1, 0, 0, 0, 1225, 1226, 7, 16, 0, 0, 1226, 1227, 7, 7, 0, 0, 1227, 1228, 7, 6, 0, 0, 1228, 1229, 7, 25, 0, 0, 1229, 1230, 7, 16, 0, 0, 1230, 1231, 7, 10, 0, 0, 1231, 280, 1, 0, 0, 0, 1232, 1233, 7, 16, 0, 0, 1233, 1234, 7, 21, 0, 0, 1234, 1235, 7, 8, 0, 0, 1235, 1236, 7, 0, 0, 0, 1236, 1237, 7, 4, 0, 0, 1237, 1238, 7, 10, 0, 0, 1238, 282, 1, 0, 0, 0, 1239, 1240, 7, 16, 0, 0, 1240, 1241, 7, 14, 0, 0, 1241, 1242, 7, 6, 0, 0, 1242, 1243, 7, 7, 0, 0, 1243, 1244, 7, 18, 0, 0, 1244, 284, 1, 0, 0, 0, 1245, 1246, 7, 23, 0, 0, 1246, 1247, 7, 0, 0, 0, 1247, 1248, 7, 5, 0, 0, 1248, 1249, 7, 16, 0, 0, 1249, 1250, 7, 16, 0, 0, 1250, 1251, 7, 17, 0, 0, 1251, 286, 1, 0, 0, 0, 1252, 1253, 7, 23, 0, 0, 1253, 1254, 7, 0, 0, 0, 1254, 1255, 7, 11, 0, 0, 1255, 1256, 7, 16, 0, 0, 1256, 1257, 7, 10, 0, 0, 1257, 1258, 7, 14, 0, 0, 1258, 288, 1, 0, 0, 0, 1259, 1260, 7, 23, 0, 0, 1260, 1261, 7, 6, 0, 0, 1261, 1262, 7, 10, 0, 0, 1262, 1263, 7, 19, 0, 0, 1263, 290, 1, 0, 0, 0, 1264, 1265, 7, 23, 0, 0, 1265, 1266, 7, 6, 0, 0, 1266, 1267, 7, 3, 0, 0, 1267, 1268, 7, 4, 0, 0, 1268, 1269, 7, 16, 0, 0, 1269, 1270, 7, 0, 0, 0, 1270, 1271, 7, 11, 0, 0, 1271, 292, 1, 0, 0, 0, 1272, 1273, 7, 19, 0, 0, 1273, 1274, 7, 15, 0, 0, 1274, 1275, 7, 10, 0, 0, 1275, 1276, 7, 7, 0, 0, 1276, 294, 1, 0, 0, 0, 1277, 1278, 7, 19, 0, 0, 1278, 1279, 7, 15, 0, 0, 1279, 1280, 7, 10, 0, 0, 1280, 1281, 
7, 3, 0, 0, 1281, 1282, 7, 10, 0, 0, 1282, 296, 1, 0, 0, 0, 1283, 1284, 7, 19, 0, 0, 1284, 1285, 7, 6, 0, 0, 1285, 1286, 7, 4, 0, 0, 1286, 1287, 7, 15, 0, 0, 1287, 298, 1, 0, 0, 0, 1288, 1289, 7, 19, 0, 0, 1289, 1290, 7, 6, 0, 0, 1290, 1291, 7, 4, 0, 0, 1291, 1292, 7, 15, 0, 0, 1292, 1293, 7, 2, 0, 0, 1293, 1294, 7, 16, 0, 0, 1294, 1295, 7, 4, 0, 0, 1295, 300, 1, 0, 0, 0, 1296, 1297, 7, 9, 0, 0, 1297, 1298, 7, 6, 0, 0, 1298, 1299, 7, 3, 0, 0, 1299, 1300, 7, 14, 0, 0, 1300, 1301, 7, 4, 0, 0, 1301, 1302, 5, 95, 0, 0, 1302, 1303, 7, 23, 0, 0, 1303, 1304, 7, 0, 0, 0, 1304, 1305, 7, 11, 0, 0, 1305, 1306, 7, 16, 0, 0, 1306, 1307, 7, 10, 0, 0, 1307, 302, 1, 0, 0, 0, 1308, 1309, 7, 2, 0, 0, 1309, 1310, 7, 23, 0, 0, 1310, 1311, 7, 10, 0, 0, 1311, 1312, 7, 3, 0, 0, 1312, 304, 1, 0, 0, 0, 1313, 1314, 7, 21, 0, 0, 1314, 1315, 7, 0, 0, 0, 1315, 1316, 7, 3, 0, 0, 1316, 1317, 7, 4, 0, 0, 1317, 1318, 7, 6, 0, 0, 1318, 1319, 7, 4, 0, 0, 1319, 1320, 7, 6, 0, 0, 1320, 1321, 7, 2, 0, 0, 1321, 1322, 7, 7, 0, 0, 1322, 306, 1, 0, 0, 0, 1323, 1324, 7, 3, 0, 0, 1324, 1325, 7, 0, 0, 0, 1325, 1326, 7, 7, 0, 0, 1326, 1327, 7, 18, 0, 0, 1327, 1328, 7, 10, 0, 0, 1328, 308, 1, 0, 0, 0, 1329, 1330, 7, 21, 0, 0, 1330, 1331, 7, 3, 0, 0, 1331, 1332, 7, 10, 0, 0, 1332, 1333, 7, 5, 0, 0, 1333, 1334, 7, 10, 0, 0, 1334, 1335, 7, 8, 0, 0, 1335, 1336, 7, 6, 0, 0, 1336, 1337, 7, 7, 0, 0, 1337, 1338, 7, 18, 0, 0, 1338, 310, 1, 0, 0, 0, 1339, 1340, 7, 16, 0, 0, 1340, 1341, 7, 7, 0, 0, 1341, 1342, 7, 1, 0, 0, 1342, 1343, 7, 2, 0, 0, 1343, 1344, 7, 16, 0, 0, 1344, 1345, 7, 7, 0, 0, 1345, 1346, 7, 8, 0, 0, 1346, 1347, 7, 10, 0, 0, 1347, 1348, 7, 8, 0, 0, 1348, 312, 1, 0, 0, 0, 1349, 1350, 7, 5, 0, 0, 1350, 1351, 7, 16, 0, 0, 1351, 1352, 7, 3, 0, 0, 1352, 1353, 7, 3, 0, 0, 1353, 1354, 7, 10, 0, 0, 1354, 1355, 7, 7, 0, 0, 1355, 1356, 7, 4, 0, 0, 1356, 314, 1, 0, 0, 0, 1357, 1358, 7, 9, 0, 0, 1358, 1359, 7, 2, 0, 0, 1359, 1360, 7, 11, 0, 0, 1360, 1361, 7, 11, 0, 0, 1361, 1362, 7, 2, 0, 0, 1362, 1363, 7, 19, 0, 0, 
1363, 1364, 7, 6, 0, 0, 1364, 1365, 7, 7, 0, 0, 1365, 1366, 7, 18, 0, 0, 1366, 316, 1, 0, 0, 0, 1367, 1368, 7, 5, 0, 0, 1368, 1369, 7, 16, 0, 0, 1369, 1370, 7, 17, 0, 0, 1370, 1371, 7, 10, 0, 0, 1371, 1372, 5, 95, 0, 0, 1372, 1373, 7, 8, 0, 0, 1373, 1374, 7, 6, 0, 0, 1374, 1375, 7, 14, 0, 0, 1375, 1376, 7, 4, 0, 0, 1376, 318, 1, 0, 0, 0, 1377, 1378, 7, 8, 0, 0, 1378, 1379, 7, 10, 0, 0, 1379, 1380, 7, 7, 0, 0, 1380, 1381, 7, 14, 0, 0, 1381, 1382, 7, 10, 0, 0, 1382, 1383, 5, 95, 0, 0, 1383, 1384, 7, 3, 0, 0, 1384, 1385, 7, 0, 0, 0, 1385, 1386, 7, 7, 0, 0, 1386, 1387, 7, 20, 0, 0, 1387, 320, 1, 0, 0, 0, 1388, 1389, 7, 11, 0, 0, 1389, 1390, 7, 0, 0, 0, 1390, 1391, 7, 18, 0, 0, 1391, 322, 1, 0, 0, 0, 1392, 1393, 7, 11, 0, 0, 1393, 1394, 7, 0, 0, 0, 1394, 1395, 7, 14, 0, 0, 1395, 1396, 7, 4, 0, 0, 1396, 1397, 5, 95, 0, 0, 1397, 1398, 7, 23, 0, 0, 1398, 1399, 7, 0, 0, 0, 1399, 1400, 7, 11, 0, 0, 1400, 1401, 7, 16, 0, 0, 1401, 1402, 7, 10, 0, 0, 1402, 324, 1, 0, 0, 0, 1403, 1404, 7, 11, 0, 0, 1404, 1405, 7, 10, 0, 0, 1405, 1406, 7, 0, 0, 0, 1406, 1407, 7, 8, 0, 0, 1407, 326, 1, 0, 0, 0, 1408, 1409, 7, 7, 0, 0, 1409, 1410, 7, 4, 0, 0, 1410, 1411, 7, 15, 0, 0, 1411, 1412, 5, 95, 0, 0, 1412, 1413, 7, 23, 0, 0, 1413, 1414, 7, 0, 0, 0, 1414, 1415, 7, 11, 0, 0, 1415, 1416, 7, 16, 0, 0, 1416, 1417, 7, 10, 0, 0, 1417, 328, 1, 0, 0, 0, 1418, 1419, 7, 7, 0, 0, 1419, 1420, 7, 4, 0, 0, 1420, 1421, 7, 6, 0, 0, 1421, 1422, 7, 11, 0, 0, 1422, 1423, 7, 10, 0, 0, 1423, 330, 1, 0, 0, 0, 1424, 1425, 7, 21, 0, 0, 1425, 1426, 7, 10, 0, 0, 1426, 1427, 7, 3, 0, 0, 1427, 1428, 7, 5, 0, 0, 1428, 1429, 7, 10, 0, 0, 1429, 1430, 7, 7, 0, 0, 1430, 1431, 7, 4, 0, 0, 1431, 1432, 5, 95, 0, 0, 1432, 1433, 7, 3, 0, 0, 1433, 1434, 7, 0, 0, 0, 1434, 1435, 7, 7, 0, 0, 1435, 1436, 7, 20, 0, 0, 1436, 332, 1, 0, 0, 0, 1437, 1438, 7, 3, 0, 0, 1438, 1439, 7, 0, 0, 0, 1439, 1440, 7, 7, 0, 0, 1440, 1441, 7, 20, 0, 0, 1441, 334, 1, 0, 0, 0, 1442, 1443, 7, 3, 0, 0, 1443, 1444, 7, 2, 0, 0, 1444, 1445, 7, 19, 0, 0, 
1445, 1446, 5, 95, 0, 0, 1446, 1447, 7, 7, 0, 0, 1447, 1448, 7, 16, 0, 0, 1448, 1449, 7, 17, 0, 0, 1449, 1450, 7, 1, 0, 0, 1450, 1451, 7, 10, 0, 0, 1451, 1452, 7, 3, 0, 0, 1452, 336, 1, 0, 0, 0, 1453, 1454, 7, 18, 0, 0, 1454, 1455, 7, 10, 0, 0, 1455, 1456, 7, 7, 0, 0, 1456, 1457, 7, 10, 0, 0, 1457, 1458, 7, 3, 0, 0, 1458, 1459, 7, 0, 0, 0, 1459, 1460, 7, 4, 0, 0, 1460, 1461, 7, 10, 0, 0, 1461, 1462, 7, 8, 0, 0, 1462, 338, 1, 0, 0, 0, 1463, 1464, 7, 0, 0, 0, 1464, 1465, 7, 11, 0, 0, 1465, 1466, 7, 19, 0, 0, 1466, 1467, 7, 0, 0, 0, 1467, 1468, 7, 12, 0, 0, 1468, 1469, 7, 14, 0, 0, 1469, 340, 1, 0, 0, 0, 1470, 1471, 7, 14, 0, 0, 1471, 1472, 7, 4, 0, 0, 1472, 1473, 7, 2, 0, 0, 1473, 1474, 7, 3, 0, 0, 1474, 1475, 7, 10, 0, 0, 1475, 1476, 7, 8, 0, 0, 1476, 342, 1, 0, 0, 0, 1477, 1478, 7, 4, 0, 0, 1478, 1479, 7, 3, 0, 0, 1479, 1480, 7, 16, 0, 0, 1480, 1481, 7, 10, 0, 0, 1481, 344, 1, 0, 0, 0, 1482, 1483, 7, 9, 0, 0, 1483, 1484, 7, 0, 0, 0, 1484, 1485, 7, 11, 0, 0, 1485, 1486, 7, 14, 0, 0, 1486, 1487, 7, 10, 0, 0, 1487, 346, 1, 0, 0, 0, 1488, 1489, 7, 19, 0, 0, 1489, 1490, 7, 6, 0, 0, 1490, 1491, 7, 7, 0, 0, 1491, 1492, 7, 8, 0, 0, 1492, 1493, 7, 2, 0, 0, 1493, 1494, 7, 19, 0, 0, 1494, 348, 1, 0, 0, 0, 1495, 1496, 7, 7, 0, 0, 1496, 1497, 7, 16, 0, 0, 1497, 1498, 7, 11, 0, 0, 1498, 1499, 7, 11, 0, 0, 1499, 1500, 7, 14, 0, 0, 1500, 350, 1, 0, 0, 0, 1501, 1502, 7, 9, 0, 0, 1502, 1503, 7, 6, 0, 0, 1503, 1504, 7, 3, 0, 0, 1504, 1505, 7, 14, 0, 0, 1505, 1506, 7, 4, 0, 0, 1506, 352, 1, 0, 0, 0, 1507, 1508, 7, 11, 0, 0, 1508, 1509, 7, 0, 0, 0, 1509, 1510, 7, 14, 0, 0, 1510, 1511, 7, 4, 0, 0, 1511, 354, 1, 0, 0, 0, 1512, 1513, 7, 9, 0, 0, 1513, 1514, 7, 6, 0, 0, 1514, 1515, 7, 11, 0, 0, 1515, 1516, 7, 4, 0, 0, 1516, 1517, 7, 10, 0, 0, 1517, 1518, 7, 3, 0, 0, 1518, 356, 1, 0, 0, 0, 1519, 1520, 7, 18, 0, 0, 1520, 1521, 7, 3, 0, 0, 1521, 1522, 7, 2, 0, 0, 1522, 1523, 7, 16, 0, 0, 1523, 1524, 7, 21, 0, 0, 1524, 1525, 7, 14, 0, 0, 1525, 358, 1, 0, 0, 0, 1526, 1527, 7, 10, 0, 0, 1527, 
1528, 7, 22, 0, 0, 1528, 1529, 7, 5, 0, 0, 1529, 1530, 7, 11, 0, 0, 1530, 1531, 7, 16, 0, 0, 1531, 1532, 7, 8, 0, 0, 1532, 1533, 7, 10, 0, 0, 1533, 360, 1, 0, 0, 0, 1534, 1535, 7, 4, 0, 0, 1535, 1536, 7, 6, 0, 0, 1536, 1537, 7, 10, 0, 0, 1537, 1538, 7, 14, 0, 0, 1538, 362, 1, 0, 0, 0, 1539, 1540, 7, 2, 0, 0, 1540, 1541, 7, 4, 0, 0, 1541, 1542, 7, 15, 0, 0, 1542, 1543, 7, 10, 0, 0, 1543, 1544, 7, 3, 0, 0, 1544, 1545, 7, 14, 0, 0, 1545, 364, 1, 0, 0, 0, 1546, 1547, 7, 8, 0, 0, 1547, 1548, 7, 2, 0, 0, 1548, 366, 1, 0, 0, 0, 1549, 1550, 7, 7, 0, 0, 1550, 1551, 7, 2, 0, 0, 1551, 1552, 7, 4, 0, 0, 1552, 1553, 7, 15, 0, 0, 1553, 1554, 7, 6, 0, 0, 1554, 1555, 7, 7, 0, 0, 1555, 1556, 7, 18, 0, 0, 1556, 368, 1, 0, 0, 0, 1557, 1563, 5, 34, 0, 0, 1558, 1562, 8, 26, 0, 0, 1559, 1560, 5, 34, 0, 0, 1560, 1562, 5, 34, 0, 0, 1561, 1558, 1, 0, 0, 0, 1561, 1559, 1, 0, 0, 0, 1562, 1565, 1, 0, 0, 0, 1563, 1561, 1, 0, 0, 0, 1563, 1564, 1, 0, 0, 0, 1564, 1566, 1, 0, 0, 0, 1565, 1563, 1, 0, 0, 0, 1566, 1593, 5, 34, 0, 0, 1567, 1573, 5, 96, 0, 0, 1568, 1572, 8, 27, 0, 0, 1569, 1570, 5, 96, 0, 0, 1570, 1572, 5, 96, 0, 0, 1571, 1568, 1, 0, 0, 0, 1571, 1569, 1, 0, 0, 0, 1572, 1575, 1, 0, 0, 0, 1573, 1571, 1, 0, 0, 0, 1573, 1574, 1, 0, 0, 0, 1574, 1576, 1, 0, 0, 0, 1575, 1573, 1, 0, 0, 0, 1576, 1593, 5, 96, 0, 0, 1577, 1581, 5, 91, 0, 0, 1578, 1580, 8, 28, 0, 0, 1579, 1578, 1, 0, 0, 0, 1580, 1583, 1, 0, 0, 0, 1581, 1579, 1, 0, 0, 0, 1581, 1582, 1, 0, 0, 0, 1582, 1584, 1, 0, 0, 0, 1583, 1581, 1, 0, 0, 0, 1584, 1593, 5, 93, 0, 0, 1585, 1589, 7, 29, 0, 0, 1586, 1588, 7, 30, 0, 0, 1587, 1586, 1, 0, 0, 0, 1588, 1591, 1, 0, 0, 0, 1589, 1587, 1, 0, 0, 0, 1589, 1590, 1, 0, 0, 0, 1590, 1593, 1, 0, 0, 0, 1591, 1589, 1, 0, 0, 0, 1592, 1557, 1, 0, 0, 0, 1592, 1567, 1, 0, 0, 0, 1592, 1577, 1, 0, 0, 0, 1592, 1585, 1, 0, 0, 0, 1593, 370, 1, 0, 0, 0, 1594, 1596, 3, 389, 194, 0, 1595, 1594, 1, 0, 0, 0, 1596, 1597, 1, 0, 0, 0, 1597, 1595, 1, 0, 0, 0, 1597, 1598, 1, 0, 0, 0, 1598, 1606, 1, 0, 0, 0, 1599, 1603, 
5, 46, 0, 0, 1600, 1602, 3, 389, 194, 0, 1601, 1600, 1, 0, 0, 0, 1602, 1605, 1, 0, 0, 0, 1603, 1601, 1, 0, 0, 0, 1603, 1604, 1, 0, 0, 0, 1604, 1607, 1, 0, 0, 0, 1605, 1603, 1, 0, 0, 0, 1606, 1599, 1, 0, 0, 0, 1606, 1607, 1, 0, 0, 0, 1607, 1615, 1, 0, 0, 0, 1608, 1610, 5, 46, 0, 0, 1609, 1611, 3, 389, 194, 0, 1610, 1609, 1, 0, 0, 0, 1611, 1612, 1, 0, 0, 0, 1612, 1610, 1, 0, 0, 0, 1612, 1613, 1, 0, 0, 0, 1613, 1615, 1, 0, 0, 0, 1614, 1595, 1, 0, 0, 0, 1614, 1608, 1, 0, 0, 0, 1615, 1625, 1, 0, 0, 0, 1616, 1618, 7, 10, 0, 0, 1617, 1619, 7, 31, 0, 0, 1618, 1617, 1, 0, 0, 0, 1618, 1619, 1, 0, 0, 0, 1619, 1621, 1, 0, 0, 0, 1620, 1622, 3, 389, 194, 0, 1621, 1620, 1, 0, 0, 0, 1622, 1623, 1, 0, 0, 0, 1623, 1621, 1, 0, 0, 0, 1623, 1624, 1, 0, 0, 0, 1624, 1626, 1, 0, 0, 0, 1625, 1616, 1, 0, 0, 0, 1625, 1626, 1, 0, 0, 0, 1626, 1636, 1, 0, 0, 0, 1627, 1628, 5, 48, 0, 0, 1628, 1629, 7, 22, 0, 0, 1629, 1631, 1, 0, 0, 0, 1630, 1632, 3, 387, 193, 0, 1631, 1630, 1, 0, 0, 0, 1632, 1633, 1, 0, 0, 0, 1633, 1631, 1, 0, 0, 0, 1633, 1634, 1, 0, 0, 0, 1634, 1636, 1, 0, 0, 0, 1635, 1614, 1, 0, 0, 0, 1635, 1627, 1, 0, 0, 0, 1636, 372, 1, 0, 0, 0, 1637, 1641, 5, 63, 0, 0, 1638, 1640, 3, 389, 194, 0, 1639, 1638, 1, 0, 0, 0, 1640, 1643, 1, 0, 0, 0, 1641, 1639, 1, 0, 0, 0, 1641, 1642, 1, 0, 0, 0, 1642, 1647, 1, 0, 0, 0, 1643, 1641, 1, 0, 0, 0, 1644, 1645, 7, 32, 0, 0, 1645, 1647, 3, 369, 184, 0, 1646, 1637, 1, 0, 0, 0, 1646, 1644, 1, 0, 0, 0, 1647, 374, 1, 0, 0, 0, 1648, 1654, 5, 39, 0, 0, 1649, 1653, 8, 33, 0, 0, 1650, 1651, 5, 39, 0, 0, 1651, 1653, 5, 39, 0, 0, 1652, 1649, 1, 0, 0, 0, 1652, 1650, 1, 0, 0, 0, 1653, 1656, 1, 0, 0, 0, 1654, 1652, 1, 0, 0, 0, 1654, 1655, 1, 0, 0, 0, 1655, 1657, 1, 0, 0, 0, 1656, 1654, 1, 0, 0, 0, 1657, 1658, 5, 39, 0, 0, 1658, 376, 1, 0, 0, 0, 1659, 1660, 7, 22, 0, 0, 1660, 1661, 3, 375, 187, 0, 1661, 378, 1, 0, 0, 0, 1662, 1663, 5, 45, 0, 0, 1663, 1664, 5, 45, 0, 0, 1664, 1668, 1, 0, 0, 0, 1665, 1667, 8, 34, 0, 0, 1666, 1665, 1, 0, 0, 0, 1667, 1670, 1, 0, 0, 0, 
1668, 1666, 1, 0, 0, 0, 1668, 1669, 1, 0, 0, 0, 1669, 1676, 1, 0, 0, 0, 1670, 1668, 1, 0, 0, 0, 1671, 1673, 5, 13, 0, 0, 1672, 1671, 1, 0, 0, 0, 1672, 1673, 1, 0, 0, 0, 1673, 1674, 1, 0, 0, 0, 1674, 1677, 5, 10, 0, 0, 1675, 1677, 5, 0, 0, 1, 1676, 1672, 1, 0, 0, 0, 1676, 1675, 1, 0, 0, 0, 1677, 1678, 1, 0, 0, 0, 1678, 1679, 6, 189, 0, 0, 1679, 380, 1, 0, 0, 0, 1680, 1681, 5, 47, 0, 0, 1681, 1682, 5, 42, 0, 0, 1682, 1686, 1, 0, 0, 0, 1683, 1685, 9, 0, 0, 0, 1684, 1683, 1, 0, 0, 0, 1685, 1688, 1, 0, 0, 0, 1686, 1687, 1, 0, 0, 0, 1686, 1684, 1, 0, 0, 0, 1687, 1689, 1, 0, 0, 0, 1688, 1686, 1, 0, 0, 0, 1689, 1690, 5, 42, 0, 0, 1690, 1691, 5, 47, 0, 0, 1691, 1692, 1, 0, 0, 0, 1692, 1693, 6, 190, 0, 0, 1693, 382, 1, 0, 0, 0, 1694, 1695, 7, 35, 0, 0, 1695, 1696, 1, 0, 0, 0, 1696, 1697, 6, 191, 0, 0, 1697, 384, 1, 0, 0, 0, 1698, 1699, 9, 0, 0, 0, 1699, 386, 1, 0, 0, 0, 1700, 1701, 7, 36, 0, 0, 1701, 388, 1, 0, 0, 0, 1702, 1703, 7, 37, 0, 0, 1703, 390, 1, 0, 0, 0, 26, 0, 1561, 1563, 1571, 1573, 1581, 1589, 1592, 1597, 1603, 1606, 1612, 1614, 1618, 1623, 1625, 1633, 1635, 1641, 1646, 1652, 1654, 1668, 1672, 1676, 1686, 1, 0, 1, 0, } deserializer := antlr.NewATNDeserializer(nil) staticData.atn = deserializer.Deserialize(staticData.serializedATN) atn := staticData.atn staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState)) decisionToDFA := staticData.decisionToDFA for index, state := range atn.DecisionToState { decisionToDFA[index] = antlr.NewDFA(state, index) } } // LexerInit initializes any static state used to implement Lexer. By default the // static state used to implement the lexer is lazily initialized during the first call to // NewLexer(). You can call this function if you wish to initialize the static state ahead // of time. func LexerInit() { staticData := &lexerLexerStaticData staticData.once.Do(lexerLexerInit) } // NewLexer produces a new lexer instance for the optional input antlr.CharStream. 
func NewLexer(input antlr.CharStream) *Lexer { LexerInit() l := new(Lexer) l.BaseLexer = antlr.NewBaseLexer(input) staticData := &lexerLexerStaticData l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache) l.channelNames = staticData.channelNames l.modeNames = staticData.modeNames l.RuleNames = staticData.ruleNames l.LiteralNames = staticData.literalNames l.SymbolicNames = staticData.symbolicNames l.GrammarFileName = "Lexer.g4" // TODO: l.EOF = antlr.TokenEOF return l } // Lexer tokens. const ( LexerSCOL = 1 LexerDOT = 2 LexerOPEN_PAR = 3 LexerCLOSE_PAR = 4 LexerCOMMA = 5 LexerASSIGN = 6 LexerSTAR = 7 LexerPLUS = 8 LexerMINUS = 9 LexerTILDE = 10 LexerPIPE2 = 11 LexerDIV = 12 LexerMOD = 13 LexerLT2 = 14 LexerGT2 = 15 LexerAMP = 16 LexerPIPE = 17 LexerLT = 18 LexerLT_EQ = 19 LexerGT = 20 LexerGT_EQ = 21 LexerEQ = 22 LexerNOT_EQ1 = 23 LexerNOT_EQ2 = 24 LexerABORT_ = 25 LexerACTION_ = 26 LexerADD_ = 27 LexerAFTER_ = 28 LexerALL_ = 29 LexerALTER_ = 30 LexerANALYZE_ = 31 LexerAND_ = 32 LexerAS_ = 33 LexerASC_ = 34 LexerATTACH_ = 35 LexerAUTOINCREMENT_ = 36 LexerBEFORE_ = 37 LexerBEGIN_ = 38 LexerBETWEEN_ = 39 LexerBY_ = 40 LexerCASCADE_ = 41 LexerCASE_ = 42 LexerCAST_ = 43 LexerCHECK_ = 44 LexerCOLLATE_ = 45 LexerCOLUMN_ = 46 LexerCOMMIT_ = 47 LexerCONFLICT_ = 48 LexerCONSTRAINT_ = 49 LexerCREATE_ = 50 LexerCROSS_ = 51 LexerCURRENT_DATE_ = 52 LexerCURRENT_TIME_ = 53 LexerCURRENT_TIMESTAMP_ = 54 LexerDATABASE_ = 55 LexerDEFAULT_ = 56 LexerDEFERRABLE_ = 57 LexerDEFERRED_ = 58 LexerDELETE_ = 59 LexerDESC_ = 60 LexerDETACH_ = 61 LexerDISTINCT_ = 62 LexerDROP_ = 63 LexerEACH_ = 64 LexerELSE_ = 65 LexerEND_ = 66 LexerESCAPE_ = 67 LexerEXCEPT_ = 68 LexerEXCLUSIVE_ = 69 LexerEXISTS_ = 70 LexerEXPLAIN_ = 71 LexerFAIL_ = 72 LexerFOR_ = 73 LexerFOREIGN_ = 74 LexerFROM_ = 75 LexerFULL_ = 76 LexerGLOB_ = 77 LexerGROUP_ = 78 LexerHAVING_ = 79 LexerIF_ = 80 LexerIGNORE_ = 81 LexerIMMEDIATE_ = 82 LexerIN_ = 83 
LexerINDEX_ = 84 LexerINDEXED_ = 85 LexerINITIALLY_ = 86 LexerINNER_ = 87 LexerINSERT_ = 88 LexerINSTEAD_ = 89 LexerINTERSECT_ = 90 LexerINTO_ = 91 LexerIS_ = 92 LexerISNULL_ = 93 LexerJOIN_ = 94 LexerKEY_ = 95 LexerLEFT_ = 96 LexerLIKE_ = 97 LexerLIMIT_ = 98 LexerMATCH_ = 99 LexerNATURAL_ = 100 LexerNO_ = 101 LexerNOT_ = 102 LexerNOTNULL_ = 103 LexerNULL_ = 104 LexerOF_ = 105 LexerOFFSET_ = 106 LexerON_ = 107 LexerOR_ = 108 LexerORDER_ = 109 LexerOUTER_ = 110 LexerPLAN_ = 111 LexerPRAGMA_ = 112 LexerPRIMARY_ = 113 LexerQUERY_ = 114 LexerRAISE_ = 115 LexerRECURSIVE_ = 116 LexerREFERENCES_ = 117 LexerREGEXP_ = 118 LexerREINDEX_ = 119 LexerRELEASE_ = 120 LexerRENAME_ = 121 LexerREPLACE_ = 122 LexerRESTRICT_ = 123 LexerRETURNING_ = 124 LexerRIGHT_ = 125 LexerROLLBACK_ = 126 LexerROW_ = 127 LexerROWS_ = 128 LexerSAVEPOINT_ = 129 LexerSELECT_ = 130 LexerSET_ = 131 LexerTABLE_ = 132 LexerTEMP_ = 133 LexerTEMPORARY_ = 134 LexerTHEN_ = 135 LexerTO_ = 136 LexerTRANSACTION_ = 137 LexerTRIGGER_ = 138 LexerUNION_ = 139 LexerUNIQUE_ = 140 LexerUPDATE_ = 141 LexerUSING_ = 142 LexerVACUUM_ = 143 LexerVALUES_ = 144 LexerVIEW_ = 145 LexerVIRTUAL_ = 146 LexerWHEN_ = 147 LexerWHERE_ = 148 LexerWITH_ = 149 LexerWITHOUT_ = 150 LexerFIRST_VALUE_ = 151 LexerOVER_ = 152 LexerPARTITION_ = 153 LexerRANGE_ = 154 LexerPRECEDING_ = 155 LexerUNBOUNDED_ = 156 LexerCURRENT_ = 157 LexerFOLLOWING_ = 158 LexerCUME_DIST_ = 159 LexerDENSE_RANK_ = 160 LexerLAG_ = 161 LexerLAST_VALUE_ = 162 LexerLEAD_ = 163 LexerNTH_VALUE_ = 164 LexerNTILE_ = 165 LexerPERCENT_RANK_ = 166 LexerRANK_ = 167 LexerROW_NUMBER_ = 168 LexerGENERATED_ = 169 LexerALWAYS_ = 170 LexerSTORED_ = 171 LexerTRUE_ = 172 LexerFALSE_ = 173 LexerWINDOW_ = 174 LexerNULLS_ = 175 LexerFIRST_ = 176 LexerLAST_ = 177 LexerFILTER_ = 178 LexerGROUPS_ = 179 LexerEXCLUDE_ = 180 LexerTIES_ = 181 LexerOTHERS_ = 182 LexerDO_ = 183 LexerNOTHING_ = 184 LexerIDENTIFIER = 185 LexerNUMERIC_LITERAL = 186 LexerBIND_PARAMETER = 187 LexerSTRING_LITERAL = 188 
LexerBLOB_LITERAL = 189 LexerSINGLE_LINE_COMMENT = 190 LexerMULTILINE_COMMENT = 191 LexerSPACES = 192 LexerUNEXPECTED_CHAR = 193 ) atlas-0.7.2/cmd/atlas/internal/sqlparse/sqliteparse/parser.go000066400000000000000000025313501431455511600243360ustar00rootroot00000000000000// Code generated from Parser.g4 by ANTLR 4.10.1. DO NOT EDIT. package sqliteparse // Parser import ( "fmt" "strconv" "sync" "github.com/antlr/antlr4/runtime/Go/antlr" ) // Suppress unused import errors var _ = fmt.Printf var _ = strconv.Itoa var _ = sync.Once{} type Parser struct { *antlr.BaseParser } var parserParserStaticData struct { once sync.Once serializedATN []int32 literalNames []string symbolicNames []string ruleNames []string predictionContextCache *antlr.PredictionContextCache atn *antlr.ATN decisionToDFA []*antlr.DFA } func parserParserInit() { staticData := &parserParserStaticData staticData.literalNames = []string{ "", "';'", "'.'", "'('", "')'", "','", "'='", "'*'", "'+'", "'-'", "'~'", "'||'", "'/'", "'%'", "'<<'", "'>>'", "'&'", "'|'", "'<'", "'<='", "'>'", "'>='", "'=='", "'!='", "'<>'", "'ABORT'", "'ACTION'", "'ADD'", "'AFTER'", "'ALL'", "'ALTER'", "'ANALYZE'", "'AND'", "'AS'", "'ASC'", "'ATTACH'", "'AUTOINCREMENT'", "'BEFORE'", "'BEGIN'", "'BETWEEN'", "'BY'", "'CASCADE'", "'CASE'", "'CAST'", "'CHECK'", "'COLLATE'", "'COLUMN'", "'COMMIT'", "'CONFLICT'", "'CONSTRAINT'", "'CREATE'", "'CROSS'", "'CURRENT_DATE'", "'CURRENT_TIME'", "'CURRENT_TIMESTAMP'", "'DATABASE'", "'DEFAULT'", "'DEFERRABLE'", "'DEFERRED'", "'DELETE'", "'DESC'", "'DETACH'", "'DISTINCT'", "'DROP'", "'EACH'", "'ELSE'", "'END'", "'ESCAPE'", "'EXCEPT'", "'EXCLUSIVE'", "'EXISTS'", "'EXPLAIN'", "'FAIL'", "'FOR'", "'FOREIGN'", "'FROM'", "'FULL'", "'GLOB'", "'GROUP'", "'HAVING'", "'IF'", "'IGNORE'", "'IMMEDIATE'", "'IN'", "'INDEX'", "'INDEXED'", "'INITIALLY'", "'INNER'", "'INSERT'", "'INSTEAD'", "'INTERSECT'", "'INTO'", "'IS'", "'ISNULL'", "'JOIN'", "'KEY'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MATCH'", "'NATURAL'", "'NO'", 
"'NOT'", "'NOTNULL'", "'NULL'", "'OF'", "'OFFSET'", "'ON'", "'OR'", "'ORDER'", "'OUTER'", "'PLAN'", "'PRAGMA'", "'PRIMARY'", "'QUERY'", "'RAISE'", "'RECURSIVE'", "'REFERENCES'", "'REGEXP'", "'REINDEX'", "'RELEASE'", "'RENAME'", "'REPLACE'", "'RESTRICT'", "'RETURNING'", "'RIGHT'", "'ROLLBACK'", "'ROW'", "'ROWS'", "'SAVEPOINT'", "'SELECT'", "'SET'", "'TABLE'", "'TEMP'", "'TEMPORARY'", "'THEN'", "'TO'", "'TRANSACTION'", "'TRIGGER'", "'UNION'", "'UNIQUE'", "'UPDATE'", "'USING'", "'VACUUM'", "'VALUES'", "'VIEW'", "'VIRTUAL'", "'WHEN'", "'WHERE'", "'WITH'", "'WITHOUT'", "'FIRST_VALUE'", "'OVER'", "'PARTITION'", "'RANGE'", "'PRECEDING'", "'UNBOUNDED'", "'CURRENT'", "'FOLLOWING'", "'CUME_DIST'", "'DENSE_RANK'", "'LAG'", "'LAST_VALUE'", "'LEAD'", "'NTH_VALUE'", "'NTILE'", "'PERCENT_RANK'", "'RANK'", "'ROW_NUMBER'", "'GENERATED'", "'ALWAYS'", "'STORED'", "'TRUE'", "'FALSE'", "'WINDOW'", "'NULLS'", "'FIRST'", "'LAST'", "'FILTER'", "'GROUPS'", "'EXCLUDE'", "'TIES'", "'OTHERS'", "'DO'", "'NOTHING'", } staticData.symbolicNames = []string{ "", "SCOL", "DOT", "OPEN_PAR", "CLOSE_PAR", "COMMA", "ASSIGN", "STAR", "PLUS", "MINUS", "TILDE", "PIPE2", "DIV", "MOD", "LT2", "GT2", "AMP", "PIPE", "LT", "LT_EQ", "GT", "GT_EQ", "EQ", "NOT_EQ1", "NOT_EQ2", "ABORT_", "ACTION_", "ADD_", "AFTER_", "ALL_", "ALTER_", "ANALYZE_", "AND_", "AS_", "ASC_", "ATTACH_", "AUTOINCREMENT_", "BEFORE_", "BEGIN_", "BETWEEN_", "BY_", "CASCADE_", "CASE_", "CAST_", "CHECK_", "COLLATE_", "COLUMN_", "COMMIT_", "CONFLICT_", "CONSTRAINT_", "CREATE_", "CROSS_", "CURRENT_DATE_", "CURRENT_TIME_", "CURRENT_TIMESTAMP_", "DATABASE_", "DEFAULT_", "DEFERRABLE_", "DEFERRED_", "DELETE_", "DESC_", "DETACH_", "DISTINCT_", "DROP_", "EACH_", "ELSE_", "END_", "ESCAPE_", "EXCEPT_", "EXCLUSIVE_", "EXISTS_", "EXPLAIN_", "FAIL_", "FOR_", "FOREIGN_", "FROM_", "FULL_", "GLOB_", "GROUP_", "HAVING_", "IF_", "IGNORE_", "IMMEDIATE_", "IN_", "INDEX_", "INDEXED_", "INITIALLY_", "INNER_", "INSERT_", "INSTEAD_", "INTERSECT_", "INTO_", "IS_", 
"ISNULL_", "JOIN_", "KEY_", "LEFT_", "LIKE_", "LIMIT_", "MATCH_", "NATURAL_", "NO_", "NOT_", "NOTNULL_", "NULL_", "OF_", "OFFSET_", "ON_", "OR_", "ORDER_", "OUTER_", "PLAN_", "PRAGMA_", "PRIMARY_", "QUERY_", "RAISE_", "RECURSIVE_", "REFERENCES_", "REGEXP_", "REINDEX_", "RELEASE_", "RENAME_", "REPLACE_", "RESTRICT_", "RETURNING_", "RIGHT_", "ROLLBACK_", "ROW_", "ROWS_", "SAVEPOINT_", "SELECT_", "SET_", "TABLE_", "TEMP_", "TEMPORARY_", "THEN_", "TO_", "TRANSACTION_", "TRIGGER_", "UNION_", "UNIQUE_", "UPDATE_", "USING_", "VACUUM_", "VALUES_", "VIEW_", "VIRTUAL_", "WHEN_", "WHERE_", "WITH_", "WITHOUT_", "FIRST_VALUE_", "OVER_", "PARTITION_", "RANGE_", "PRECEDING_", "UNBOUNDED_", "CURRENT_", "FOLLOWING_", "CUME_DIST_", "DENSE_RANK_", "LAG_", "LAST_VALUE_", "LEAD_", "NTH_VALUE_", "NTILE_", "PERCENT_RANK_", "RANK_", "ROW_NUMBER_", "GENERATED_", "ALWAYS_", "STORED_", "TRUE_", "FALSE_", "WINDOW_", "NULLS_", "FIRST_", "LAST_", "FILTER_", "GROUPS_", "EXCLUDE_", "TIES_", "OTHERS_", "DO_", "NOTHING_", "IDENTIFIER", "NUMERIC_LITERAL", "BIND_PARAMETER", "STRING_LITERAL", "BLOB_LITERAL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "UNEXPECTED_CHAR", } staticData.ruleNames = []string{ "parse", "sql_stmt_list", "sql_stmt", "alter_table_stmt", "analyze_stmt", "attach_stmt", "begin_stmt", "commit_stmt", "rollback_stmt", "savepoint_stmt", "release_stmt", "create_index_stmt", "indexed_column", "create_table_stmt", "column_def", "type_name", "column_constraint", "signed_number", "table_constraint", "foreign_key_clause", "conflict_clause", "create_trigger_stmt", "create_view_stmt", "create_virtual_table_stmt", "with_clause", "cte_table_name", "recursive_cte", "common_table_expression", "delete_stmt", "delete_stmt_limited", "detach_stmt", "drop_stmt", "expr", "raise_function", "literal_value", "insert_stmt", "returning_clause", "upsert_clause", "pragma_stmt", "pragma_value", "reindex_stmt", "select_stmt", "join_clause", "select_core", "factored_select_stmt", "simple_select_stmt", 
"compound_select_stmt", "table_or_subquery", "result_column", "join_operator", "join_constraint", "compound_operator", "update_stmt", "assignment_list", "assignment", "column_name_list", "update_stmt_limited", "qualified_table_name", "vacuum_stmt", "filter_clause", "window_defn", "over_clause", "frame_spec", "frame_clause", "simple_function_invocation", "aggregate_function_invocation", "window_function_invocation", "common_table_stmt", "order_by_stmt", "limit_stmt", "ordering_term", "asc_desc", "frame_left", "frame_right", "frame_single", "window_function", "offset", "default_value", "partition_by", "order_by_expr", "order_by_expr_asc_desc", "expr_asc_desc", "initial_select", "recursive_select", "unary_operator", "error_message", "module_argument", "column_alias", "keyword", "name", "function_name", "schema_name", "table_name", "table_or_index_name", "column_name", "collation_name", "foreign_table", "index_name", "trigger_name", "view_name", "module_name", "pragma_name", "savepoint_name", "table_alias", "transaction_name", "window_name", "alias", "filename", "base_window_name", "simple_func", "aggregate_func", "table_function_name", "any_name", } staticData.predictionContextCache = antlr.NewPredictionContextCache() staticData.serializedATN = []int32{ 4, 1, 193, 2083, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 
52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 1, 0, 5, 0, 228, 8, 0, 10, 0, 12, 0, 231, 9, 0, 1, 0, 1, 0, 1, 1, 5, 1, 236, 8, 1, 10, 1, 12, 1, 239, 9, 1, 1, 1, 1, 1, 4, 1, 243, 8, 1, 11, 1, 12, 1, 244, 1, 1, 5, 1, 248, 8, 1, 10, 1, 12, 1, 251, 9, 1, 1, 1, 5, 1, 254, 8, 1, 10, 1, 12, 1, 257, 9, 1, 1, 2, 1, 2, 1, 2, 3, 2, 262, 8, 2, 3, 2, 264, 8, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 290, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 297, 8, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 304, 8, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 310, 8, 3, 1, 3, 1, 3, 3, 3, 314, 8, 3, 1, 3, 1, 3, 1, 3, 3, 3, 319, 8, 3, 1, 3, 3, 3, 322, 8, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 3, 4, 329, 8, 4, 1, 4, 3, 4, 332, 8, 4, 1, 5, 1, 5, 3, 5, 336, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 3, 6, 344, 8, 6, 1, 6, 1, 6, 3, 6, 348, 8, 6, 3, 6, 350, 8, 6, 1, 7, 1, 7, 3, 7, 354, 8, 7, 1, 8, 1, 8, 3, 8, 358, 8, 8, 1, 8, 1, 8, 3, 8, 362, 8, 8, 1, 8, 3, 8, 365, 8, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 3, 10, 372, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 3, 11, 378, 8, 11, 1, 11, 1, 11, 1, 11, 1, 11, 
3, 11, 384, 8, 11, 1, 11, 1, 11, 1, 11, 3, 11, 389, 8, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 398, 8, 11, 10, 11, 12, 11, 401, 9, 11, 1, 11, 1, 11, 1, 11, 3, 11, 406, 8, 11, 1, 12, 1, 12, 3, 12, 410, 8, 12, 1, 12, 1, 12, 3, 12, 414, 8, 12, 1, 12, 3, 12, 417, 8, 12, 1, 13, 1, 13, 3, 13, 421, 8, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 427, 8, 13, 1, 13, 1, 13, 1, 13, 3, 13, 432, 8, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 439, 8, 13, 10, 13, 12, 13, 442, 9, 13, 1, 13, 1, 13, 5, 13, 446, 8, 13, 10, 13, 12, 13, 449, 9, 13, 1, 13, 1, 13, 1, 13, 3, 13, 454, 8, 13, 1, 13, 1, 13, 3, 13, 458, 8, 13, 1, 14, 1, 14, 3, 14, 462, 8, 14, 1, 14, 5, 14, 465, 8, 14, 10, 14, 12, 14, 468, 9, 14, 1, 15, 4, 15, 471, 8, 15, 11, 15, 12, 15, 472, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 485, 8, 15, 1, 16, 1, 16, 3, 16, 489, 8, 16, 1, 16, 1, 16, 1, 16, 3, 16, 494, 8, 16, 1, 16, 3, 16, 497, 8, 16, 1, 16, 3, 16, 500, 8, 16, 1, 16, 1, 16, 1, 16, 3, 16, 505, 8, 16, 1, 16, 3, 16, 508, 8, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 522, 8, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 529, 8, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 536, 8, 16, 3, 16, 538, 8, 16, 1, 17, 3, 17, 541, 8, 17, 1, 17, 1, 17, 1, 18, 1, 18, 3, 18, 547, 8, 18, 1, 18, 1, 18, 1, 18, 3, 18, 552, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 5, 18, 558, 8, 18, 10, 18, 12, 18, 561, 9, 18, 1, 18, 1, 18, 3, 18, 565, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 5, 18, 578, 8, 18, 10, 18, 12, 18, 581, 9, 18, 1, 18, 1, 18, 1, 18, 3, 18, 586, 8, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 594, 8, 19, 10, 19, 12, 19, 597, 9, 19, 1, 19, 1, 19, 3, 19, 601, 8, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 611, 8, 19, 1, 19, 1, 19, 5, 19, 615, 8, 19, 10, 19, 12, 19, 618, 9, 19, 1, 19, 3, 19, 621, 8, 19, 1, 19, 1, 19, 1, 19, 3, 19, 626, 8, 19, 3, 19, 628, 8, 19, 1, 
20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 636, 8, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 642, 8, 21, 1, 21, 1, 21, 1, 21, 3, 21, 647, 8, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 654, 8, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 663, 8, 21, 10, 21, 12, 21, 666, 9, 21, 3, 21, 668, 8, 21, 3, 21, 670, 8, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 677, 8, 21, 1, 21, 1, 21, 3, 21, 681, 8, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 688, 8, 21, 1, 21, 1, 21, 4, 21, 692, 8, 21, 11, 21, 12, 21, 693, 1, 21, 1, 21, 1, 22, 1, 22, 3, 22, 700, 8, 22, 1, 22, 1, 22, 1, 22, 1, 22, 3, 22, 706, 8, 22, 1, 22, 1, 22, 1, 22, 3, 22, 711, 8, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 718, 8, 22, 10, 22, 12, 22, 721, 9, 22, 1, 22, 1, 22, 3, 22, 725, 8, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 3, 23, 736, 8, 23, 1, 23, 1, 23, 1, 23, 3, 23, 741, 8, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 750, 8, 23, 10, 23, 12, 23, 753, 9, 23, 1, 23, 1, 23, 3, 23, 757, 8, 23, 1, 24, 1, 24, 3, 24, 761, 8, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 775, 8, 24, 10, 24, 12, 24, 778, 9, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 785, 8, 25, 10, 25, 12, 25, 788, 9, 25, 1, 25, 1, 25, 3, 25, 792, 8, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 800, 8, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 810, 8, 27, 10, 27, 12, 27, 813, 9, 27, 1, 27, 1, 27, 3, 27, 817, 8, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 3, 28, 825, 8, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 3, 28, 832, 8, 28, 1, 28, 3, 28, 835, 8, 28, 1, 29, 3, 29, 838, 8, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 3, 29, 845, 8, 29, 1, 29, 3, 29, 848, 8, 29, 1, 29, 3, 29, 851, 8, 29, 1, 29, 3, 29, 854, 8, 29, 1, 30, 1, 30, 3, 30, 858, 8, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 3, 31, 866, 8, 31, 1, 31, 1, 31, 1, 31, 3, 31, 871, 8, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 
881, 8, 32, 1, 32, 1, 32, 1, 32, 3, 32, 886, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 895, 8, 32, 1, 32, 1, 32, 1, 32, 5, 32, 900, 8, 32, 10, 32, 12, 32, 903, 9, 32, 1, 32, 3, 32, 906, 8, 32, 1, 32, 1, 32, 3, 32, 910, 8, 32, 1, 32, 3, 32, 913, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 919, 8, 32, 10, 32, 12, 32, 922, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 934, 8, 32, 1, 32, 3, 32, 937, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 945, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 4, 32, 952, 8, 32, 11, 32, 12, 32, 953, 1, 32, 1, 32, 3, 32, 958, 8, 32, 1, 32, 1, 32, 1, 32, 3, 32, 963, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 993, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 1005, 8, 32, 1, 32, 1, 32, 1, 32, 3, 32, 1010, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 1022, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 1028, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 1035, 8, 32, 1, 32, 1, 32, 3, 32, 1039, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 1047, 8, 32, 10, 32, 12, 32, 1050, 9, 32, 3, 32, 1052, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 1058, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 1064, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 1071, 8, 32, 10, 32, 12, 32, 1074, 9, 32, 3, 32, 1076, 8, 32, 1, 32, 1, 32, 3, 32, 1080, 8, 32, 5, 32, 1082, 8, 32, 10, 32, 12, 32, 1085, 9, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 3, 33, 1093, 8, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 3, 35, 1100, 8, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 3, 35, 1107, 8, 35, 1, 35, 1, 35, 1, 35, 1, 35, 3, 35, 1113, 8, 35, 1, 35, 1, 35, 1, 35, 3, 35, 1118, 8, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 1124, 8, 35, 10, 35, 12, 35, 1127, 9, 35, 1, 35, 1, 35, 3, 35, 
1131, 8, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 1138, 8, 35, 10, 35, 12, 35, 1141, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 1149, 8, 35, 10, 35, 12, 35, 1152, 9, 35, 1, 35, 1, 35, 5, 35, 1156, 8, 35, 10, 35, 12, 35, 1159, 9, 35, 1, 35, 3, 35, 1162, 8, 35, 1, 35, 3, 35, 1165, 8, 35, 1, 35, 1, 35, 3, 35, 1169, 8, 35, 1, 35, 3, 35, 1172, 8, 35, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 1178, 8, 36, 10, 36, 12, 36, 1181, 9, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 5, 37, 1189, 8, 37, 10, 37, 12, 37, 1192, 9, 37, 1, 37, 1, 37, 1, 37, 3, 37, 1197, 8, 37, 3, 37, 1199, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 1207, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 1214, 8, 37, 1, 37, 1, 37, 1, 37, 5, 37, 1219, 8, 37, 10, 37, 12, 37, 1222, 9, 37, 1, 37, 1, 37, 3, 37, 1226, 8, 37, 3, 37, 1228, 8, 37, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 1234, 8, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 1243, 8, 38, 1, 39, 1, 39, 1, 39, 3, 39, 1248, 8, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 3, 40, 1255, 8, 40, 1, 40, 1, 40, 3, 40, 1259, 8, 40, 3, 40, 1261, 8, 40, 1, 41, 3, 41, 1264, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 5, 41, 1270, 8, 41, 10, 41, 12, 41, 1273, 9, 41, 1, 41, 3, 41, 1276, 8, 41, 1, 41, 3, 41, 1279, 8, 41, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 1285, 8, 42, 5, 42, 1287, 8, 42, 10, 42, 12, 42, 1290, 9, 42, 1, 43, 1, 43, 3, 43, 1294, 8, 43, 1, 43, 1, 43, 1, 43, 5, 43, 1299, 8, 43, 10, 43, 12, 43, 1302, 9, 43, 1, 43, 1, 43, 1, 43, 1, 43, 5, 43, 1308, 8, 43, 10, 43, 12, 43, 1311, 9, 43, 1, 43, 3, 43, 1314, 8, 43, 3, 43, 1316, 8, 43, 1, 43, 1, 43, 3, 43, 1320, 8, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 5, 43, 1327, 8, 43, 10, 43, 12, 43, 1330, 9, 43, 1, 43, 1, 43, 3, 43, 1334, 8, 43, 3, 43, 1336, 8, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 5, 43, 1347, 8, 43, 10, 43, 12, 43, 1350, 9, 43, 3, 43, 1352, 8, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 5, 43, 1359, 8, 43, 10, 43, 12, 43, 1362, 9, 43, 1, 43, 1, 43, 1, 43, 1, 
43, 1, 43, 1, 43, 5, 43, 1370, 8, 43, 10, 43, 12, 43, 1373, 9, 43, 1, 43, 1, 43, 5, 43, 1377, 8, 43, 10, 43, 12, 43, 1380, 9, 43, 3, 43, 1382, 8, 43, 1, 44, 1, 44, 1, 45, 3, 45, 1387, 8, 45, 1, 45, 1, 45, 3, 45, 1391, 8, 45, 1, 45, 3, 45, 1394, 8, 45, 1, 46, 3, 46, 1397, 8, 46, 1, 46, 1, 46, 1, 46, 3, 46, 1402, 8, 46, 1, 46, 1, 46, 3, 46, 1406, 8, 46, 1, 46, 4, 46, 1409, 8, 46, 11, 46, 12, 46, 1410, 1, 46, 3, 46, 1414, 8, 46, 1, 46, 3, 46, 1417, 8, 46, 1, 47, 1, 47, 1, 47, 3, 47, 1422, 8, 47, 1, 47, 1, 47, 3, 47, 1426, 8, 47, 1, 47, 3, 47, 1429, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 1436, 8, 47, 1, 47, 1, 47, 1, 47, 3, 47, 1441, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 5, 47, 1448, 8, 47, 10, 47, 12, 47, 1451, 9, 47, 1, 47, 1, 47, 3, 47, 1455, 8, 47, 1, 47, 3, 47, 1458, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 5, 47, 1464, 8, 47, 10, 47, 12, 47, 1467, 9, 47, 1, 47, 3, 47, 1470, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 1478, 8, 47, 1, 47, 3, 47, 1481, 8, 47, 3, 47, 1483, 8, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 3, 48, 1492, 8, 48, 1, 48, 3, 48, 1495, 8, 48, 3, 48, 1497, 8, 48, 1, 49, 1, 49, 3, 49, 1501, 8, 49, 1, 49, 1, 49, 3, 49, 1505, 8, 49, 1, 49, 1, 49, 3, 49, 1509, 8, 49, 1, 49, 3, 49, 1512, 8, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 5, 50, 1521, 8, 50, 10, 50, 12, 50, 1524, 9, 50, 1, 50, 1, 50, 3, 50, 1528, 8, 50, 1, 51, 1, 51, 3, 51, 1532, 8, 51, 1, 51, 1, 51, 3, 51, 1536, 8, 51, 1, 52, 3, 52, 1539, 8, 52, 1, 52, 1, 52, 1, 52, 3, 52, 1544, 8, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 5, 52, 1553, 8, 52, 10, 52, 12, 52, 1556, 9, 52, 1, 52, 3, 52, 1559, 8, 52, 3, 52, 1561, 8, 52, 1, 52, 1, 52, 3, 52, 1565, 8, 52, 1, 52, 3, 52, 1568, 8, 52, 1, 53, 1, 53, 1, 53, 5, 53, 1573, 8, 53, 10, 53, 12, 53, 1576, 9, 53, 1, 54, 1, 54, 3, 54, 1580, 8, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 5, 55, 1589, 8, 55, 10, 55, 12, 55, 1592, 9, 55, 1, 55, 1, 55, 1, 56, 3, 56, 1597, 8, 56, 1, 56, 1, 56, 1, 56, 3, 
56, 1602, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 3, 56, 1608, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 3, 56, 1615, 8, 56, 1, 56, 1, 56, 1, 56, 5, 56, 1620, 8, 56, 10, 56, 12, 56, 1623, 9, 56, 1, 56, 1, 56, 3, 56, 1627, 8, 56, 1, 56, 3, 56, 1630, 8, 56, 1, 56, 3, 56, 1633, 8, 56, 1, 56, 3, 56, 1636, 8, 56, 1, 57, 1, 57, 1, 57, 3, 57, 1641, 8, 57, 1, 57, 1, 57, 1, 57, 3, 57, 1646, 8, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 3, 57, 1653, 8, 57, 1, 58, 1, 58, 3, 58, 1657, 8, 58, 1, 58, 1, 58, 3, 58, 1661, 8, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 3, 60, 1671, 8, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 5, 60, 1678, 8, 60, 10, 60, 12, 60, 1681, 9, 60, 3, 60, 1683, 8, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 5, 60, 1690, 8, 60, 10, 60, 12, 60, 1693, 9, 60, 1, 60, 3, 60, 1696, 8, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 1704, 8, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 5, 61, 1711, 8, 61, 10, 61, 12, 61, 1714, 9, 61, 3, 61, 1716, 8, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 5, 61, 1723, 8, 61, 10, 61, 12, 61, 1726, 9, 61, 3, 61, 1728, 8, 61, 1, 61, 3, 61, 1731, 8, 61, 1, 61, 3, 61, 1734, 8, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 3, 62, 1744, 8, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 3, 63, 1753, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 1760, 8, 64, 10, 64, 12, 64, 1763, 9, 64, 1, 64, 3, 64, 1766, 8, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 3, 65, 1773, 8, 65, 1, 65, 1, 65, 1, 65, 5, 65, 1778, 8, 65, 10, 65, 12, 65, 1781, 9, 65, 1, 65, 3, 65, 1784, 8, 65, 1, 65, 1, 65, 3, 65, 1788, 8, 65, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 5, 66, 1795, 8, 66, 10, 66, 12, 66, 1798, 9, 66, 1, 66, 3, 66, 1801, 8, 66, 1, 66, 1, 66, 3, 66, 1805, 8, 66, 1, 66, 1, 66, 1, 66, 3, 66, 1810, 8, 66, 1, 67, 1, 67, 3, 67, 1814, 8, 67, 1, 67, 1, 67, 1, 67, 5, 67, 1819, 8, 67, 10, 67, 12, 67, 1822, 9, 67, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 5, 68, 1829, 8, 68, 10, 68, 12, 68, 1832, 9, 68, 1, 69, 1, 69, 1, 69, 1, 69, 3, 69, 1838, 8, 69, 1, 70, 1, 
70, 1, 70, 3, 70, 1843, 8, 70, 1, 70, 3, 70, 1846, 8, 70, 1, 70, 1, 70, 3, 70, 1850, 8, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 3, 72, 1864, 8, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 3, 73, 1876, 8, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 3, 74, 1885, 8, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 3, 75, 1894, 8, 75, 1, 75, 1, 75, 3, 75, 1898, 8, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 3, 75, 1908, 8, 75, 1, 75, 3, 75, 1911, 8, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 3, 75, 1920, 8, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 3, 75, 1929, 8, 75, 1, 75, 3, 75, 1932, 8, 75, 1, 75, 1, 75, 1, 75, 1, 75, 3, 75, 1938, 8, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 3, 75, 1952, 8, 75, 1, 75, 1, 75, 3, 75, 1956, 8, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 3, 75, 1967, 8, 75, 1, 75, 1, 75, 1, 75, 3, 75, 1972, 8, 75, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 4, 78, 1983, 8, 78, 11, 78, 12, 78, 1984, 1, 79, 1, 79, 1, 79, 4, 79, 1990, 8, 79, 11, 79, 12, 79, 1991, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 3, 81, 2000, 8, 81, 1, 81, 1, 81, 1, 81, 3, 81, 2005, 8, 81, 5, 81, 2007, 8, 81, 10, 81, 12, 81, 2010, 9, 81, 1, 82, 1, 82, 1, 83, 1, 83, 1, 84, 1, 84, 1, 85, 1, 85, 1, 86, 1, 86, 3, 86, 2022, 8, 86, 1, 87, 1, 87, 1, 88, 1, 88, 1, 89, 1, 89, 1, 90, 1, 90, 1, 91, 1, 91, 1, 92, 1, 92, 1, 93, 1, 93, 1, 94, 1, 94, 1, 95, 1, 95, 1, 96, 1, 96, 1, 97, 1, 97, 1, 98, 1, 98, 1, 99, 1, 99, 1, 100, 1, 100, 1, 101, 1, 101, 1, 102, 1, 102, 1, 103, 1, 103, 1, 104, 1, 104, 1, 105, 1, 105, 1, 106, 1, 106, 1, 107, 1, 107, 1, 108, 1, 108, 1, 109, 1, 109, 1, 110, 1, 110, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 3, 112, 2081, 8, 112, 1, 112, 2, 440, 472, 1, 64, 113, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 
36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 172, 174, 176, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204, 206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 0, 28, 3, 0, 58, 58, 69, 69, 82, 82, 2, 0, 47, 47, 66, 66, 1, 0, 133, 134, 2, 0, 146, 146, 171, 171, 1, 0, 8, 9, 2, 0, 59, 59, 141, 141, 2, 0, 56, 56, 104, 104, 2, 0, 58, 58, 82, 82, 5, 0, 25, 25, 72, 72, 81, 81, 122, 122, 126, 126, 4, 0, 84, 84, 132, 132, 138, 138, 145, 145, 2, 0, 7, 7, 12, 13, 1, 0, 14, 17, 1, 0, 18, 21, 4, 0, 77, 77, 97, 97, 99, 99, 118, 118, 3, 0, 25, 25, 72, 72, 126, 126, 5, 0, 52, 54, 104, 104, 172, 173, 186, 186, 188, 189, 2, 0, 29, 29, 62, 62, 3, 0, 128, 128, 154, 154, 179, 179, 2, 0, 5, 5, 106, 106, 1, 0, 176, 177, 2, 0, 34, 34, 60, 60, 2, 0, 151, 151, 162, 162, 2, 0, 159, 159, 166, 166, 2, 0, 160, 160, 167, 168, 2, 0, 161, 161, 163, 163, 2, 0, 8, 10, 102, 102, 2, 0, 185, 185, 188, 188, 2, 0, 25, 123, 125, 180, 2367, 0, 229, 1, 0, 0, 0, 2, 237, 1, 0, 0, 0, 4, 263, 1, 0, 0, 0, 6, 291, 1, 0, 0, 0, 8, 323, 1, 0, 0, 0, 10, 333, 1, 0, 0, 0, 12, 341, 1, 0, 0, 0, 14, 351, 1, 0, 0, 0, 16, 355, 1, 0, 0, 0, 18, 366, 1, 0, 0, 0, 20, 369, 1, 0, 0, 0, 22, 375, 1, 0, 0, 0, 24, 409, 1, 0, 0, 0, 26, 418, 1, 0, 0, 0, 28, 459, 1, 0, 0, 0, 30, 470, 1, 0, 0, 0, 32, 488, 1, 0, 0, 0, 34, 540, 1, 0, 0, 0, 36, 546, 1, 0, 0, 0, 38, 587, 1, 0, 0, 0, 40, 629, 1, 0, 0, 0, 42, 633, 1, 0, 0, 0, 44, 697, 1, 0, 0, 0, 46, 729, 1, 0, 0, 0, 48, 758, 1, 0, 0, 0, 50, 779, 1, 0, 0, 0, 52, 793, 1, 0, 0, 0, 54, 804, 1, 0, 0, 0, 56, 824, 1, 0, 0, 0, 58, 837, 1, 0, 0, 0, 60, 855, 1, 0, 0, 0, 62, 861, 1, 0, 0, 0, 64, 962, 1, 0, 0, 0, 66, 1086, 1, 0, 0, 0, 68, 1096, 1, 0, 0, 0, 70, 1099, 1, 0, 0, 0, 72, 1173, 1, 0, 0, 0, 74, 1182, 1, 0, 0, 0, 76, 
1229, 1, 0, 0, 0, 78, 1247, 1, 0, 0, 0, 80, 1249, 1, 0, 0, 0, 82, 1263, 1, 0, 0, 0, 84, 1280, 1, 0, 0, 0, 86, 1381, 1, 0, 0, 0, 88, 1383, 1, 0, 0, 0, 90, 1386, 1, 0, 0, 0, 92, 1396, 1, 0, 0, 0, 94, 1482, 1, 0, 0, 0, 96, 1496, 1, 0, 0, 0, 98, 1511, 1, 0, 0, 0, 100, 1527, 1, 0, 0, 0, 102, 1535, 1, 0, 0, 0, 104, 1538, 1, 0, 0, 0, 106, 1569, 1, 0, 0, 0, 108, 1579, 1, 0, 0, 0, 110, 1584, 1, 0, 0, 0, 112, 1596, 1, 0, 0, 0, 114, 1640, 1, 0, 0, 0, 116, 1654, 1, 0, 0, 0, 118, 1662, 1, 0, 0, 0, 120, 1668, 1, 0, 0, 0, 122, 1699, 1, 0, 0, 0, 124, 1735, 1, 0, 0, 0, 126, 1745, 1, 0, 0, 0, 128, 1754, 1, 0, 0, 0, 130, 1769, 1, 0, 0, 0, 132, 1789, 1, 0, 0, 0, 134, 1811, 1, 0, 0, 0, 136, 1823, 1, 0, 0, 0, 138, 1833, 1, 0, 0, 0, 140, 1839, 1, 0, 0, 0, 142, 1851, 1, 0, 0, 0, 144, 1863, 1, 0, 0, 0, 146, 1875, 1, 0, 0, 0, 148, 1884, 1, 0, 0, 0, 150, 1971, 1, 0, 0, 0, 152, 1973, 1, 0, 0, 0, 154, 1976, 1, 0, 0, 0, 156, 1979, 1, 0, 0, 0, 158, 1986, 1, 0, 0, 0, 160, 1993, 1, 0, 0, 0, 162, 1997, 1, 0, 0, 0, 164, 2011, 1, 0, 0, 0, 166, 2013, 1, 0, 0, 0, 168, 2015, 1, 0, 0, 0, 170, 2017, 1, 0, 0, 0, 172, 2021, 1, 0, 0, 0, 174, 2023, 1, 0, 0, 0, 176, 2025, 1, 0, 0, 0, 178, 2027, 1, 0, 0, 0, 180, 2029, 1, 0, 0, 0, 182, 2031, 1, 0, 0, 0, 184, 2033, 1, 0, 0, 0, 186, 2035, 1, 0, 0, 0, 188, 2037, 1, 0, 0, 0, 190, 2039, 1, 0, 0, 0, 192, 2041, 1, 0, 0, 0, 194, 2043, 1, 0, 0, 0, 196, 2045, 1, 0, 0, 0, 198, 2047, 1, 0, 0, 0, 200, 2049, 1, 0, 0, 0, 202, 2051, 1, 0, 0, 0, 204, 2053, 1, 0, 0, 0, 206, 2055, 1, 0, 0, 0, 208, 2057, 1, 0, 0, 0, 210, 2059, 1, 0, 0, 0, 212, 2061, 1, 0, 0, 0, 214, 2063, 1, 0, 0, 0, 216, 2065, 1, 0, 0, 0, 218, 2067, 1, 0, 0, 0, 220, 2069, 1, 0, 0, 0, 222, 2071, 1, 0, 0, 0, 224, 2080, 1, 0, 0, 0, 226, 228, 3, 2, 1, 0, 227, 226, 1, 0, 0, 0, 228, 231, 1, 0, 0, 0, 229, 227, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 232, 1, 0, 0, 0, 231, 229, 1, 0, 0, 0, 232, 233, 5, 0, 0, 1, 233, 1, 1, 0, 0, 0, 234, 236, 5, 1, 0, 0, 235, 234, 1, 0, 0, 0, 236, 239, 1, 0, 0, 0, 237, 235, 1, 0, 0, 0, 237, 
238, 1, 0, 0, 0, 238, 240, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 240, 249, 3, 4, 2, 0, 241, 243, 5, 1, 0, 0, 242, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 242, 1, 0, 0, 0, 244, 245, 1, 0, 0, 0, 245, 246, 1, 0, 0, 0, 246, 248, 3, 4, 2, 0, 247, 242, 1, 0, 0, 0, 248, 251, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 249, 250, 1, 0, 0, 0, 250, 255, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 252, 254, 5, 1, 0, 0, 253, 252, 1, 0, 0, 0, 254, 257, 1, 0, 0, 0, 255, 253, 1, 0, 0, 0, 255, 256, 1, 0, 0, 0, 256, 3, 1, 0, 0, 0, 257, 255, 1, 0, 0, 0, 258, 261, 5, 71, 0, 0, 259, 260, 5, 114, 0, 0, 260, 262, 5, 111, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 264, 1, 0, 0, 0, 263, 258, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 289, 1, 0, 0, 0, 265, 290, 3, 6, 3, 0, 266, 290, 3, 8, 4, 0, 267, 290, 3, 10, 5, 0, 268, 290, 3, 12, 6, 0, 269, 290, 3, 14, 7, 0, 270, 290, 3, 22, 11, 0, 271, 290, 3, 26, 13, 0, 272, 290, 3, 42, 21, 0, 273, 290, 3, 44, 22, 0, 274, 290, 3, 46, 23, 0, 275, 290, 3, 56, 28, 0, 276, 290, 3, 58, 29, 0, 277, 290, 3, 60, 30, 0, 278, 290, 3, 62, 31, 0, 279, 290, 3, 70, 35, 0, 280, 290, 3, 76, 38, 0, 281, 290, 3, 80, 40, 0, 282, 290, 3, 20, 10, 0, 283, 290, 3, 16, 8, 0, 284, 290, 3, 18, 9, 0, 285, 290, 3, 82, 41, 0, 286, 290, 3, 104, 52, 0, 287, 290, 3, 112, 56, 0, 288, 290, 3, 116, 58, 0, 289, 265, 1, 0, 0, 0, 289, 266, 1, 0, 0, 0, 289, 267, 1, 0, 0, 0, 289, 268, 1, 0, 0, 0, 289, 269, 1, 0, 0, 0, 289, 270, 1, 0, 0, 0, 289, 271, 1, 0, 0, 0, 289, 272, 1, 0, 0, 0, 289, 273, 1, 0, 0, 0, 289, 274, 1, 0, 0, 0, 289, 275, 1, 0, 0, 0, 289, 276, 1, 0, 0, 0, 289, 277, 1, 0, 0, 0, 289, 278, 1, 0, 0, 0, 289, 279, 1, 0, 0, 0, 289, 280, 1, 0, 0, 0, 289, 281, 1, 0, 0, 0, 289, 282, 1, 0, 0, 0, 289, 283, 1, 0, 0, 0, 289, 284, 1, 0, 0, 0, 289, 285, 1, 0, 0, 0, 289, 286, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 289, 288, 1, 0, 0, 0, 290, 5, 1, 0, 0, 0, 291, 292, 5, 30, 0, 0, 292, 296, 5, 132, 0, 0, 293, 294, 3, 182, 91, 0, 294, 295, 5, 2, 0, 0, 295, 297, 1, 0, 0, 0, 296, 293, 1, 0, 0, 0, 296, 297, 1, 0, 0, 
0, 297, 298, 1, 0, 0, 0, 298, 321, 3, 184, 92, 0, 299, 309, 5, 121, 0, 0, 300, 301, 5, 136, 0, 0, 301, 310, 3, 184, 92, 0, 302, 304, 5, 46, 0, 0, 303, 302, 1, 0, 0, 0, 303, 304, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 306, 3, 188, 94, 0, 306, 307, 5, 136, 0, 0, 307, 308, 3, 188, 94, 0, 308, 310, 1, 0, 0, 0, 309, 300, 1, 0, 0, 0, 309, 303, 1, 0, 0, 0, 310, 322, 1, 0, 0, 0, 311, 313, 5, 27, 0, 0, 312, 314, 5, 46, 0, 0, 313, 312, 1, 0, 0, 0, 313, 314, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 322, 3, 28, 14, 0, 316, 318, 5, 63, 0, 0, 317, 319, 5, 46, 0, 0, 318, 317, 1, 0, 0, 0, 318, 319, 1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 322, 3, 188, 94, 0, 321, 299, 1, 0, 0, 0, 321, 311, 1, 0, 0, 0, 321, 316, 1, 0, 0, 0, 322, 7, 1, 0, 0, 0, 323, 331, 5, 31, 0, 0, 324, 332, 3, 182, 91, 0, 325, 326, 3, 182, 91, 0, 326, 327, 5, 2, 0, 0, 327, 329, 1, 0, 0, 0, 328, 325, 1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 330, 1, 0, 0, 0, 330, 332, 3, 186, 93, 0, 331, 324, 1, 0, 0, 0, 331, 328, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 9, 1, 0, 0, 0, 333, 335, 5, 35, 0, 0, 334, 336, 5, 55, 0, 0, 335, 334, 1, 0, 0, 0, 335, 336, 1, 0, 0, 0, 336, 337, 1, 0, 0, 0, 337, 338, 3, 64, 32, 0, 338, 339, 5, 33, 0, 0, 339, 340, 3, 182, 91, 0, 340, 11, 1, 0, 0, 0, 341, 343, 5, 38, 0, 0, 342, 344, 7, 0, 0, 0, 343, 342, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 349, 1, 0, 0, 0, 345, 347, 5, 137, 0, 0, 346, 348, 3, 208, 104, 0, 347, 346, 1, 0, 0, 0, 347, 348, 1, 0, 0, 0, 348, 350, 1, 0, 0, 0, 349, 345, 1, 0, 0, 0, 349, 350, 1, 0, 0, 0, 350, 13, 1, 0, 0, 0, 351, 353, 7, 1, 0, 0, 352, 354, 5, 137, 0, 0, 353, 352, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 15, 1, 0, 0, 0, 355, 357, 5, 126, 0, 0, 356, 358, 5, 137, 0, 0, 357, 356, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 364, 1, 0, 0, 0, 359, 361, 5, 136, 0, 0, 360, 362, 5, 129, 0, 0, 361, 360, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 365, 3, 204, 102, 0, 364, 359, 1, 0, 0, 0, 364, 365, 1, 0, 0, 0, 365, 17, 1, 0, 0, 0, 366, 367, 5, 129, 0, 0, 367, 368, 3, 204, 
102, 0, 368, 19, 1, 0, 0, 0, 369, 371, 5, 120, 0, 0, 370, 372, 5, 129, 0, 0, 371, 370, 1, 0, 0, 0, 371, 372, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 374, 3, 204, 102, 0, 374, 21, 1, 0, 0, 0, 375, 377, 5, 50, 0, 0, 376, 378, 5, 140, 0, 0, 377, 376, 1, 0, 0, 0, 377, 378, 1, 0, 0, 0, 378, 379, 1, 0, 0, 0, 379, 383, 5, 84, 0, 0, 380, 381, 5, 80, 0, 0, 381, 382, 5, 102, 0, 0, 382, 384, 5, 70, 0, 0, 383, 380, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 388, 1, 0, 0, 0, 385, 386, 3, 182, 91, 0, 386, 387, 5, 2, 0, 0, 387, 389, 1, 0, 0, 0, 388, 385, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 391, 3, 194, 97, 0, 391, 392, 5, 107, 0, 0, 392, 393, 3, 184, 92, 0, 393, 394, 5, 3, 0, 0, 394, 399, 3, 24, 12, 0, 395, 396, 5, 5, 0, 0, 396, 398, 3, 24, 12, 0, 397, 395, 1, 0, 0, 0, 398, 401, 1, 0, 0, 0, 399, 397, 1, 0, 0, 0, 399, 400, 1, 0, 0, 0, 400, 402, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 402, 405, 5, 4, 0, 0, 403, 404, 5, 148, 0, 0, 404, 406, 3, 64, 32, 0, 405, 403, 1, 0, 0, 0, 405, 406, 1, 0, 0, 0, 406, 23, 1, 0, 0, 0, 407, 410, 3, 188, 94, 0, 408, 410, 3, 64, 32, 0, 409, 407, 1, 0, 0, 0, 409, 408, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 412, 5, 45, 0, 0, 412, 414, 3, 190, 95, 0, 413, 411, 1, 0, 0, 0, 413, 414, 1, 0, 0, 0, 414, 416, 1, 0, 0, 0, 415, 417, 3, 142, 71, 0, 416, 415, 1, 0, 0, 0, 416, 417, 1, 0, 0, 0, 417, 25, 1, 0, 0, 0, 418, 420, 5, 50, 0, 0, 419, 421, 7, 2, 0, 0, 420, 419, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 426, 5, 132, 0, 0, 423, 424, 5, 80, 0, 0, 424, 425, 5, 102, 0, 0, 425, 427, 5, 70, 0, 0, 426, 423, 1, 0, 0, 0, 426, 427, 1, 0, 0, 0, 427, 431, 1, 0, 0, 0, 428, 429, 3, 182, 91, 0, 429, 430, 5, 2, 0, 0, 430, 432, 1, 0, 0, 0, 431, 428, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 433, 1, 0, 0, 0, 433, 457, 3, 184, 92, 0, 434, 435, 5, 3, 0, 0, 435, 440, 3, 28, 14, 0, 436, 437, 5, 5, 0, 0, 437, 439, 3, 28, 14, 0, 438, 436, 1, 0, 0, 0, 439, 442, 1, 0, 0, 0, 440, 441, 1, 0, 0, 0, 440, 438, 1, 0, 0, 0, 441, 447, 1, 0, 0, 0, 442, 440, 1, 0, 
0, 0, 443, 444, 5, 5, 0, 0, 444, 446, 3, 36, 18, 0, 445, 443, 1, 0, 0, 0, 446, 449, 1, 0, 0, 0, 447, 445, 1, 0, 0, 0, 447, 448, 1, 0, 0, 0, 448, 450, 1, 0, 0, 0, 449, 447, 1, 0, 0, 0, 450, 453, 5, 4, 0, 0, 451, 452, 5, 150, 0, 0, 452, 454, 5, 185, 0, 0, 453, 451, 1, 0, 0, 0, 453, 454, 1, 0, 0, 0, 454, 458, 1, 0, 0, 0, 455, 456, 5, 33, 0, 0, 456, 458, 3, 82, 41, 0, 457, 434, 1, 0, 0, 0, 457, 455, 1, 0, 0, 0, 458, 27, 1, 0, 0, 0, 459, 461, 3, 188, 94, 0, 460, 462, 3, 30, 15, 0, 461, 460, 1, 0, 0, 0, 461, 462, 1, 0, 0, 0, 462, 466, 1, 0, 0, 0, 463, 465, 3, 32, 16, 0, 464, 463, 1, 0, 0, 0, 465, 468, 1, 0, 0, 0, 466, 464, 1, 0, 0, 0, 466, 467, 1, 0, 0, 0, 467, 29, 1, 0, 0, 0, 468, 466, 1, 0, 0, 0, 469, 471, 3, 178, 89, 0, 470, 469, 1, 0, 0, 0, 471, 472, 1, 0, 0, 0, 472, 473, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 473, 484, 1, 0, 0, 0, 474, 475, 5, 3, 0, 0, 475, 476, 3, 34, 17, 0, 476, 477, 5, 4, 0, 0, 477, 485, 1, 0, 0, 0, 478, 479, 5, 3, 0, 0, 479, 480, 3, 34, 17, 0, 480, 481, 5, 5, 0, 0, 481, 482, 3, 34, 17, 0, 482, 483, 5, 4, 0, 0, 483, 485, 1, 0, 0, 0, 484, 474, 1, 0, 0, 0, 484, 478, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 31, 1, 0, 0, 0, 486, 487, 5, 49, 0, 0, 487, 489, 3, 178, 89, 0, 488, 486, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 537, 1, 0, 0, 0, 490, 491, 5, 113, 0, 0, 491, 493, 5, 95, 0, 0, 492, 494, 3, 142, 71, 0, 493, 492, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 496, 1, 0, 0, 0, 495, 497, 3, 40, 20, 0, 496, 495, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 499, 1, 0, 0, 0, 498, 500, 5, 36, 0, 0, 499, 498, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 538, 1, 0, 0, 0, 501, 502, 5, 102, 0, 0, 502, 505, 5, 104, 0, 0, 503, 505, 5, 140, 0, 0, 504, 501, 1, 0, 0, 0, 504, 503, 1, 0, 0, 0, 505, 507, 1, 0, 0, 0, 506, 508, 3, 40, 20, 0, 507, 506, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 538, 1, 0, 0, 0, 509, 510, 5, 44, 0, 0, 510, 511, 5, 3, 0, 0, 511, 512, 3, 64, 32, 0, 512, 513, 5, 4, 0, 0, 513, 538, 1, 0, 0, 0, 514, 521, 5, 56, 0, 0, 515, 522, 3, 34, 17, 0, 516, 522, 3, 68, 34, 0, 517, 
518, 5, 3, 0, 0, 518, 519, 3, 64, 32, 0, 519, 520, 5, 4, 0, 0, 520, 522, 1, 0, 0, 0, 521, 515, 1, 0, 0, 0, 521, 516, 1, 0, 0, 0, 521, 517, 1, 0, 0, 0, 522, 538, 1, 0, 0, 0, 523, 524, 5, 45, 0, 0, 524, 538, 3, 190, 95, 0, 525, 538, 3, 38, 19, 0, 526, 527, 5, 169, 0, 0, 527, 529, 5, 170, 0, 0, 528, 526, 1, 0, 0, 0, 528, 529, 1, 0, 0, 0, 529, 530, 1, 0, 0, 0, 530, 531, 5, 33, 0, 0, 531, 532, 5, 3, 0, 0, 532, 533, 3, 64, 32, 0, 533, 535, 5, 4, 0, 0, 534, 536, 7, 3, 0, 0, 535, 534, 1, 0, 0, 0, 535, 536, 1, 0, 0, 0, 536, 538, 1, 0, 0, 0, 537, 490, 1, 0, 0, 0, 537, 504, 1, 0, 0, 0, 537, 509, 1, 0, 0, 0, 537, 514, 1, 0, 0, 0, 537, 523, 1, 0, 0, 0, 537, 525, 1, 0, 0, 0, 537, 528, 1, 0, 0, 0, 538, 33, 1, 0, 0, 0, 539, 541, 7, 4, 0, 0, 540, 539, 1, 0, 0, 0, 540, 541, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 543, 5, 186, 0, 0, 543, 35, 1, 0, 0, 0, 544, 545, 5, 49, 0, 0, 545, 547, 3, 178, 89, 0, 546, 544, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, 585, 1, 0, 0, 0, 548, 549, 5, 113, 0, 0, 549, 552, 5, 95, 0, 0, 550, 552, 5, 140, 0, 0, 551, 548, 1, 0, 0, 0, 551, 550, 1, 0, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 5, 3, 0, 0, 554, 559, 3, 24, 12, 0, 555, 556, 5, 5, 0, 0, 556, 558, 3, 24, 12, 0, 557, 555, 1, 0, 0, 0, 558, 561, 1, 0, 0, 0, 559, 557, 1, 0, 0, 0, 559, 560, 1, 0, 0, 0, 560, 562, 1, 0, 0, 0, 561, 559, 1, 0, 0, 0, 562, 564, 5, 4, 0, 0, 563, 565, 3, 40, 20, 0, 564, 563, 1, 0, 0, 0, 564, 565, 1, 0, 0, 0, 565, 586, 1, 0, 0, 0, 566, 567, 5, 44, 0, 0, 567, 568, 5, 3, 0, 0, 568, 569, 3, 64, 32, 0, 569, 570, 5, 4, 0, 0, 570, 586, 1, 0, 0, 0, 571, 572, 5, 74, 0, 0, 572, 573, 5, 95, 0, 0, 573, 574, 5, 3, 0, 0, 574, 579, 3, 188, 94, 0, 575, 576, 5, 5, 0, 0, 576, 578, 3, 188, 94, 0, 577, 575, 1, 0, 0, 0, 578, 581, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 582, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 582, 583, 5, 4, 0, 0, 583, 584, 3, 38, 19, 0, 584, 586, 1, 0, 0, 0, 585, 551, 1, 0, 0, 0, 585, 566, 1, 0, 0, 0, 585, 571, 1, 0, 0, 0, 586, 37, 1, 0, 0, 0, 587, 588, 5, 117, 0, 0, 
588, 600, 3, 192, 96, 0, 589, 590, 5, 3, 0, 0, 590, 595, 3, 188, 94, 0, 591, 592, 5, 5, 0, 0, 592, 594, 3, 188, 94, 0, 593, 591, 1, 0, 0, 0, 594, 597, 1, 0, 0, 0, 595, 593, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 598, 1, 0, 0, 0, 597, 595, 1, 0, 0, 0, 598, 599, 5, 4, 0, 0, 599, 601, 1, 0, 0, 0, 600, 589, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 616, 1, 0, 0, 0, 602, 603, 5, 107, 0, 0, 603, 610, 7, 5, 0, 0, 604, 605, 5, 131, 0, 0, 605, 611, 7, 6, 0, 0, 606, 611, 5, 41, 0, 0, 607, 611, 5, 123, 0, 0, 608, 609, 5, 101, 0, 0, 609, 611, 5, 26, 0, 0, 610, 604, 1, 0, 0, 0, 610, 606, 1, 0, 0, 0, 610, 607, 1, 0, 0, 0, 610, 608, 1, 0, 0, 0, 611, 615, 1, 0, 0, 0, 612, 613, 5, 99, 0, 0, 613, 615, 3, 178, 89, 0, 614, 602, 1, 0, 0, 0, 614, 612, 1, 0, 0, 0, 615, 618, 1, 0, 0, 0, 616, 614, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 627, 1, 0, 0, 0, 618, 616, 1, 0, 0, 0, 619, 621, 5, 102, 0, 0, 620, 619, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 625, 5, 57, 0, 0, 623, 624, 5, 86, 0, 0, 624, 626, 7, 7, 0, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 628, 1, 0, 0, 0, 627, 620, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 39, 1, 0, 0, 0, 629, 630, 5, 107, 0, 0, 630, 631, 5, 48, 0, 0, 631, 632, 7, 8, 0, 0, 632, 41, 1, 0, 0, 0, 633, 635, 5, 50, 0, 0, 634, 636, 7, 2, 0, 0, 635, 634, 1, 0, 0, 0, 635, 636, 1, 0, 0, 0, 636, 637, 1, 0, 0, 0, 637, 641, 5, 138, 0, 0, 638, 639, 5, 80, 0, 0, 639, 640, 5, 102, 0, 0, 640, 642, 5, 70, 0, 0, 641, 638, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 646, 1, 0, 0, 0, 643, 644, 3, 182, 91, 0, 644, 645, 5, 2, 0, 0, 645, 647, 1, 0, 0, 0, 646, 643, 1, 0, 0, 0, 646, 647, 1, 0, 0, 0, 647, 648, 1, 0, 0, 0, 648, 653, 3, 196, 98, 0, 649, 654, 5, 37, 0, 0, 650, 654, 5, 28, 0, 0, 651, 652, 5, 89, 0, 0, 652, 654, 5, 105, 0, 0, 653, 649, 1, 0, 0, 0, 653, 650, 1, 0, 0, 0, 653, 651, 1, 0, 0, 0, 653, 654, 1, 0, 0, 0, 654, 669, 1, 0, 0, 0, 655, 670, 5, 59, 0, 0, 656, 670, 5, 88, 0, 0, 657, 667, 5, 141, 0, 0, 658, 659, 5, 105, 0, 0, 659, 664, 3, 188, 94, 0, 660, 
661, 5, 5, 0, 0, 661, 663, 3, 188, 94, 0, 662, 660, 1, 0, 0, 0, 663, 666, 1, 0, 0, 0, 664, 662, 1, 0, 0, 0, 664, 665, 1, 0, 0, 0, 665, 668, 1, 0, 0, 0, 666, 664, 1, 0, 0, 0, 667, 658, 1, 0, 0, 0, 667, 668, 1, 0, 0, 0, 668, 670, 1, 0, 0, 0, 669, 655, 1, 0, 0, 0, 669, 656, 1, 0, 0, 0, 669, 657, 1, 0, 0, 0, 670, 671, 1, 0, 0, 0, 671, 672, 5, 107, 0, 0, 672, 676, 3, 184, 92, 0, 673, 674, 5, 73, 0, 0, 674, 675, 5, 64, 0, 0, 675, 677, 5, 127, 0, 0, 676, 673, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 677, 680, 1, 0, 0, 0, 678, 679, 5, 147, 0, 0, 679, 681, 3, 64, 32, 0, 680, 678, 1, 0, 0, 0, 680, 681, 1, 0, 0, 0, 681, 682, 1, 0, 0, 0, 682, 691, 5, 38, 0, 0, 683, 688, 3, 104, 52, 0, 684, 688, 3, 70, 35, 0, 685, 688, 3, 56, 28, 0, 686, 688, 3, 82, 41, 0, 687, 683, 1, 0, 0, 0, 687, 684, 1, 0, 0, 0, 687, 685, 1, 0, 0, 0, 687, 686, 1, 0, 0, 0, 688, 689, 1, 0, 0, 0, 689, 690, 5, 1, 0, 0, 690, 692, 1, 0, 0, 0, 691, 687, 1, 0, 0, 0, 692, 693, 1, 0, 0, 0, 693, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 696, 5, 66, 0, 0, 696, 43, 1, 0, 0, 0, 697, 699, 5, 50, 0, 0, 698, 700, 7, 2, 0, 0, 699, 698, 1, 0, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 1, 0, 0, 0, 701, 705, 5, 145, 0, 0, 702, 703, 5, 80, 0, 0, 703, 704, 5, 102, 0, 0, 704, 706, 5, 70, 0, 0, 705, 702, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 710, 1, 0, 0, 0, 707, 708, 3, 182, 91, 0, 708, 709, 5, 2, 0, 0, 709, 711, 1, 0, 0, 0, 710, 707, 1, 0, 0, 0, 710, 711, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 724, 3, 198, 99, 0, 713, 714, 5, 3, 0, 0, 714, 719, 3, 188, 94, 0, 715, 716, 5, 5, 0, 0, 716, 718, 3, 188, 94, 0, 717, 715, 1, 0, 0, 0, 718, 721, 1, 0, 0, 0, 719, 717, 1, 0, 0, 0, 719, 720, 1, 0, 0, 0, 720, 722, 1, 0, 0, 0, 721, 719, 1, 0, 0, 0, 722, 723, 5, 4, 0, 0, 723, 725, 1, 0, 0, 0, 724, 713, 1, 0, 0, 0, 724, 725, 1, 0, 0, 0, 725, 726, 1, 0, 0, 0, 726, 727, 5, 33, 0, 0, 727, 728, 3, 82, 41, 0, 728, 45, 1, 0, 0, 0, 729, 730, 5, 50, 0, 0, 730, 731, 5, 146, 0, 0, 731, 735, 5, 132, 0, 0, 732, 733, 5, 80, 0, 0, 733, 734, 5, 
102, 0, 0, 734, 736, 5, 70, 0, 0, 735, 732, 1, 0, 0, 0, 735, 736, 1, 0, 0, 0, 736, 740, 1, 0, 0, 0, 737, 738, 3, 182, 91, 0, 738, 739, 5, 2, 0, 0, 739, 741, 1, 0, 0, 0, 740, 737, 1, 0, 0, 0, 740, 741, 1, 0, 0, 0, 741, 742, 1, 0, 0, 0, 742, 743, 3, 184, 92, 0, 743, 744, 5, 142, 0, 0, 744, 756, 3, 200, 100, 0, 745, 746, 5, 3, 0, 0, 746, 751, 3, 172, 86, 0, 747, 748, 5, 5, 0, 0, 748, 750, 3, 172, 86, 0, 749, 747, 1, 0, 0, 0, 750, 753, 1, 0, 0, 0, 751, 749, 1, 0, 0, 0, 751, 752, 1, 0, 0, 0, 752, 754, 1, 0, 0, 0, 753, 751, 1, 0, 0, 0, 754, 755, 5, 4, 0, 0, 755, 757, 1, 0, 0, 0, 756, 745, 1, 0, 0, 0, 756, 757, 1, 0, 0, 0, 757, 47, 1, 0, 0, 0, 758, 760, 5, 149, 0, 0, 759, 761, 5, 116, 0, 0, 760, 759, 1, 0, 0, 0, 760, 761, 1, 0, 0, 0, 761, 762, 1, 0, 0, 0, 762, 763, 3, 50, 25, 0, 763, 764, 5, 33, 0, 0, 764, 765, 5, 3, 0, 0, 765, 766, 3, 82, 41, 0, 766, 776, 5, 4, 0, 0, 767, 768, 5, 5, 0, 0, 768, 769, 3, 50, 25, 0, 769, 770, 5, 33, 0, 0, 770, 771, 5, 3, 0, 0, 771, 772, 3, 82, 41, 0, 772, 773, 5, 4, 0, 0, 773, 775, 1, 0, 0, 0, 774, 767, 1, 0, 0, 0, 775, 778, 1, 0, 0, 0, 776, 774, 1, 0, 0, 0, 776, 777, 1, 0, 0, 0, 777, 49, 1, 0, 0, 0, 778, 776, 1, 0, 0, 0, 779, 791, 3, 184, 92, 0, 780, 781, 5, 3, 0, 0, 781, 786, 3, 188, 94, 0, 782, 783, 5, 5, 0, 0, 783, 785, 3, 188, 94, 0, 784, 782, 1, 0, 0, 0, 785, 788, 1, 0, 0, 0, 786, 784, 1, 0, 0, 0, 786, 787, 1, 0, 0, 0, 787, 789, 1, 0, 0, 0, 788, 786, 1, 0, 0, 0, 789, 790, 5, 4, 0, 0, 790, 792, 1, 0, 0, 0, 791, 780, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 51, 1, 0, 0, 0, 793, 794, 3, 50, 25, 0, 794, 795, 5, 33, 0, 0, 795, 796, 5, 3, 0, 0, 796, 797, 3, 164, 82, 0, 797, 799, 5, 139, 0, 0, 798, 800, 5, 29, 0, 0, 799, 798, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 801, 1, 0, 0, 0, 801, 802, 3, 166, 83, 0, 802, 803, 5, 4, 0, 0, 803, 53, 1, 0, 0, 0, 804, 816, 3, 184, 92, 0, 805, 806, 5, 3, 0, 0, 806, 811, 3, 188, 94, 0, 807, 808, 5, 5, 0, 0, 808, 810, 3, 188, 94, 0, 809, 807, 1, 0, 0, 0, 810, 813, 1, 0, 0, 0, 811, 809, 1, 0, 0, 0, 811, 812, 1, 0, 
0, 0, 812, 814, 1, 0, 0, 0, 813, 811, 1, 0, 0, 0, 814, 815, 5, 4, 0, 0, 815, 817, 1, 0, 0, 0, 816, 805, 1, 0, 0, 0, 816, 817, 1, 0, 0, 0, 817, 818, 1, 0, 0, 0, 818, 819, 5, 33, 0, 0, 819, 820, 5, 3, 0, 0, 820, 821, 3, 82, 41, 0, 821, 822, 5, 4, 0, 0, 822, 55, 1, 0, 0, 0, 823, 825, 3, 48, 24, 0, 824, 823, 1, 0, 0, 0, 824, 825, 1, 0, 0, 0, 825, 826, 1, 0, 0, 0, 826, 827, 5, 59, 0, 0, 827, 828, 5, 75, 0, 0, 828, 831, 3, 114, 57, 0, 829, 830, 5, 148, 0, 0, 830, 832, 3, 64, 32, 0, 831, 829, 1, 0, 0, 0, 831, 832, 1, 0, 0, 0, 832, 834, 1, 0, 0, 0, 833, 835, 3, 72, 36, 0, 834, 833, 1, 0, 0, 0, 834, 835, 1, 0, 0, 0, 835, 57, 1, 0, 0, 0, 836, 838, 3, 48, 24, 0, 837, 836, 1, 0, 0, 0, 837, 838, 1, 0, 0, 0, 838, 839, 1, 0, 0, 0, 839, 840, 5, 59, 0, 0, 840, 841, 5, 75, 0, 0, 841, 844, 3, 114, 57, 0, 842, 843, 5, 148, 0, 0, 843, 845, 3, 64, 32, 0, 844, 842, 1, 0, 0, 0, 844, 845, 1, 0, 0, 0, 845, 847, 1, 0, 0, 0, 846, 848, 3, 72, 36, 0, 847, 846, 1, 0, 0, 0, 847, 848, 1, 0, 0, 0, 848, 853, 1, 0, 0, 0, 849, 851, 3, 136, 68, 0, 850, 849, 1, 0, 0, 0, 850, 851, 1, 0, 0, 0, 851, 852, 1, 0, 0, 0, 852, 854, 3, 138, 69, 0, 853, 850, 1, 0, 0, 0, 853, 854, 1, 0, 0, 0, 854, 59, 1, 0, 0, 0, 855, 857, 5, 61, 0, 0, 856, 858, 5, 55, 0, 0, 857, 856, 1, 0, 0, 0, 857, 858, 1, 0, 0, 0, 858, 859, 1, 0, 0, 0, 859, 860, 3, 182, 91, 0, 860, 61, 1, 0, 0, 0, 861, 862, 5, 63, 0, 0, 862, 865, 7, 9, 0, 0, 863, 864, 5, 80, 0, 0, 864, 866, 5, 70, 0, 0, 865, 863, 1, 0, 0, 0, 865, 866, 1, 0, 0, 0, 866, 870, 1, 0, 0, 0, 867, 868, 3, 182, 91, 0, 868, 869, 5, 2, 0, 0, 869, 871, 1, 0, 0, 0, 870, 867, 1, 0, 0, 0, 870, 871, 1, 0, 0, 0, 871, 872, 1, 0, 0, 0, 872, 873, 3, 224, 112, 0, 873, 63, 1, 0, 0, 0, 874, 875, 6, 32, -1, 0, 875, 963, 3, 68, 34, 0, 876, 963, 5, 187, 0, 0, 877, 878, 3, 182, 91, 0, 878, 879, 5, 2, 0, 0, 879, 881, 1, 0, 0, 0, 880, 877, 1, 0, 0, 0, 880, 881, 1, 0, 0, 0, 881, 882, 1, 0, 0, 0, 882, 883, 3, 184, 92, 0, 883, 884, 5, 2, 0, 0, 884, 886, 1, 0, 0, 0, 885, 880, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 
886, 887, 1, 0, 0, 0, 887, 963, 3, 188, 94, 0, 888, 889, 3, 168, 84, 0, 889, 890, 3, 64, 32, 21, 890, 963, 1, 0, 0, 0, 891, 892, 3, 180, 90, 0, 892, 905, 5, 3, 0, 0, 893, 895, 5, 62, 0, 0, 894, 893, 1, 0, 0, 0, 894, 895, 1, 0, 0, 0, 895, 896, 1, 0, 0, 0, 896, 901, 3, 64, 32, 0, 897, 898, 5, 5, 0, 0, 898, 900, 3, 64, 32, 0, 899, 897, 1, 0, 0, 0, 900, 903, 1, 0, 0, 0, 901, 899, 1, 0, 0, 0, 901, 902, 1, 0, 0, 0, 902, 906, 1, 0, 0, 0, 903, 901, 1, 0, 0, 0, 904, 906, 5, 7, 0, 0, 905, 894, 1, 0, 0, 0, 905, 904, 1, 0, 0, 0, 905, 906, 1, 0, 0, 0, 906, 907, 1, 0, 0, 0, 907, 909, 5, 4, 0, 0, 908, 910, 3, 118, 59, 0, 909, 908, 1, 0, 0, 0, 909, 910, 1, 0, 0, 0, 910, 912, 1, 0, 0, 0, 911, 913, 3, 122, 61, 0, 912, 911, 1, 0, 0, 0, 912, 913, 1, 0, 0, 0, 913, 963, 1, 0, 0, 0, 914, 915, 5, 3, 0, 0, 915, 920, 3, 64, 32, 0, 916, 917, 5, 5, 0, 0, 917, 919, 3, 64, 32, 0, 918, 916, 1, 0, 0, 0, 919, 922, 1, 0, 0, 0, 920, 918, 1, 0, 0, 0, 920, 921, 1, 0, 0, 0, 921, 923, 1, 0, 0, 0, 922, 920, 1, 0, 0, 0, 923, 924, 5, 4, 0, 0, 924, 963, 1, 0, 0, 0, 925, 926, 5, 43, 0, 0, 926, 927, 5, 3, 0, 0, 927, 928, 3, 64, 32, 0, 928, 929, 5, 33, 0, 0, 929, 930, 3, 30, 15, 0, 930, 931, 5, 4, 0, 0, 931, 963, 1, 0, 0, 0, 932, 934, 5, 102, 0, 0, 933, 932, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 935, 1, 0, 0, 0, 935, 937, 5, 70, 0, 0, 936, 933, 1, 0, 0, 0, 936, 937, 1, 0, 0, 0, 937, 938, 1, 0, 0, 0, 938, 939, 5, 3, 0, 0, 939, 940, 3, 82, 41, 0, 940, 941, 5, 4, 0, 0, 941, 963, 1, 0, 0, 0, 942, 944, 5, 42, 0, 0, 943, 945, 3, 64, 32, 0, 944, 943, 1, 0, 0, 0, 944, 945, 1, 0, 0, 0, 945, 951, 1, 0, 0, 0, 946, 947, 5, 147, 0, 0, 947, 948, 3, 64, 32, 0, 948, 949, 5, 135, 0, 0, 949, 950, 3, 64, 32, 0, 950, 952, 1, 0, 0, 0, 951, 946, 1, 0, 0, 0, 952, 953, 1, 0, 0, 0, 953, 951, 1, 0, 0, 0, 953, 954, 1, 0, 0, 0, 954, 957, 1, 0, 0, 0, 955, 956, 5, 65, 0, 0, 956, 958, 3, 64, 32, 0, 957, 955, 1, 0, 0, 0, 957, 958, 1, 0, 0, 0, 958, 959, 1, 0, 0, 0, 959, 960, 5, 66, 0, 0, 960, 963, 1, 0, 0, 0, 961, 963, 3, 66, 33, 0, 962, 
874, 1, 0, 0, 0, 962, 876, 1, 0, 0, 0, 962, 885, 1, 0, 0, 0, 962, 888, 1, 0, 0, 0, 962, 891, 1, 0, 0, 0, 962, 914, 1, 0, 0, 0, 962, 925, 1, 0, 0, 0, 962, 936, 1, 0, 0, 0, 962, 942, 1, 0, 0, 0, 962, 961, 1, 0, 0, 0, 963, 1083, 1, 0, 0, 0, 964, 965, 10, 20, 0, 0, 965, 966, 5, 11, 0, 0, 966, 1082, 3, 64, 32, 21, 967, 968, 10, 19, 0, 0, 968, 969, 7, 10, 0, 0, 969, 1082, 3, 64, 32, 20, 970, 971, 10, 18, 0, 0, 971, 972, 7, 4, 0, 0, 972, 1082, 3, 64, 32, 19, 973, 974, 10, 17, 0, 0, 974, 975, 7, 11, 0, 0, 975, 1082, 3, 64, 32, 18, 976, 977, 10, 16, 0, 0, 977, 978, 7, 12, 0, 0, 978, 1082, 3, 64, 32, 17, 979, 992, 10, 15, 0, 0, 980, 993, 5, 6, 0, 0, 981, 993, 5, 22, 0, 0, 982, 993, 5, 23, 0, 0, 983, 993, 5, 24, 0, 0, 984, 993, 5, 92, 0, 0, 985, 986, 5, 92, 0, 0, 986, 993, 5, 102, 0, 0, 987, 993, 5, 83, 0, 0, 988, 993, 5, 97, 0, 0, 989, 993, 5, 77, 0, 0, 990, 993, 5, 99, 0, 0, 991, 993, 5, 118, 0, 0, 992, 980, 1, 0, 0, 0, 992, 981, 1, 0, 0, 0, 992, 982, 1, 0, 0, 0, 992, 983, 1, 0, 0, 0, 992, 984, 1, 0, 0, 0, 992, 985, 1, 0, 0, 0, 992, 987, 1, 0, 0, 0, 992, 988, 1, 0, 0, 0, 992, 989, 1, 0, 0, 0, 992, 990, 1, 0, 0, 0, 992, 991, 1, 0, 0, 0, 993, 994, 1, 0, 0, 0, 994, 1082, 3, 64, 32, 16, 995, 996, 10, 14, 0, 0, 996, 997, 5, 32, 0, 0, 997, 1082, 3, 64, 32, 15, 998, 999, 10, 13, 0, 0, 999, 1000, 5, 108, 0, 0, 1000, 1082, 3, 64, 32, 14, 1001, 1002, 10, 6, 0, 0, 1002, 1004, 5, 92, 0, 0, 1003, 1005, 5, 102, 0, 0, 1004, 1003, 1, 0, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1082, 3, 64, 32, 7, 1007, 1009, 10, 5, 0, 0, 1008, 1010, 5, 102, 0, 0, 1009, 1008, 1, 0, 0, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 5, 39, 0, 0, 1012, 1013, 3, 64, 32, 0, 1013, 1014, 5, 32, 0, 0, 1014, 1015, 3, 64, 32, 6, 1015, 1082, 1, 0, 0, 0, 1016, 1017, 10, 9, 0, 0, 1017, 1018, 5, 45, 0, 0, 1018, 1082, 3, 190, 95, 0, 1019, 1021, 10, 8, 0, 0, 1020, 1022, 5, 102, 0, 0, 1021, 1020, 1, 0, 0, 0, 1021, 1022, 1, 0, 0, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 7, 13, 0, 0, 1024, 
1027, 3, 64, 32, 0, 1025, 1026, 5, 67, 0, 0, 1026, 1028, 3, 64, 32, 0, 1027, 1025, 1, 0, 0, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1082, 1, 0, 0, 0, 1029, 1034, 10, 7, 0, 0, 1030, 1035, 5, 93, 0, 0, 1031, 1035, 5, 103, 0, 0, 1032, 1033, 5, 102, 0, 0, 1033, 1035, 5, 104, 0, 0, 1034, 1030, 1, 0, 0, 0, 1034, 1031, 1, 0, 0, 0, 1034, 1032, 1, 0, 0, 0, 1035, 1082, 1, 0, 0, 0, 1036, 1038, 10, 4, 0, 0, 1037, 1039, 5, 102, 0, 0, 1038, 1037, 1, 0, 0, 0, 1038, 1039, 1, 0, 0, 0, 1039, 1040, 1, 0, 0, 0, 1040, 1079, 5, 83, 0, 0, 1041, 1051, 5, 3, 0, 0, 1042, 1052, 3, 82, 41, 0, 1043, 1048, 3, 64, 32, 0, 1044, 1045, 5, 5, 0, 0, 1045, 1047, 3, 64, 32, 0, 1046, 1044, 1, 0, 0, 0, 1047, 1050, 1, 0, 0, 0, 1048, 1046, 1, 0, 0, 0, 1048, 1049, 1, 0, 0, 0, 1049, 1052, 1, 0, 0, 0, 1050, 1048, 1, 0, 0, 0, 1051, 1042, 1, 0, 0, 0, 1051, 1043, 1, 0, 0, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1080, 5, 4, 0, 0, 1054, 1055, 3, 182, 91, 0, 1055, 1056, 5, 2, 0, 0, 1056, 1058, 1, 0, 0, 0, 1057, 1054, 1, 0, 0, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 1, 0, 0, 0, 1059, 1080, 3, 184, 92, 0, 1060, 1061, 3, 182, 91, 0, 1061, 1062, 5, 2, 0, 0, 1062, 1064, 1, 0, 0, 0, 1063, 1060, 1, 0, 0, 0, 1063, 1064, 1, 0, 0, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 3, 222, 111, 0, 1066, 1075, 5, 3, 0, 0, 1067, 1072, 3, 64, 32, 0, 1068, 1069, 5, 5, 0, 0, 1069, 1071, 3, 64, 32, 0, 1070, 1068, 1, 0, 0, 0, 1071, 1074, 1, 0, 0, 0, 1072, 1070, 1, 0, 0, 0, 1072, 1073, 1, 0, 0, 0, 1073, 1076, 1, 0, 0, 0, 1074, 1072, 1, 0, 0, 0, 1075, 1067, 1, 0, 0, 0, 1075, 1076, 1, 0, 0, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 5, 4, 0, 0, 1078, 1080, 1, 0, 0, 0, 1079, 1041, 1, 0, 0, 0, 1079, 1057, 1, 0, 0, 0, 1079, 1063, 1, 0, 0, 0, 1080, 1082, 1, 0, 0, 0, 1081, 964, 1, 0, 0, 0, 1081, 967, 1, 0, 0, 0, 1081, 970, 1, 0, 0, 0, 1081, 973, 1, 0, 0, 0, 1081, 976, 1, 0, 0, 0, 1081, 979, 1, 0, 0, 0, 1081, 995, 1, 0, 0, 0, 1081, 998, 1, 0, 0, 0, 1081, 1001, 1, 0, 0, 0, 1081, 1007, 1, 0, 0, 0, 1081, 1016, 1, 0, 0, 0, 1081, 1019, 1, 0, 0, 0, 1081, 
1029, 1, 0, 0, 0, 1081, 1036, 1, 0, 0, 0, 1082, 1085, 1, 0, 0, 0, 1083, 1081, 1, 0, 0, 0, 1083, 1084, 1, 0, 0, 0, 1084, 65, 1, 0, 0, 0, 1085, 1083, 1, 0, 0, 0, 1086, 1087, 5, 115, 0, 0, 1087, 1092, 5, 3, 0, 0, 1088, 1093, 5, 81, 0, 0, 1089, 1090, 7, 14, 0, 0, 1090, 1091, 5, 5, 0, 0, 1091, 1093, 3, 170, 85, 0, 1092, 1088, 1, 0, 0, 0, 1092, 1089, 1, 0, 0, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 5, 4, 0, 0, 1095, 67, 1, 0, 0, 0, 1096, 1097, 7, 15, 0, 0, 1097, 69, 1, 0, 0, 0, 1098, 1100, 3, 48, 24, 0, 1099, 1098, 1, 0, 0, 0, 1099, 1100, 1, 0, 0, 0, 1100, 1106, 1, 0, 0, 0, 1101, 1107, 5, 88, 0, 0, 1102, 1107, 5, 122, 0, 0, 1103, 1104, 5, 88, 0, 0, 1104, 1105, 5, 108, 0, 0, 1105, 1107, 7, 8, 0, 0, 1106, 1101, 1, 0, 0, 0, 1106, 1102, 1, 0, 0, 0, 1106, 1103, 1, 0, 0, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1112, 5, 91, 0, 0, 1109, 1110, 3, 182, 91, 0, 1110, 1111, 5, 2, 0, 0, 1111, 1113, 1, 0, 0, 0, 1112, 1109, 1, 0, 0, 0, 1112, 1113, 1, 0, 0, 0, 1113, 1114, 1, 0, 0, 0, 1114, 1117, 3, 184, 92, 0, 1115, 1116, 5, 33, 0, 0, 1116, 1118, 3, 206, 103, 0, 1117, 1115, 1, 0, 0, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1130, 1, 0, 0, 0, 1119, 1120, 5, 3, 0, 0, 1120, 1125, 3, 188, 94, 0, 1121, 1122, 5, 5, 0, 0, 1122, 1124, 3, 188, 94, 0, 1123, 1121, 1, 0, 0, 0, 1124, 1127, 1, 0, 0, 0, 1125, 1123, 1, 0, 0, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1128, 1, 0, 0, 0, 1127, 1125, 1, 0, 0, 0, 1128, 1129, 5, 4, 0, 0, 1129, 1131, 1, 0, 0, 0, 1130, 1119, 1, 0, 0, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1168, 1, 0, 0, 0, 1132, 1133, 5, 144, 0, 0, 1133, 1134, 5, 3, 0, 0, 1134, 1139, 3, 64, 32, 0, 1135, 1136, 5, 5, 0, 0, 1136, 1138, 3, 64, 32, 0, 1137, 1135, 1, 0, 0, 0, 1138, 1141, 1, 0, 0, 0, 1139, 1137, 1, 0, 0, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1142, 1, 0, 0, 0, 1141, 1139, 1, 0, 0, 0, 1142, 1157, 5, 4, 0, 0, 1143, 1144, 5, 5, 0, 0, 1144, 1145, 5, 3, 0, 0, 1145, 1150, 3, 64, 32, 0, 1146, 1147, 5, 5, 0, 0, 1147, 1149, 3, 64, 32, 0, 1148, 1146, 1, 0, 0, 0, 1149, 1152, 1, 0, 0, 0, 1150, 1148, 1, 0, 0, 0, 1150, 1151, 1, 0, 0, 0, 
1151, 1153, 1, 0, 0, 0, 1152, 1150, 1, 0, 0, 0, 1153, 1154, 5, 4, 0, 0, 1154, 1156, 1, 0, 0, 0, 1155, 1143, 1, 0, 0, 0, 1156, 1159, 1, 0, 0, 0, 1157, 1155, 1, 0, 0, 0, 1157, 1158, 1, 0, 0, 0, 1158, 1162, 1, 0, 0, 0, 1159, 1157, 1, 0, 0, 0, 1160, 1162, 3, 82, 41, 0, 1161, 1132, 1, 0, 0, 0, 1161, 1160, 1, 0, 0, 0, 1162, 1164, 1, 0, 0, 0, 1163, 1165, 3, 74, 37, 0, 1164, 1163, 1, 0, 0, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1169, 1, 0, 0, 0, 1166, 1167, 5, 56, 0, 0, 1167, 1169, 5, 144, 0, 0, 1168, 1161, 1, 0, 0, 0, 1168, 1166, 1, 0, 0, 0, 1169, 1171, 1, 0, 0, 0, 1170, 1172, 3, 72, 36, 0, 1171, 1170, 1, 0, 0, 0, 1171, 1172, 1, 0, 0, 0, 1172, 71, 1, 0, 0, 0, 1173, 1174, 5, 124, 0, 0, 1174, 1179, 3, 96, 48, 0, 1175, 1176, 5, 5, 0, 0, 1176, 1178, 3, 96, 48, 0, 1177, 1175, 1, 0, 0, 0, 1178, 1181, 1, 0, 0, 0, 1179, 1177, 1, 0, 0, 0, 1179, 1180, 1, 0, 0, 0, 1180, 73, 1, 0, 0, 0, 1181, 1179, 1, 0, 0, 0, 1182, 1183, 5, 107, 0, 0, 1183, 1198, 5, 48, 0, 0, 1184, 1185, 5, 3, 0, 0, 1185, 1190, 3, 24, 12, 0, 1186, 1187, 5, 5, 0, 0, 1187, 1189, 3, 24, 12, 0, 1188, 1186, 1, 0, 0, 0, 1189, 1192, 1, 0, 0, 0, 1190, 1188, 1, 0, 0, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1193, 1, 0, 0, 0, 1192, 1190, 1, 0, 0, 0, 1193, 1196, 5, 4, 0, 0, 1194, 1195, 5, 148, 0, 0, 1195, 1197, 3, 64, 32, 0, 1196, 1194, 1, 0, 0, 0, 1196, 1197, 1, 0, 0, 0, 1197, 1199, 1, 0, 0, 0, 1198, 1184, 1, 0, 0, 0, 1198, 1199, 1, 0, 0, 0, 1199, 1200, 1, 0, 0, 0, 1200, 1227, 5, 183, 0, 0, 1201, 1228, 5, 184, 0, 0, 1202, 1203, 5, 141, 0, 0, 1203, 1206, 5, 131, 0, 0, 1204, 1207, 3, 188, 94, 0, 1205, 1207, 3, 110, 55, 0, 1206, 1204, 1, 0, 0, 0, 1206, 1205, 1, 0, 0, 0, 1207, 1208, 1, 0, 0, 0, 1208, 1209, 5, 6, 0, 0, 1209, 1220, 3, 64, 32, 0, 1210, 1213, 5, 5, 0, 0, 1211, 1214, 3, 188, 94, 0, 1212, 1214, 3, 110, 55, 0, 1213, 1211, 1, 0, 0, 0, 1213, 1212, 1, 0, 0, 0, 1214, 1215, 1, 0, 0, 0, 1215, 1216, 5, 6, 0, 0, 1216, 1217, 3, 64, 32, 0, 1217, 1219, 1, 0, 0, 0, 1218, 1210, 1, 0, 0, 0, 1219, 1222, 1, 0, 0, 0, 1220, 1218, 1, 0, 0, 0, 1220, 
1221, 1, 0, 0, 0, 1221, 1225, 1, 0, 0, 0, 1222, 1220, 1, 0, 0, 0, 1223, 1224, 5, 148, 0, 0, 1224, 1226, 3, 64, 32, 0, 1225, 1223, 1, 0, 0, 0, 1225, 1226, 1, 0, 0, 0, 1226, 1228, 1, 0, 0, 0, 1227, 1201, 1, 0, 0, 0, 1227, 1202, 1, 0, 0, 0, 1228, 75, 1, 0, 0, 0, 1229, 1233, 5, 112, 0, 0, 1230, 1231, 3, 182, 91, 0, 1231, 1232, 5, 2, 0, 0, 1232, 1234, 1, 0, 0, 0, 1233, 1230, 1, 0, 0, 0, 1233, 1234, 1, 0, 0, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1242, 3, 202, 101, 0, 1236, 1237, 5, 6, 0, 0, 1237, 1243, 3, 78, 39, 0, 1238, 1239, 5, 3, 0, 0, 1239, 1240, 3, 78, 39, 0, 1240, 1241, 5, 4, 0, 0, 1241, 1243, 1, 0, 0, 0, 1242, 1236, 1, 0, 0, 0, 1242, 1238, 1, 0, 0, 0, 1242, 1243, 1, 0, 0, 0, 1243, 77, 1, 0, 0, 0, 1244, 1248, 3, 34, 17, 0, 1245, 1248, 3, 178, 89, 0, 1246, 1248, 5, 188, 0, 0, 1247, 1244, 1, 0, 0, 0, 1247, 1245, 1, 0, 0, 0, 1247, 1246, 1, 0, 0, 0, 1248, 79, 1, 0, 0, 0, 1249, 1260, 5, 119, 0, 0, 1250, 1261, 3, 190, 95, 0, 1251, 1252, 3, 182, 91, 0, 1252, 1253, 5, 2, 0, 0, 1253, 1255, 1, 0, 0, 0, 1254, 1251, 1, 0, 0, 0, 1254, 1255, 1, 0, 0, 0, 1255, 1258, 1, 0, 0, 0, 1256, 1259, 3, 184, 92, 0, 1257, 1259, 3, 194, 97, 0, 1258, 1256, 1, 0, 0, 0, 1258, 1257, 1, 0, 0, 0, 1259, 1261, 1, 0, 0, 0, 1260, 1250, 1, 0, 0, 0, 1260, 1254, 1, 0, 0, 0, 1260, 1261, 1, 0, 0, 0, 1261, 81, 1, 0, 0, 0, 1262, 1264, 3, 134, 67, 0, 1263, 1262, 1, 0, 0, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1265, 1, 0, 0, 0, 1265, 1271, 3, 86, 43, 0, 1266, 1267, 3, 102, 51, 0, 1267, 1268, 3, 86, 43, 0, 1268, 1270, 1, 0, 0, 0, 1269, 1266, 1, 0, 0, 0, 1270, 1273, 1, 0, 0, 0, 1271, 1269, 1, 0, 0, 0, 1271, 1272, 1, 0, 0, 0, 1272, 1275, 1, 0, 0, 0, 1273, 1271, 1, 0, 0, 0, 1274, 1276, 3, 136, 68, 0, 1275, 1274, 1, 0, 0, 0, 1275, 1276, 1, 0, 0, 0, 1276, 1278, 1, 0, 0, 0, 1277, 1279, 3, 138, 69, 0, 1278, 1277, 1, 0, 0, 0, 1278, 1279, 1, 0, 0, 0, 1279, 83, 1, 0, 0, 0, 1280, 1288, 3, 94, 47, 0, 1281, 1282, 3, 98, 49, 0, 1282, 1284, 3, 94, 47, 0, 1283, 1285, 3, 100, 50, 0, 1284, 1283, 1, 0, 0, 0, 1284, 1285, 1, 0, 0, 0, 1285, 
1287, 1, 0, 0, 0, 1286, 1281, 1, 0, 0, 0, 1287, 1290, 1, 0, 0, 0, 1288, 1286, 1, 0, 0, 0, 1288, 1289, 1, 0, 0, 0, 1289, 85, 1, 0, 0, 0, 1290, 1288, 1, 0, 0, 0, 1291, 1293, 5, 130, 0, 0, 1292, 1294, 7, 16, 0, 0, 1293, 1292, 1, 0, 0, 0, 1293, 1294, 1, 0, 0, 0, 1294, 1295, 1, 0, 0, 0, 1295, 1300, 3, 96, 48, 0, 1296, 1297, 5, 5, 0, 0, 1297, 1299, 3, 96, 48, 0, 1298, 1296, 1, 0, 0, 0, 1299, 1302, 1, 0, 0, 0, 1300, 1298, 1, 0, 0, 0, 1300, 1301, 1, 0, 0, 0, 1301, 1315, 1, 0, 0, 0, 1302, 1300, 1, 0, 0, 0, 1303, 1313, 5, 75, 0, 0, 1304, 1309, 3, 94, 47, 0, 1305, 1306, 5, 5, 0, 0, 1306, 1308, 3, 94, 47, 0, 1307, 1305, 1, 0, 0, 0, 1308, 1311, 1, 0, 0, 0, 1309, 1307, 1, 0, 0, 0, 1309, 1310, 1, 0, 0, 0, 1310, 1314, 1, 0, 0, 0, 1311, 1309, 1, 0, 0, 0, 1312, 1314, 3, 84, 42, 0, 1313, 1304, 1, 0, 0, 0, 1313, 1312, 1, 0, 0, 0, 1314, 1316, 1, 0, 0, 0, 1315, 1303, 1, 0, 0, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1319, 1, 0, 0, 0, 1317, 1318, 5, 148, 0, 0, 1318, 1320, 3, 64, 32, 0, 1319, 1317, 1, 0, 0, 0, 1319, 1320, 1, 0, 0, 0, 1320, 1335, 1, 0, 0, 0, 1321, 1322, 5, 78, 0, 0, 1322, 1323, 5, 40, 0, 0, 1323, 1328, 3, 64, 32, 0, 1324, 1325, 5, 5, 0, 0, 1325, 1327, 3, 64, 32, 0, 1326, 1324, 1, 0, 0, 0, 1327, 1330, 1, 0, 0, 0, 1328, 1326, 1, 0, 0, 0, 1328, 1329, 1, 0, 0, 0, 1329, 1333, 1, 0, 0, 0, 1330, 1328, 1, 0, 0, 0, 1331, 1332, 5, 79, 0, 0, 1332, 1334, 3, 64, 32, 0, 1333, 1331, 1, 0, 0, 0, 1333, 1334, 1, 0, 0, 0, 1334, 1336, 1, 0, 0, 0, 1335, 1321, 1, 0, 0, 0, 1335, 1336, 1, 0, 0, 0, 1336, 1351, 1, 0, 0, 0, 1337, 1338, 5, 174, 0, 0, 1338, 1339, 3, 210, 105, 0, 1339, 1340, 5, 33, 0, 0, 1340, 1348, 3, 120, 60, 0, 1341, 1342, 5, 5, 0, 0, 1342, 1343, 3, 210, 105, 0, 1343, 1344, 5, 33, 0, 0, 1344, 1345, 3, 120, 60, 0, 1345, 1347, 1, 0, 0, 0, 1346, 1341, 1, 0, 0, 0, 1347, 1350, 1, 0, 0, 0, 1348, 1346, 1, 0, 0, 0, 1348, 1349, 1, 0, 0, 0, 1349, 1352, 1, 0, 0, 0, 1350, 1348, 1, 0, 0, 0, 1351, 1337, 1, 0, 0, 0, 1351, 1352, 1, 0, 0, 0, 1352, 1382, 1, 0, 0, 0, 1353, 1354, 5, 144, 0, 0, 1354, 1355, 5, 
3, 0, 0, 1355, 1360, 3, 64, 32, 0, 1356, 1357, 5, 5, 0, 0, 1357, 1359, 3, 64, 32, 0, 1358, 1356, 1, 0, 0, 0, 1359, 1362, 1, 0, 0, 0, 1360, 1358, 1, 0, 0, 0, 1360, 1361, 1, 0, 0, 0, 1361, 1363, 1, 0, 0, 0, 1362, 1360, 1, 0, 0, 0, 1363, 1378, 5, 4, 0, 0, 1364, 1365, 5, 5, 0, 0, 1365, 1366, 5, 3, 0, 0, 1366, 1371, 3, 64, 32, 0, 1367, 1368, 5, 5, 0, 0, 1368, 1370, 3, 64, 32, 0, 1369, 1367, 1, 0, 0, 0, 1370, 1373, 1, 0, 0, 0, 1371, 1369, 1, 0, 0, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1374, 1, 0, 0, 0, 1373, 1371, 1, 0, 0, 0, 1374, 1375, 5, 4, 0, 0, 1375, 1377, 1, 0, 0, 0, 1376, 1364, 1, 0, 0, 0, 1377, 1380, 1, 0, 0, 0, 1378, 1376, 1, 0, 0, 0, 1378, 1379, 1, 0, 0, 0, 1379, 1382, 1, 0, 0, 0, 1380, 1378, 1, 0, 0, 0, 1381, 1291, 1, 0, 0, 0, 1381, 1353, 1, 0, 0, 0, 1382, 87, 1, 0, 0, 0, 1383, 1384, 3, 82, 41, 0, 1384, 89, 1, 0, 0, 0, 1385, 1387, 3, 134, 67, 0, 1386, 1385, 1, 0, 0, 0, 1386, 1387, 1, 0, 0, 0, 1387, 1388, 1, 0, 0, 0, 1388, 1390, 3, 86, 43, 0, 1389, 1391, 3, 136, 68, 0, 1390, 1389, 1, 0, 0, 0, 1390, 1391, 1, 0, 0, 0, 1391, 1393, 1, 0, 0, 0, 1392, 1394, 3, 138, 69, 0, 1393, 1392, 1, 0, 0, 0, 1393, 1394, 1, 0, 0, 0, 1394, 91, 1, 0, 0, 0, 1395, 1397, 3, 134, 67, 0, 1396, 1395, 1, 0, 0, 0, 1396, 1397, 1, 0, 0, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1408, 3, 86, 43, 0, 1399, 1401, 5, 139, 0, 0, 1400, 1402, 5, 29, 0, 0, 1401, 1400, 1, 0, 0, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1406, 1, 0, 0, 0, 1403, 1406, 5, 90, 0, 0, 1404, 1406, 5, 68, 0, 0, 1405, 1399, 1, 0, 0, 0, 1405, 1403, 1, 0, 0, 0, 1405, 1404, 1, 0, 0, 0, 1406, 1407, 1, 0, 0, 0, 1407, 1409, 3, 86, 43, 0, 1408, 1405, 1, 0, 0, 0, 1409, 1410, 1, 0, 0, 0, 1410, 1408, 1, 0, 0, 0, 1410, 1411, 1, 0, 0, 0, 1411, 1413, 1, 0, 0, 0, 1412, 1414, 3, 136, 68, 0, 1413, 1412, 1, 0, 0, 0, 1413, 1414, 1, 0, 0, 0, 1414, 1416, 1, 0, 0, 0, 1415, 1417, 3, 138, 69, 0, 1416, 1415, 1, 0, 0, 0, 1416, 1417, 1, 0, 0, 0, 1417, 93, 1, 0, 0, 0, 1418, 1419, 3, 182, 91, 0, 1419, 1420, 5, 2, 0, 0, 1420, 1422, 1, 0, 0, 0, 1421, 1418, 1, 0, 0, 0, 1421, 1422, 
1, 0, 0, 0, 1422, 1423, 1, 0, 0, 0, 1423, 1428, 3, 184, 92, 0, 1424, 1426, 5, 33, 0, 0, 1425, 1424, 1, 0, 0, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1429, 3, 206, 103, 0, 1428, 1425, 1, 0, 0, 0, 1428, 1429, 1, 0, 0, 0, 1429, 1435, 1, 0, 0, 0, 1430, 1431, 5, 85, 0, 0, 1431, 1432, 5, 40, 0, 0, 1432, 1436, 3, 194, 97, 0, 1433, 1434, 5, 102, 0, 0, 1434, 1436, 5, 85, 0, 0, 1435, 1430, 1, 0, 0, 0, 1435, 1433, 1, 0, 0, 0, 1435, 1436, 1, 0, 0, 0, 1436, 1483, 1, 0, 0, 0, 1437, 1438, 3, 182, 91, 0, 1438, 1439, 5, 2, 0, 0, 1439, 1441, 1, 0, 0, 0, 1440, 1437, 1, 0, 0, 0, 1440, 1441, 1, 0, 0, 0, 1441, 1442, 1, 0, 0, 0, 1442, 1443, 3, 222, 111, 0, 1443, 1444, 5, 3, 0, 0, 1444, 1449, 3, 64, 32, 0, 1445, 1446, 5, 5, 0, 0, 1446, 1448, 3, 64, 32, 0, 1447, 1445, 1, 0, 0, 0, 1448, 1451, 1, 0, 0, 0, 1449, 1447, 1, 0, 0, 0, 1449, 1450, 1, 0, 0, 0, 1450, 1452, 1, 0, 0, 0, 1451, 1449, 1, 0, 0, 0, 1452, 1457, 5, 4, 0, 0, 1453, 1455, 5, 33, 0, 0, 1454, 1453, 1, 0, 0, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 1, 0, 0, 0, 1456, 1458, 3, 206, 103, 0, 1457, 1454, 1, 0, 0, 0, 1457, 1458, 1, 0, 0, 0, 1458, 1483, 1, 0, 0, 0, 1459, 1469, 5, 3, 0, 0, 1460, 1465, 3, 94, 47, 0, 1461, 1462, 5, 5, 0, 0, 1462, 1464, 3, 94, 47, 0, 1463, 1461, 1, 0, 0, 0, 1464, 1467, 1, 0, 0, 0, 1465, 1463, 1, 0, 0, 0, 1465, 1466, 1, 0, 0, 0, 1466, 1470, 1, 0, 0, 0, 1467, 1465, 1, 0, 0, 0, 1468, 1470, 3, 84, 42, 0, 1469, 1460, 1, 0, 0, 0, 1469, 1468, 1, 0, 0, 0, 1470, 1471, 1, 0, 0, 0, 1471, 1472, 5, 4, 0, 0, 1472, 1483, 1, 0, 0, 0, 1473, 1474, 5, 3, 0, 0, 1474, 1475, 3, 82, 41, 0, 1475, 1480, 5, 4, 0, 0, 1476, 1478, 5, 33, 0, 0, 1477, 1476, 1, 0, 0, 0, 1477, 1478, 1, 0, 0, 0, 1478, 1479, 1, 0, 0, 0, 1479, 1481, 3, 206, 103, 0, 1480, 1477, 1, 0, 0, 0, 1480, 1481, 1, 0, 0, 0, 1481, 1483, 1, 0, 0, 0, 1482, 1421, 1, 0, 0, 0, 1482, 1440, 1, 0, 0, 0, 1482, 1459, 1, 0, 0, 0, 1482, 1473, 1, 0, 0, 0, 1483, 95, 1, 0, 0, 0, 1484, 1497, 5, 7, 0, 0, 1485, 1486, 3, 184, 92, 0, 1486, 1487, 5, 2, 0, 0, 1487, 1488, 5, 7, 0, 
0, 1488, 1497, 1, 0, 0, 0, 1489, 1494, 3, 64, 32, 0, 1490, 1492, 5, 33, 0, 0, 1491, 1490, 1, 0, 0, 0, 1491, 1492, 1, 0, 0, 0, 1492, 1493, 1, 0, 0, 0, 1493, 1495, 3, 174, 87, 0, 1494, 1491, 1, 0, 0, 0, 1494, 1495, 1, 0, 0, 0, 1495, 1497, 1, 0, 0, 0, 1496, 1484, 1, 0, 0, 0, 1496, 1485, 1, 0, 0, 0, 1496, 1489, 1, 0, 0, 0, 1497, 97, 1, 0, 0, 0, 1498, 1512, 5, 5, 0, 0, 1499, 1501, 5, 100, 0, 0, 1500, 1499, 1, 0, 0, 0, 1500, 1501, 1, 0, 0, 0, 1501, 1508, 1, 0, 0, 0, 1502, 1504, 5, 96, 0, 0, 1503, 1505, 5, 110, 0, 0, 1504, 1503, 1, 0, 0, 0, 1504, 1505, 1, 0, 0, 0, 1505, 1509, 1, 0, 0, 0, 1506, 1509, 5, 87, 0, 0, 1507, 1509, 5, 51, 0, 0, 1508, 1502, 1, 0, 0, 0, 1508, 1506, 1, 0, 0, 0, 1508, 1507, 1, 0, 0, 0, 1508, 1509, 1, 0, 0, 0, 1509, 1510, 1, 0, 0, 0, 1510, 1512, 5, 94, 0, 0, 1511, 1498, 1, 0, 0, 0, 1511, 1500, 1, 0, 0, 0, 1512, 99, 1, 0, 0, 0, 1513, 1514, 5, 107, 0, 0, 1514, 1528, 3, 64, 32, 0, 1515, 1516, 5, 142, 0, 0, 1516, 1517, 5, 3, 0, 0, 1517, 1522, 3, 188, 94, 0, 1518, 1519, 5, 5, 0, 0, 1519, 1521, 3, 188, 94, 0, 1520, 1518, 1, 0, 0, 0, 1521, 1524, 1, 0, 0, 0, 1522, 1520, 1, 0, 0, 0, 1522, 1523, 1, 0, 0, 0, 1523, 1525, 1, 0, 0, 0, 1524, 1522, 1, 0, 0, 0, 1525, 1526, 5, 4, 0, 0, 1526, 1528, 1, 0, 0, 0, 1527, 1513, 1, 0, 0, 0, 1527, 1515, 1, 0, 0, 0, 1528, 101, 1, 0, 0, 0, 1529, 1531, 5, 139, 0, 0, 1530, 1532, 5, 29, 0, 0, 1531, 1530, 1, 0, 0, 0, 1531, 1532, 1, 0, 0, 0, 1532, 1536, 1, 0, 0, 0, 1533, 1536, 5, 90, 0, 0, 1534, 1536, 5, 68, 0, 0, 1535, 1529, 1, 0, 0, 0, 1535, 1533, 1, 0, 0, 0, 1535, 1534, 1, 0, 0, 0, 1536, 103, 1, 0, 0, 0, 1537, 1539, 3, 48, 24, 0, 1538, 1537, 1, 0, 0, 0, 1538, 1539, 1, 0, 0, 0, 1539, 1540, 1, 0, 0, 0, 1540, 1543, 5, 141, 0, 0, 1541, 1542, 5, 108, 0, 0, 1542, 1544, 7, 8, 0, 0, 1543, 1541, 1, 0, 0, 0, 1543, 1544, 1, 0, 0, 0, 1544, 1545, 1, 0, 0, 0, 1545, 1546, 3, 114, 57, 0, 1546, 1547, 5, 131, 0, 0, 1547, 1560, 3, 106, 53, 0, 1548, 1558, 5, 75, 0, 0, 1549, 1554, 3, 94, 47, 0, 1550, 1551, 5, 5, 0, 0, 1551, 1553, 3, 94, 47, 0, 1552, 
1550, 1, 0, 0, 0, 1553, 1556, 1, 0, 0, 0, 1554, 1552, 1, 0, 0, 0, 1554, 1555, 1, 0, 0, 0, 1555, 1559, 1, 0, 0, 0, 1556, 1554, 1, 0, 0, 0, 1557, 1559, 3, 84, 42, 0, 1558, 1549, 1, 0, 0, 0, 1558, 1557, 1, 0, 0, 0, 1559, 1561, 1, 0, 0, 0, 1560, 1548, 1, 0, 0, 0, 1560, 1561, 1, 0, 0, 0, 1561, 1564, 1, 0, 0, 0, 1562, 1563, 5, 148, 0, 0, 1563, 1565, 3, 64, 32, 0, 1564, 1562, 1, 0, 0, 0, 1564, 1565, 1, 0, 0, 0, 1565, 1567, 1, 0, 0, 0, 1566, 1568, 3, 72, 36, 0, 1567, 1566, 1, 0, 0, 0, 1567, 1568, 1, 0, 0, 0, 1568, 105, 1, 0, 0, 0, 1569, 1574, 3, 108, 54, 0, 1570, 1571, 5, 5, 0, 0, 1571, 1573, 3, 108, 54, 0, 1572, 1570, 1, 0, 0, 0, 1573, 1576, 1, 0, 0, 0, 1574, 1572, 1, 0, 0, 0, 1574, 1575, 1, 0, 0, 0, 1575, 107, 1, 0, 0, 0, 1576, 1574, 1, 0, 0, 0, 1577, 1580, 3, 188, 94, 0, 1578, 1580, 3, 110, 55, 0, 1579, 1577, 1, 0, 0, 0, 1579, 1578, 1, 0, 0, 0, 1580, 1581, 1, 0, 0, 0, 1581, 1582, 5, 6, 0, 0, 1582, 1583, 3, 64, 32, 0, 1583, 109, 1, 0, 0, 0, 1584, 1585, 5, 3, 0, 0, 1585, 1590, 3, 188, 94, 0, 1586, 1587, 5, 5, 0, 0, 1587, 1589, 3, 188, 94, 0, 1588, 1586, 1, 0, 0, 0, 1589, 1592, 1, 0, 0, 0, 1590, 1588, 1, 0, 0, 0, 1590, 1591, 1, 0, 0, 0, 1591, 1593, 1, 0, 0, 0, 1592, 1590, 1, 0, 0, 0, 1593, 1594, 5, 4, 0, 0, 1594, 111, 1, 0, 0, 0, 1595, 1597, 3, 48, 24, 0, 1596, 1595, 1, 0, 0, 0, 1596, 1597, 1, 0, 0, 0, 1597, 1598, 1, 0, 0, 0, 1598, 1601, 5, 141, 0, 0, 1599, 1600, 5, 108, 0, 0, 1600, 1602, 7, 8, 0, 0, 1601, 1599, 1, 0, 0, 0, 1601, 1602, 1, 0, 0, 0, 1602, 1603, 1, 0, 0, 0, 1603, 1604, 3, 114, 57, 0, 1604, 1607, 5, 131, 0, 0, 1605, 1608, 3, 188, 94, 0, 1606, 1608, 3, 110, 55, 0, 1607, 1605, 1, 0, 0, 0, 1607, 1606, 1, 0, 0, 0, 1608, 1609, 1, 0, 0, 0, 1609, 1610, 5, 6, 0, 0, 1610, 1621, 3, 64, 32, 0, 1611, 1614, 5, 5, 0, 0, 1612, 1615, 3, 188, 94, 0, 1613, 1615, 3, 110, 55, 0, 1614, 1612, 1, 0, 0, 0, 1614, 1613, 1, 0, 0, 0, 1615, 1616, 1, 0, 0, 0, 1616, 1617, 5, 6, 0, 0, 1617, 1618, 3, 64, 32, 0, 1618, 1620, 1, 0, 0, 0, 1619, 1611, 1, 0, 0, 0, 1620, 1623, 1, 0, 0, 0, 1621, 
1619, 1, 0, 0, 0, 1621, 1622, 1, 0, 0, 0, 1622, 1626, 1, 0, 0, 0, 1623, 1621, 1, 0, 0, 0, 1624, 1625, 5, 148, 0, 0, 1625, 1627, 3, 64, 32, 0, 1626, 1624, 1, 0, 0, 0, 1626, 1627, 1, 0, 0, 0, 1627, 1629, 1, 0, 0, 0, 1628, 1630, 3, 72, 36, 0, 1629, 1628, 1, 0, 0, 0, 1629, 1630, 1, 0, 0, 0, 1630, 1635, 1, 0, 0, 0, 1631, 1633, 3, 136, 68, 0, 1632, 1631, 1, 0, 0, 0, 1632, 1633, 1, 0, 0, 0, 1633, 1634, 1, 0, 0, 0, 1634, 1636, 3, 138, 69, 0, 1635, 1632, 1, 0, 0, 0, 1635, 1636, 1, 0, 0, 0, 1636, 113, 1, 0, 0, 0, 1637, 1638, 3, 182, 91, 0, 1638, 1639, 5, 2, 0, 0, 1639, 1641, 1, 0, 0, 0, 1640, 1637, 1, 0, 0, 0, 1640, 1641, 1, 0, 0, 0, 1641, 1642, 1, 0, 0, 0, 1642, 1645, 3, 184, 92, 0, 1643, 1644, 5, 33, 0, 0, 1644, 1646, 3, 212, 106, 0, 1645, 1643, 1, 0, 0, 0, 1645, 1646, 1, 0, 0, 0, 1646, 1652, 1, 0, 0, 0, 1647, 1648, 5, 85, 0, 0, 1648, 1649, 5, 40, 0, 0, 1649, 1653, 3, 194, 97, 0, 1650, 1651, 5, 102, 0, 0, 1651, 1653, 5, 85, 0, 0, 1652, 1647, 1, 0, 0, 0, 1652, 1650, 1, 0, 0, 0, 1652, 1653, 1, 0, 0, 0, 1653, 115, 1, 0, 0, 0, 1654, 1656, 5, 143, 0, 0, 1655, 1657, 3, 182, 91, 0, 1656, 1655, 1, 0, 0, 0, 1656, 1657, 1, 0, 0, 0, 1657, 1660, 1, 0, 0, 0, 1658, 1659, 5, 91, 0, 0, 1659, 1661, 3, 214, 107, 0, 1660, 1658, 1, 0, 0, 0, 1660, 1661, 1, 0, 0, 0, 1661, 117, 1, 0, 0, 0, 1662, 1663, 5, 178, 0, 0, 1663, 1664, 5, 3, 0, 0, 1664, 1665, 5, 148, 0, 0, 1665, 1666, 3, 64, 32, 0, 1666, 1667, 5, 4, 0, 0, 1667, 119, 1, 0, 0, 0, 1668, 1670, 5, 3, 0, 0, 1669, 1671, 3, 216, 108, 0, 1670, 1669, 1, 0, 0, 0, 1670, 1671, 1, 0, 0, 0, 1671, 1682, 1, 0, 0, 0, 1672, 1673, 5, 153, 0, 0, 1673, 1674, 5, 40, 0, 0, 1674, 1679, 3, 64, 32, 0, 1675, 1676, 5, 5, 0, 0, 1676, 1678, 3, 64, 32, 0, 1677, 1675, 1, 0, 0, 0, 1678, 1681, 1, 0, 0, 0, 1679, 1677, 1, 0, 0, 0, 1679, 1680, 1, 0, 0, 0, 1680, 1683, 1, 0, 0, 0, 1681, 1679, 1, 0, 0, 0, 1682, 1672, 1, 0, 0, 0, 1682, 1683, 1, 0, 0, 0, 1683, 1684, 1, 0, 0, 0, 1684, 1685, 5, 109, 0, 0, 1685, 1686, 5, 40, 0, 0, 1686, 1691, 3, 140, 70, 0, 1687, 1688, 5, 5, 0, 0, 
1688, 1690, 3, 140, 70, 0, 1689, 1687, 1, 0, 0, 0, 1690, 1693, 1, 0, 0, 0, 1691, 1689, 1, 0, 0, 0, 1691, 1692, 1, 0, 0, 0, 1692, 1695, 1, 0, 0, 0, 1693, 1691, 1, 0, 0, 0, 1694, 1696, 3, 124, 62, 0, 1695, 1694, 1, 0, 0, 0, 1695, 1696, 1, 0, 0, 0, 1696, 1697, 1, 0, 0, 0, 1697, 1698, 5, 4, 0, 0, 1698, 121, 1, 0, 0, 0, 1699, 1733, 5, 152, 0, 0, 1700, 1734, 3, 210, 105, 0, 1701, 1703, 5, 3, 0, 0, 1702, 1704, 3, 216, 108, 0, 1703, 1702, 1, 0, 0, 0, 1703, 1704, 1, 0, 0, 0, 1704, 1715, 1, 0, 0, 0, 1705, 1706, 5, 153, 0, 0, 1706, 1707, 5, 40, 0, 0, 1707, 1712, 3, 64, 32, 0, 1708, 1709, 5, 5, 0, 0, 1709, 1711, 3, 64, 32, 0, 1710, 1708, 1, 0, 0, 0, 1711, 1714, 1, 0, 0, 0, 1712, 1710, 1, 0, 0, 0, 1712, 1713, 1, 0, 0, 0, 1713, 1716, 1, 0, 0, 0, 1714, 1712, 1, 0, 0, 0, 1715, 1705, 1, 0, 0, 0, 1715, 1716, 1, 0, 0, 0, 1716, 1727, 1, 0, 0, 0, 1717, 1718, 5, 109, 0, 0, 1718, 1719, 5, 40, 0, 0, 1719, 1724, 3, 140, 70, 0, 1720, 1721, 5, 5, 0, 0, 1721, 1723, 3, 140, 70, 0, 1722, 1720, 1, 0, 0, 0, 1723, 1726, 1, 0, 0, 0, 1724, 1722, 1, 0, 0, 0, 1724, 1725, 1, 0, 0, 0, 1725, 1728, 1, 0, 0, 0, 1726, 1724, 1, 0, 0, 0, 1727, 1717, 1, 0, 0, 0, 1727, 1728, 1, 0, 0, 0, 1728, 1730, 1, 0, 0, 0, 1729, 1731, 3, 124, 62, 0, 1730, 1729, 1, 0, 0, 0, 1730, 1731, 1, 0, 0, 0, 1731, 1732, 1, 0, 0, 0, 1732, 1734, 5, 4, 0, 0, 1733, 1700, 1, 0, 0, 0, 1733, 1701, 1, 0, 0, 0, 1734, 123, 1, 0, 0, 0, 1735, 1743, 3, 126, 63, 0, 1736, 1737, 5, 180, 0, 0, 1737, 1738, 5, 101, 0, 0, 1738, 1744, 5, 182, 0, 0, 1739, 1740, 5, 157, 0, 0, 1740, 1744, 5, 127, 0, 0, 1741, 1744, 5, 78, 0, 0, 1742, 1744, 5, 181, 0, 0, 1743, 1736, 1, 0, 0, 0, 1743, 1739, 1, 0, 0, 0, 1743, 1741, 1, 0, 0, 0, 1743, 1742, 1, 0, 0, 0, 1743, 1744, 1, 0, 0, 0, 1744, 125, 1, 0, 0, 0, 1745, 1752, 7, 17, 0, 0, 1746, 1753, 3, 148, 74, 0, 1747, 1748, 5, 39, 0, 0, 1748, 1749, 3, 144, 72, 0, 1749, 1750, 5, 32, 0, 0, 1750, 1751, 3, 146, 73, 0, 1751, 1753, 1, 0, 0, 0, 1752, 1746, 1, 0, 0, 0, 1752, 1747, 1, 0, 0, 0, 1753, 127, 1, 0, 0, 0, 1754, 1755, 3, 218, 
109, 0, 1755, 1765, 5, 3, 0, 0, 1756, 1761, 3, 64, 32, 0, 1757, 1758, 5, 5, 0, 0, 1758, 1760, 3, 64, 32, 0, 1759, 1757, 1, 0, 0, 0, 1760, 1763, 1, 0, 0, 0, 1761, 1759, 1, 0, 0, 0, 1761, 1762, 1, 0, 0, 0, 1762, 1766, 1, 0, 0, 0, 1763, 1761, 1, 0, 0, 0, 1764, 1766, 5, 7, 0, 0, 1765, 1756, 1, 0, 0, 0, 1765, 1764, 1, 0, 0, 0, 1766, 1767, 1, 0, 0, 0, 1767, 1768, 5, 4, 0, 0, 1768, 129, 1, 0, 0, 0, 1769, 1770, 3, 220, 110, 0, 1770, 1783, 5, 3, 0, 0, 1771, 1773, 5, 62, 0, 0, 1772, 1771, 1, 0, 0, 0, 1772, 1773, 1, 0, 0, 0, 1773, 1774, 1, 0, 0, 0, 1774, 1779, 3, 64, 32, 0, 1775, 1776, 5, 5, 0, 0, 1776, 1778, 3, 64, 32, 0, 1777, 1775, 1, 0, 0, 0, 1778, 1781, 1, 0, 0, 0, 1779, 1777, 1, 0, 0, 0, 1779, 1780, 1, 0, 0, 0, 1780, 1784, 1, 0, 0, 0, 1781, 1779, 1, 0, 0, 0, 1782, 1784, 5, 7, 0, 0, 1783, 1772, 1, 0, 0, 0, 1783, 1782, 1, 0, 0, 0, 1783, 1784, 1, 0, 0, 0, 1784, 1785, 1, 0, 0, 0, 1785, 1787, 5, 4, 0, 0, 1786, 1788, 3, 118, 59, 0, 1787, 1786, 1, 0, 0, 0, 1787, 1788, 1, 0, 0, 0, 1788, 131, 1, 0, 0, 0, 1789, 1790, 3, 150, 75, 0, 1790, 1800, 5, 3, 0, 0, 1791, 1796, 3, 64, 32, 0, 1792, 1793, 5, 5, 0, 0, 1793, 1795, 3, 64, 32, 0, 1794, 1792, 1, 0, 0, 0, 1795, 1798, 1, 0, 0, 0, 1796, 1794, 1, 0, 0, 0, 1796, 1797, 1, 0, 0, 0, 1797, 1801, 1, 0, 0, 0, 1798, 1796, 1, 0, 0, 0, 1799, 1801, 5, 7, 0, 0, 1800, 1791, 1, 0, 0, 0, 1800, 1799, 1, 0, 0, 0, 1800, 1801, 1, 0, 0, 0, 1801, 1802, 1, 0, 0, 0, 1802, 1804, 5, 4, 0, 0, 1803, 1805, 3, 118, 59, 0, 1804, 1803, 1, 0, 0, 0, 1804, 1805, 1, 0, 0, 0, 1805, 1806, 1, 0, 0, 0, 1806, 1809, 5, 152, 0, 0, 1807, 1810, 3, 120, 60, 0, 1808, 1810, 3, 210, 105, 0, 1809, 1807, 1, 0, 0, 0, 1809, 1808, 1, 0, 0, 0, 1810, 133, 1, 0, 0, 0, 1811, 1813, 5, 149, 0, 0, 1812, 1814, 5, 116, 0, 0, 1813, 1812, 1, 0, 0, 0, 1813, 1814, 1, 0, 0, 0, 1814, 1815, 1, 0, 0, 0, 1815, 1820, 3, 54, 27, 0, 1816, 1817, 5, 5, 0, 0, 1817, 1819, 3, 54, 27, 0, 1818, 1816, 1, 0, 0, 0, 1819, 1822, 1, 0, 0, 0, 1820, 1818, 1, 0, 0, 0, 1820, 1821, 1, 0, 0, 0, 1821, 135, 1, 0, 0, 0, 1822, 
1820, 1, 0, 0, 0, 1823, 1824, 5, 109, 0, 0, 1824, 1825, 5, 40, 0, 0, 1825, 1830, 3, 140, 70, 0, 1826, 1827, 5, 5, 0, 0, 1827, 1829, 3, 140, 70, 0, 1828, 1826, 1, 0, 0, 0, 1829, 1832, 1, 0, 0, 0, 1830, 1828, 1, 0, 0, 0, 1830, 1831, 1, 0, 0, 0, 1831, 137, 1, 0, 0, 0, 1832, 1830, 1, 0, 0, 0, 1833, 1834, 5, 98, 0, 0, 1834, 1837, 3, 64, 32, 0, 1835, 1836, 7, 18, 0, 0, 1836, 1838, 3, 64, 32, 0, 1837, 1835, 1, 0, 0, 0, 1837, 1838, 1, 0, 0, 0, 1838, 139, 1, 0, 0, 0, 1839, 1842, 3, 64, 32, 0, 1840, 1841, 5, 45, 0, 0, 1841, 1843, 3, 190, 95, 0, 1842, 1840, 1, 0, 0, 0, 1842, 1843, 1, 0, 0, 0, 1843, 1845, 1, 0, 0, 0, 1844, 1846, 3, 142, 71, 0, 1845, 1844, 1, 0, 0, 0, 1845, 1846, 1, 0, 0, 0, 1846, 1849, 1, 0, 0, 0, 1847, 1848, 5, 175, 0, 0, 1848, 1850, 7, 19, 0, 0, 1849, 1847, 1, 0, 0, 0, 1849, 1850, 1, 0, 0, 0, 1850, 141, 1, 0, 0, 0, 1851, 1852, 7, 20, 0, 0, 1852, 143, 1, 0, 0, 0, 1853, 1854, 3, 64, 32, 0, 1854, 1855, 5, 155, 0, 0, 1855, 1864, 1, 0, 0, 0, 1856, 1857, 3, 64, 32, 0, 1857, 1858, 5, 158, 0, 0, 1858, 1864, 1, 0, 0, 0, 1859, 1860, 5, 157, 0, 0, 1860, 1864, 5, 127, 0, 0, 1861, 1862, 5, 156, 0, 0, 1862, 1864, 5, 155, 0, 0, 1863, 1853, 1, 0, 0, 0, 1863, 1856, 1, 0, 0, 0, 1863, 1859, 1, 0, 0, 0, 1863, 1861, 1, 0, 0, 0, 1864, 145, 1, 0, 0, 0, 1865, 1866, 3, 64, 32, 0, 1866, 1867, 5, 155, 0, 0, 1867, 1876, 1, 0, 0, 0, 1868, 1869, 3, 64, 32, 0, 1869, 1870, 5, 158, 0, 0, 1870, 1876, 1, 0, 0, 0, 1871, 1872, 5, 157, 0, 0, 1872, 1876, 5, 127, 0, 0, 1873, 1874, 5, 156, 0, 0, 1874, 1876, 5, 158, 0, 0, 1875, 1865, 1, 0, 0, 0, 1875, 1868, 1, 0, 0, 0, 1875, 1871, 1, 0, 0, 0, 1875, 1873, 1, 0, 0, 0, 1876, 147, 1, 0, 0, 0, 1877, 1878, 3, 64, 32, 0, 1878, 1879, 5, 155, 0, 0, 1879, 1885, 1, 0, 0, 0, 1880, 1881, 5, 156, 0, 0, 1881, 1885, 5, 155, 0, 0, 1882, 1883, 5, 157, 0, 0, 1883, 1885, 5, 127, 0, 0, 1884, 1877, 1, 0, 0, 0, 1884, 1880, 1, 0, 0, 0, 1884, 1882, 1, 0, 0, 0, 1885, 149, 1, 0, 0, 0, 1886, 1887, 7, 21, 0, 0, 1887, 1888, 5, 3, 0, 0, 1888, 1889, 3, 64, 32, 0, 1889, 1890, 5, 4, 
0, 0, 1890, 1891, 5, 152, 0, 0, 1891, 1893, 5, 3, 0, 0, 1892, 1894, 3, 156, 78, 0, 1893, 1892, 1, 0, 0, 0, 1893, 1894, 1, 0, 0, 0, 1894, 1895, 1, 0, 0, 0, 1895, 1897, 3, 160, 80, 0, 1896, 1898, 3, 126, 63, 0, 1897, 1896, 1, 0, 0, 0, 1897, 1898, 1, 0, 0, 0, 1898, 1899, 1, 0, 0, 0, 1899, 1900, 5, 4, 0, 0, 1900, 1972, 1, 0, 0, 0, 1901, 1902, 7, 22, 0, 0, 1902, 1903, 5, 3, 0, 0, 1903, 1904, 5, 4, 0, 0, 1904, 1905, 5, 152, 0, 0, 1905, 1907, 5, 3, 0, 0, 1906, 1908, 3, 156, 78, 0, 1907, 1906, 1, 0, 0, 0, 1907, 1908, 1, 0, 0, 0, 1908, 1910, 1, 0, 0, 0, 1909, 1911, 3, 158, 79, 0, 1910, 1909, 1, 0, 0, 0, 1910, 1911, 1, 0, 0, 0, 1911, 1912, 1, 0, 0, 0, 1912, 1972, 5, 4, 0, 0, 1913, 1914, 7, 23, 0, 0, 1914, 1915, 5, 3, 0, 0, 1915, 1916, 5, 4, 0, 0, 1916, 1917, 5, 152, 0, 0, 1917, 1919, 5, 3, 0, 0, 1918, 1920, 3, 156, 78, 0, 1919, 1918, 1, 0, 0, 0, 1919, 1920, 1, 0, 0, 0, 1920, 1921, 1, 0, 0, 0, 1921, 1922, 3, 160, 80, 0, 1922, 1923, 5, 4, 0, 0, 1923, 1972, 1, 0, 0, 0, 1924, 1925, 7, 24, 0, 0, 1925, 1926, 5, 3, 0, 0, 1926, 1928, 3, 64, 32, 0, 1927, 1929, 3, 152, 76, 0, 1928, 1927, 1, 0, 0, 0, 1928, 1929, 1, 0, 0, 0, 1929, 1931, 1, 0, 0, 0, 1930, 1932, 3, 154, 77, 0, 1931, 1930, 1, 0, 0, 0, 1931, 1932, 1, 0, 0, 0, 1932, 1933, 1, 0, 0, 0, 1933, 1934, 5, 4, 0, 0, 1934, 1935, 5, 152, 0, 0, 1935, 1937, 5, 3, 0, 0, 1936, 1938, 3, 156, 78, 0, 1937, 1936, 1, 0, 0, 0, 1937, 1938, 1, 0, 0, 0, 1938, 1939, 1, 0, 0, 0, 1939, 1940, 3, 160, 80, 0, 1940, 1941, 5, 4, 0, 0, 1941, 1972, 1, 0, 0, 0, 1942, 1943, 5, 164, 0, 0, 1943, 1944, 5, 3, 0, 0, 1944, 1945, 3, 64, 32, 0, 1945, 1946, 5, 5, 0, 0, 1946, 1947, 3, 34, 17, 0, 1947, 1948, 5, 4, 0, 0, 1948, 1949, 5, 152, 0, 0, 1949, 1951, 5, 3, 0, 0, 1950, 1952, 3, 156, 78, 0, 1951, 1950, 1, 0, 0, 0, 1951, 1952, 1, 0, 0, 0, 1952, 1953, 1, 0, 0, 0, 1953, 1955, 3, 160, 80, 0, 1954, 1956, 3, 126, 63, 0, 1955, 1954, 1, 0, 0, 0, 1955, 1956, 1, 0, 0, 0, 1956, 1957, 1, 0, 0, 0, 1957, 1958, 5, 4, 0, 0, 1958, 1972, 1, 0, 0, 0, 1959, 1960, 5, 165, 0, 0, 1960, 
1961, 5, 3, 0, 0, 1961, 1962, 3, 64, 32, 0, 1962, 1963, 5, 4, 0, 0, 1963, 1964, 5, 152, 0, 0, 1964, 1966, 5, 3, 0, 0, 1965, 1967, 3, 156, 78, 0, 1966, 1965, 1, 0, 0, 0, 1966, 1967, 1, 0, 0, 0, 1967, 1968, 1, 0, 0, 0, 1968, 1969, 3, 160, 80, 0, 1969, 1970, 5, 4, 0, 0, 1970, 1972, 1, 0, 0, 0, 1971, 1886, 1, 0, 0, 0, 1971, 1901, 1, 0, 0, 0, 1971, 1913, 1, 0, 0, 0, 1971, 1924, 1, 0, 0, 0, 1971, 1942, 1, 0, 0, 0, 1971, 1959, 1, 0, 0, 0, 1972, 151, 1, 0, 0, 0, 1973, 1974, 5, 5, 0, 0, 1974, 1975, 3, 34, 17, 0, 1975, 153, 1, 0, 0, 0, 1976, 1977, 5, 5, 0, 0, 1977, 1978, 3, 34, 17, 0, 1978, 155, 1, 0, 0, 0, 1979, 1980, 5, 153, 0, 0, 1980, 1982, 5, 40, 0, 0, 1981, 1983, 3, 64, 32, 0, 1982, 1981, 1, 0, 0, 0, 1983, 1984, 1, 0, 0, 0, 1984, 1982, 1, 0, 0, 0, 1984, 1985, 1, 0, 0, 0, 1985, 157, 1, 0, 0, 0, 1986, 1987, 5, 109, 0, 0, 1987, 1989, 5, 40, 0, 0, 1988, 1990, 3, 64, 32, 0, 1989, 1988, 1, 0, 0, 0, 1990, 1991, 1, 0, 0, 0, 1991, 1989, 1, 0, 0, 0, 1991, 1992, 1, 0, 0, 0, 1992, 159, 1, 0, 0, 0, 1993, 1994, 5, 109, 0, 0, 1994, 1995, 5, 40, 0, 0, 1995, 1996, 3, 162, 81, 0, 1996, 161, 1, 0, 0, 0, 1997, 1999, 3, 64, 32, 0, 1998, 2000, 3, 142, 71, 0, 1999, 1998, 1, 0, 0, 0, 1999, 2000, 1, 0, 0, 0, 2000, 2008, 1, 0, 0, 0, 2001, 2002, 5, 5, 0, 0, 2002, 2004, 3, 64, 32, 0, 2003, 2005, 3, 142, 71, 0, 2004, 2003, 1, 0, 0, 0, 2004, 2005, 1, 0, 0, 0, 2005, 2007, 1, 0, 0, 0, 2006, 2001, 1, 0, 0, 0, 2007, 2010, 1, 0, 0, 0, 2008, 2006, 1, 0, 0, 0, 2008, 2009, 1, 0, 0, 0, 2009, 163, 1, 0, 0, 0, 2010, 2008, 1, 0, 0, 0, 2011, 2012, 3, 82, 41, 0, 2012, 165, 1, 0, 0, 0, 2013, 2014, 3, 82, 41, 0, 2014, 167, 1, 0, 0, 0, 2015, 2016, 7, 25, 0, 0, 2016, 169, 1, 0, 0, 0, 2017, 2018, 5, 188, 0, 0, 2018, 171, 1, 0, 0, 0, 2019, 2022, 3, 64, 32, 0, 2020, 2022, 3, 28, 14, 0, 2021, 2019, 1, 0, 0, 0, 2021, 2020, 1, 0, 0, 0, 2022, 173, 1, 0, 0, 0, 2023, 2024, 7, 26, 0, 0, 2024, 175, 1, 0, 0, 0, 2025, 2026, 7, 27, 0, 0, 2026, 177, 1, 0, 0, 0, 2027, 2028, 3, 224, 112, 0, 2028, 179, 1, 0, 0, 0, 2029, 2030, 3, 224, 
112, 0, 2030, 181, 1, 0, 0, 0, 2031, 2032, 3, 224, 112, 0, 2032, 183, 1, 0, 0, 0, 2033, 2034, 3, 224, 112, 0, 2034, 185, 1, 0, 0, 0, 2035, 2036, 3, 224, 112, 0, 2036, 187, 1, 0, 0, 0, 2037, 2038, 3, 224, 112, 0, 2038, 189, 1, 0, 0, 0, 2039, 2040, 3, 224, 112, 0, 2040, 191, 1, 0, 0, 0, 2041, 2042, 3, 224, 112, 0, 2042, 193, 1, 0, 0, 0, 2043, 2044, 3, 224, 112, 0, 2044, 195, 1, 0, 0, 0, 2045, 2046, 3, 224, 112, 0, 2046, 197, 1, 0, 0, 0, 2047, 2048, 3, 224, 112, 0, 2048, 199, 1, 0, 0, 0, 2049, 2050, 3, 224, 112, 0, 2050, 201, 1, 0, 0, 0, 2051, 2052, 3, 224, 112, 0, 2052, 203, 1, 0, 0, 0, 2053, 2054, 3, 224, 112, 0, 2054, 205, 1, 0, 0, 0, 2055, 2056, 3, 224, 112, 0, 2056, 207, 1, 0, 0, 0, 2057, 2058, 3, 224, 112, 0, 2058, 209, 1, 0, 0, 0, 2059, 2060, 3, 224, 112, 0, 2060, 211, 1, 0, 0, 0, 2061, 2062, 3, 224, 112, 0, 2062, 213, 1, 0, 0, 0, 2063, 2064, 3, 224, 112, 0, 2064, 215, 1, 0, 0, 0, 2065, 2066, 3, 224, 112, 0, 2066, 217, 1, 0, 0, 0, 2067, 2068, 3, 224, 112, 0, 2068, 219, 1, 0, 0, 0, 2069, 2070, 3, 224, 112, 0, 2070, 221, 1, 0, 0, 0, 2071, 2072, 3, 224, 112, 0, 2072, 223, 1, 0, 0, 0, 2073, 2081, 5, 185, 0, 0, 2074, 2081, 3, 176, 88, 0, 2075, 2081, 5, 188, 0, 0, 2076, 2077, 5, 3, 0, 0, 2077, 2078, 3, 224, 112, 0, 2078, 2079, 5, 4, 0, 0, 2079, 2081, 1, 0, 0, 0, 2080, 2073, 1, 0, 0, 0, 2080, 2074, 1, 0, 0, 0, 2080, 2075, 1, 0, 0, 0, 2080, 2076, 1, 0, 0, 0, 2081, 225, 1, 0, 0, 0, 297, 229, 237, 244, 249, 255, 261, 263, 289, 296, 303, 309, 313, 318, 321, 328, 331, 335, 343, 347, 349, 353, 357, 361, 364, 371, 377, 383, 388, 399, 405, 409, 413, 416, 420, 426, 431, 440, 447, 453, 457, 461, 466, 472, 484, 488, 493, 496, 499, 504, 507, 521, 528, 535, 537, 540, 546, 551, 559, 564, 579, 585, 595, 600, 610, 614, 616, 620, 625, 627, 635, 641, 646, 653, 664, 667, 669, 676, 680, 687, 693, 699, 705, 710, 719, 724, 735, 740, 751, 756, 760, 776, 786, 791, 799, 811, 816, 824, 831, 834, 837, 844, 847, 850, 853, 857, 865, 870, 880, 885, 894, 901, 905, 909, 912, 920, 933, 936, 944, 953, 
// NOTE(review): this appears to be ANTLR-generated code — prefer regenerating
// from the grammar over hand-editing. The numbers below are the tail of the
// serialized ATN table consumed by antlr.ATNDeserializer.
957, 962, 992, 1004, 1009, 1021, 1027, 1034, 1038, 1048, 1051, 1057, 1063, 1072, 1075, 1079, 1081, 1083, 1092, 1099, 1106, 1112, 1117, 1125, 1130, 1139, 1150, 1157, 1161, 1164, 1168, 1171, 1179, 1190, 1196, 1198, 1206, 1213, 1220, 1225, 1227, 1233, 1242, 1247, 1254, 1258, 1260, 1263, 1271, 1275, 1278, 1284, 1288, 1293, 1300, 1309, 1313, 1315, 1319, 1328, 1333, 1335, 1348, 1351, 1360, 1371, 1378, 1381, 1386, 1390, 1393, 1396, 1401, 1405, 1410, 1413, 1416, 1421, 1425, 1428, 1435, 1440, 1449, 1454, 1457, 1465, 1469, 1477, 1480, 1482, 1491, 1494, 1496, 1500, 1504, 1508, 1511, 1522, 1527, 1531, 1535, 1538, 1543, 1554, 1558, 1560, 1564, 1567, 1574, 1579, 1590, 1596, 1601, 1607, 1614, 1621, 1626, 1629, 1632, 1635, 1640, 1645, 1652, 1656, 1660, 1670, 1679, 1682, 1691, 1695, 1703, 1712, 1715, 1724, 1727, 1730, 1733, 1743, 1752, 1761, 1765, 1772, 1779, 1783, 1787, 1796, 1800, 1804, 1809, 1813, 1820, 1830, 1837, 1842, 1845, 1849, 1863, 1875, 1884, 1893, 1897, 1907, 1910, 1919, 1928, 1931, 1937, 1951, 1955, 1966, 1971, 1984, 1991, 1999, 2004, 2008, 2021, 2080,
}
// Decode the compact serialized form into the runtime ATN, then pre-allocate
// one DFA per decision state; these DFAs are the caches used by the adaptive
// prediction algorithm at parse time.
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
atn := staticData.atn
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
decisionToDFA := staticData.decisionToDFA
for index, state := range atn.DecisionToState {
	decisionToDFA[index] = antlr.NewDFA(state, index)
}
}

// ParserInit initializes any static state used to implement Parser. By default the
// static state used to implement the parser is lazily initialized during the first call to
// NewParser(). You can call this function if you wish to initialize the static state ahead
// of time.
func ParserInit() {
	staticData := &parserParserStaticData
	// once.Do guarantees parserParserInit runs at most once, even if several
	// goroutines construct parsers concurrently.
	staticData.once.Do(parserParserInit)
}

// NewParser produces a new parser instance for the optional input antlr.TokenStream.
func NewParser(input antlr.TokenStream) *Parser { ParserInit() this := new(Parser) this.BaseParser = antlr.NewBaseParser(input) staticData := &parserParserStaticData this.Interpreter = antlr.NewParserATNSimulator(this, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache) this.RuleNames = staticData.ruleNames this.LiteralNames = staticData.literalNames this.SymbolicNames = staticData.symbolicNames this.GrammarFileName = "Parser.g4" return this } // Parser tokens. const ( ParserEOF = antlr.TokenEOF ParserSCOL = 1 ParserDOT = 2 ParserOPEN_PAR = 3 ParserCLOSE_PAR = 4 ParserCOMMA = 5 ParserASSIGN = 6 ParserSTAR = 7 ParserPLUS = 8 ParserMINUS = 9 ParserTILDE = 10 ParserPIPE2 = 11 ParserDIV = 12 ParserMOD = 13 ParserLT2 = 14 ParserGT2 = 15 ParserAMP = 16 ParserPIPE = 17 ParserLT = 18 ParserLT_EQ = 19 ParserGT = 20 ParserGT_EQ = 21 ParserEQ = 22 ParserNOT_EQ1 = 23 ParserNOT_EQ2 = 24 ParserABORT_ = 25 ParserACTION_ = 26 ParserADD_ = 27 ParserAFTER_ = 28 ParserALL_ = 29 ParserALTER_ = 30 ParserANALYZE_ = 31 ParserAND_ = 32 ParserAS_ = 33 ParserASC_ = 34 ParserATTACH_ = 35 ParserAUTOINCREMENT_ = 36 ParserBEFORE_ = 37 ParserBEGIN_ = 38 ParserBETWEEN_ = 39 ParserBY_ = 40 ParserCASCADE_ = 41 ParserCASE_ = 42 ParserCAST_ = 43 ParserCHECK_ = 44 ParserCOLLATE_ = 45 ParserCOLUMN_ = 46 ParserCOMMIT_ = 47 ParserCONFLICT_ = 48 ParserCONSTRAINT_ = 49 ParserCREATE_ = 50 ParserCROSS_ = 51 ParserCURRENT_DATE_ = 52 ParserCURRENT_TIME_ = 53 ParserCURRENT_TIMESTAMP_ = 54 ParserDATABASE_ = 55 ParserDEFAULT_ = 56 ParserDEFERRABLE_ = 57 ParserDEFERRED_ = 58 ParserDELETE_ = 59 ParserDESC_ = 60 ParserDETACH_ = 61 ParserDISTINCT_ = 62 ParserDROP_ = 63 ParserEACH_ = 64 ParserELSE_ = 65 ParserEND_ = 66 ParserESCAPE_ = 67 ParserEXCEPT_ = 68 ParserEXCLUSIVE_ = 69 ParserEXISTS_ = 70 ParserEXPLAIN_ = 71 ParserFAIL_ = 72 ParserFOR_ = 73 ParserFOREIGN_ = 74 ParserFROM_ = 75 ParserFULL_ = 76 ParserGLOB_ = 77 ParserGROUP_ = 78 ParserHAVING_ = 79 ParserIF_ = 80 ParserIGNORE_ = 81 
ParserIMMEDIATE_ = 82 ParserIN_ = 83 ParserINDEX_ = 84 ParserINDEXED_ = 85 ParserINITIALLY_ = 86 ParserINNER_ = 87 ParserINSERT_ = 88 ParserINSTEAD_ = 89 ParserINTERSECT_ = 90 ParserINTO_ = 91 ParserIS_ = 92 ParserISNULL_ = 93 ParserJOIN_ = 94 ParserKEY_ = 95 ParserLEFT_ = 96 ParserLIKE_ = 97 ParserLIMIT_ = 98 ParserMATCH_ = 99 ParserNATURAL_ = 100 ParserNO_ = 101 ParserNOT_ = 102 ParserNOTNULL_ = 103 ParserNULL_ = 104 ParserOF_ = 105 ParserOFFSET_ = 106 ParserON_ = 107 ParserOR_ = 108 ParserORDER_ = 109 ParserOUTER_ = 110 ParserPLAN_ = 111 ParserPRAGMA_ = 112 ParserPRIMARY_ = 113 ParserQUERY_ = 114 ParserRAISE_ = 115 ParserRECURSIVE_ = 116 ParserREFERENCES_ = 117 ParserREGEXP_ = 118 ParserREINDEX_ = 119 ParserRELEASE_ = 120 ParserRENAME_ = 121 ParserREPLACE_ = 122 ParserRESTRICT_ = 123 ParserRETURNING_ = 124 ParserRIGHT_ = 125 ParserROLLBACK_ = 126 ParserROW_ = 127 ParserROWS_ = 128 ParserSAVEPOINT_ = 129 ParserSELECT_ = 130 ParserSET_ = 131 ParserTABLE_ = 132 ParserTEMP_ = 133 ParserTEMPORARY_ = 134 ParserTHEN_ = 135 ParserTO_ = 136 ParserTRANSACTION_ = 137 ParserTRIGGER_ = 138 ParserUNION_ = 139 ParserUNIQUE_ = 140 ParserUPDATE_ = 141 ParserUSING_ = 142 ParserVACUUM_ = 143 ParserVALUES_ = 144 ParserVIEW_ = 145 ParserVIRTUAL_ = 146 ParserWHEN_ = 147 ParserWHERE_ = 148 ParserWITH_ = 149 ParserWITHOUT_ = 150 ParserFIRST_VALUE_ = 151 ParserOVER_ = 152 ParserPARTITION_ = 153 ParserRANGE_ = 154 ParserPRECEDING_ = 155 ParserUNBOUNDED_ = 156 ParserCURRENT_ = 157 ParserFOLLOWING_ = 158 ParserCUME_DIST_ = 159 ParserDENSE_RANK_ = 160 ParserLAG_ = 161 ParserLAST_VALUE_ = 162 ParserLEAD_ = 163 ParserNTH_VALUE_ = 164 ParserNTILE_ = 165 ParserPERCENT_RANK_ = 166 ParserRANK_ = 167 ParserROW_NUMBER_ = 168 ParserGENERATED_ = 169 ParserALWAYS_ = 170 ParserSTORED_ = 171 ParserTRUE_ = 172 ParserFALSE_ = 173 ParserWINDOW_ = 174 ParserNULLS_ = 175 ParserFIRST_ = 176 ParserLAST_ = 177 ParserFILTER_ = 178 ParserGROUPS_ = 179 ParserEXCLUDE_ = 180 ParserTIES_ = 181 ParserOTHERS_ = 182 
ParserDO_ = 183 ParserNOTHING_ = 184 ParserIDENTIFIER = 185 ParserNUMERIC_LITERAL = 186 ParserBIND_PARAMETER = 187 ParserSTRING_LITERAL = 188 ParserBLOB_LITERAL = 189 ParserSINGLE_LINE_COMMENT = 190 ParserMULTILINE_COMMENT = 191 ParserSPACES = 192 ParserUNEXPECTED_CHAR = 193 ) // Parser rules. const ( ParserRULE_parse = 0 ParserRULE_sql_stmt_list = 1 ParserRULE_sql_stmt = 2 ParserRULE_alter_table_stmt = 3 ParserRULE_analyze_stmt = 4 ParserRULE_attach_stmt = 5 ParserRULE_begin_stmt = 6 ParserRULE_commit_stmt = 7 ParserRULE_rollback_stmt = 8 ParserRULE_savepoint_stmt = 9 ParserRULE_release_stmt = 10 ParserRULE_create_index_stmt = 11 ParserRULE_indexed_column = 12 ParserRULE_create_table_stmt = 13 ParserRULE_column_def = 14 ParserRULE_type_name = 15 ParserRULE_column_constraint = 16 ParserRULE_signed_number = 17 ParserRULE_table_constraint = 18 ParserRULE_foreign_key_clause = 19 ParserRULE_conflict_clause = 20 ParserRULE_create_trigger_stmt = 21 ParserRULE_create_view_stmt = 22 ParserRULE_create_virtual_table_stmt = 23 ParserRULE_with_clause = 24 ParserRULE_cte_table_name = 25 ParserRULE_recursive_cte = 26 ParserRULE_common_table_expression = 27 ParserRULE_delete_stmt = 28 ParserRULE_delete_stmt_limited = 29 ParserRULE_detach_stmt = 30 ParserRULE_drop_stmt = 31 ParserRULE_expr = 32 ParserRULE_raise_function = 33 ParserRULE_literal_value = 34 ParserRULE_insert_stmt = 35 ParserRULE_returning_clause = 36 ParserRULE_upsert_clause = 37 ParserRULE_pragma_stmt = 38 ParserRULE_pragma_value = 39 ParserRULE_reindex_stmt = 40 ParserRULE_select_stmt = 41 ParserRULE_join_clause = 42 ParserRULE_select_core = 43 ParserRULE_factored_select_stmt = 44 ParserRULE_simple_select_stmt = 45 ParserRULE_compound_select_stmt = 46 ParserRULE_table_or_subquery = 47 ParserRULE_result_column = 48 ParserRULE_join_operator = 49 ParserRULE_join_constraint = 50 ParserRULE_compound_operator = 51 ParserRULE_update_stmt = 52 ParserRULE_assignment_list = 53 ParserRULE_assignment = 54 
ParserRULE_column_name_list = 55 ParserRULE_update_stmt_limited = 56 ParserRULE_qualified_table_name = 57 ParserRULE_vacuum_stmt = 58 ParserRULE_filter_clause = 59 ParserRULE_window_defn = 60 ParserRULE_over_clause = 61 ParserRULE_frame_spec = 62 ParserRULE_frame_clause = 63 ParserRULE_simple_function_invocation = 64 ParserRULE_aggregate_function_invocation = 65 ParserRULE_window_function_invocation = 66 ParserRULE_common_table_stmt = 67 ParserRULE_order_by_stmt = 68 ParserRULE_limit_stmt = 69 ParserRULE_ordering_term = 70 ParserRULE_asc_desc = 71 ParserRULE_frame_left = 72 ParserRULE_frame_right = 73 ParserRULE_frame_single = 74 ParserRULE_window_function = 75 ParserRULE_offset = 76 ParserRULE_default_value = 77 ParserRULE_partition_by = 78 ParserRULE_order_by_expr = 79 ParserRULE_order_by_expr_asc_desc = 80 ParserRULE_expr_asc_desc = 81 ParserRULE_initial_select = 82 ParserRULE_recursive_select = 83 ParserRULE_unary_operator = 84 ParserRULE_error_message = 85 ParserRULE_module_argument = 86 ParserRULE_column_alias = 87 ParserRULE_keyword = 88 ParserRULE_name = 89 ParserRULE_function_name = 90 ParserRULE_schema_name = 91 ParserRULE_table_name = 92 ParserRULE_table_or_index_name = 93 ParserRULE_column_name = 94 ParserRULE_collation_name = 95 ParserRULE_foreign_table = 96 ParserRULE_index_name = 97 ParserRULE_trigger_name = 98 ParserRULE_view_name = 99 ParserRULE_module_name = 100 ParserRULE_pragma_name = 101 ParserRULE_savepoint_name = 102 ParserRULE_table_alias = 103 ParserRULE_transaction_name = 104 ParserRULE_window_name = 105 ParserRULE_alias = 106 ParserRULE_filename = 107 ParserRULE_base_window_name = 108 ParserRULE_simple_func = 109 ParserRULE_aggregate_func = 110 ParserRULE_table_function_name = 111 ParserRULE_any_name = 112 ) // IParseContext is an interface to support dynamic dispatch. type IParseContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser // IsParseContext differentiates from other interfaces. 
IsParseContext() } type ParseContext struct { *antlr.BaseParserRuleContext parser antlr.Parser } func NewEmptyParseContext() *ParseContext { var p = new(ParseContext) p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) p.RuleIndex = ParserRULE_parse return p } func (*ParseContext) IsParseContext() {} func NewParseContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ParseContext { var p = new(ParseContext) p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) p.parser = parser p.RuleIndex = ParserRULE_parse return p } func (s *ParseContext) GetParser() antlr.Parser { return s.parser } func (s *ParseContext) EOF() antlr.TerminalNode { return s.GetToken(ParserEOF, 0) } func (s *ParseContext) AllSql_stmt_list() []ISql_stmt_listContext { children := s.GetChildren() len := 0 for _, ctx := range children { if _, ok := ctx.(ISql_stmt_listContext); ok { len++ } } tst := make([]ISql_stmt_listContext, len) i := 0 for _, ctx := range children { if t, ok := ctx.(ISql_stmt_listContext); ok { tst[i] = t.(ISql_stmt_listContext) i++ } } return tst } func (s *ParseContext) Sql_stmt_list(i int) ISql_stmt_listContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { if _, ok := ctx.(ISql_stmt_listContext); ok { if j == i { t = ctx.(antlr.RuleContext) break } j++ } } if t == nil { return nil } return t.(ISql_stmt_listContext) } func (s *ParseContext) GetRuleContext() antlr.RuleContext { return s } func (s *ParseContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } func (s *ParseContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(ParserListener); ok { listenerT.EnterParse(s) } } func (s *ParseContext) ExitRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(ParserListener); ok { listenerT.ExitParse(s) } } func (s *ParseContext) Accept(visitor antlr.ParseTreeVisitor) interface{} 
{ switch t := visitor.(type) { case ParserVisitor: return t.VisitParse(s) default: return t.VisitChildren(s) } } func (p *Parser) Parse() (localctx IParseContext) { this := p _ = this localctx = NewParseContext(p, p.GetParserRuleContext(), p.GetState()) p.EnterRule(localctx, 0, ParserRULE_parse) var _la int defer func() { p.ExitRule() }() defer func() { if err := recover(); err != nil { if v, ok := err.(antlr.RecognitionException); ok { localctx.SetException(v) p.GetErrorHandler().ReportError(p, v) p.GetErrorHandler().Recover(p, v) } else { panic(err) } } }() p.EnterOuterAlt(localctx, 1) p.SetState(229) p.GetErrorHandler().Sync(p) _la = p.GetTokenStream().LA(1) for (((_la)&-(0x1f+1)) == 0 && ((1< 2 { return nil, fmt.Errorf("unexpected number of changes found: %d", len(changes)) } parseutil.RenameColumn(modify, r) } if r, ok := stmt.RenameTable(); ok { changes = parseutil.RenameTable(changes, r) } return changes, nil } func isnull(t antlr.Tree) bool { x, ok := t.(*ExprContext) if !ok || x.GetChildCount() != 1 { return false } l, ok := x.GetChild(0).(*Literal_valueContext) return ok && l.GetChildCount() == 1 && len(l.GetTokens(ParserNULL_)) > 0 } func unquote(s string) string { if len(s) < 2 || s[0] != '`' || s[len(s)-1] != '`' { return s } return s[1 : len(s)-1] } atlas-0.7.2/cmd/atlas/internal/sqlparse/sqliteparse/sqliteparse_test.go000066400000000000000000000060641431455511600264320ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package sqliteparse_test import ( "strconv" "testing" "ariga.io/atlas/cmd/atlas/internal/sqlparse/sqliteparse" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestFixChange_RenameColumns(t *testing.T) { var p sqliteparse.FileParser _, err := p.FixChange( nil, "ALTER TABLE t RENAME COLUMN c1 TO c2", nil, ) require.Error(t, err) _, err = p.FixChange( nil, "ALTER TABLE t RENAME COLUMN c1 TO c2", schema.Changes{&schema.AddTable{}}, ) require.Error(t, err) changes, err := p.FixChange( nil, "ALTER TABLE t RENAME COLUMN c1 TO c2", schema.Changes{ &schema.ModifyTable{ Changes: schema.Changes{ &schema.DropColumn{C: schema.NewColumn("c1")}, &schema.AddColumn{C: schema.NewColumn("c2")}, }, }, }, ) require.NoError(t, err) require.Equal( t, schema.Changes{ &schema.ModifyTable{ Changes: schema.Changes{ &schema.RenameColumn{From: schema.NewColumn("c1"), To: schema.NewColumn("c2")}, }, }, }, changes, ) } func TestFixChange_RenameTable(t *testing.T) { var p sqliteparse.FileParser changes, err := p.FixChange( nil, "ALTER TABLE t1 RENAME TO t2", schema.Changes{ &schema.DropTable{T: schema.NewTable("t1")}, &schema.AddTable{T: schema.NewTable("t2")}, &schema.AddTable{T: schema.NewTable("t3")}, }, ) require.NoError(t, err) require.Equal( t, schema.Changes{ &schema.RenameTable{From: schema.NewTable("t1"), To: schema.NewTable("t2")}, &schema.AddTable{T: schema.NewTable("t3")}, }, changes, ) } func TestColumnFilledBefore(t *testing.T) { for i, tt := range []struct { file string pos int wantFilled bool wantErr bool }{ { file: `UPDATE t SET c = NULL;`, pos: 100, }, { file: "UPDATE `t` SET c = 2;", pos: 100, wantFilled: true, }, { file: `UPDATE t SET c = 2 WHERE c IS NULL;`, pos: 100, wantFilled: true, }, { file: "UPDATE `t` SET `c` = 2 WHERE `c` IS NULL;", pos: 100, wantFilled: true, }, { file: `UPDATE t SET c = 2 WHERE c IS NOT NULL;`, pos: 100, wantFilled: false, }, { file: `UPDATE t SET c = 2 WHERE c <> NULL`, pos: 100, wantFilled: 
false, }, { file: ` UPDATE t1 SET c = 2 WHERE c IS NULL; UPDATE t SET c = 2 WHERE c IS NULL; `, pos: 2, wantFilled: false, }, { file: ` UPDATE t SET c = 2 WHERE c IS NULL; UPDATE t1 SET c = 2 WHERE c IS NULL; `, pos: 30, wantFilled: true, }, } { t.Run(strconv.Itoa(i), func(t *testing.T) { var ( p sqliteparse.FileParser f = migrate.NewLocalFile("file", []byte(tt.file)) ) filled, err := p.ColumnFilledBefore(f, schema.NewTable("t"), schema.NewColumn("c"), tt.pos) require.Equal(t, err != nil, tt.wantErr, err) require.Equal(t, filled, tt.wantFilled) }) } } atlas-0.7.2/cmd/atlas/internal/sqlparse/sqlparse.go000066400000000000000000000031541431455511600223320ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqlparse import ( "sync" "ariga.io/atlas/cmd/atlas/internal/sqlparse/myparse" "ariga.io/atlas/cmd/atlas/internal/sqlparse/pgparse" "ariga.io/atlas/cmd/atlas/internal/sqlparse/sqliteparse" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/mysql" "ariga.io/atlas/sql/postgres" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlite" ) // A Parser represents an SQL file parser used to fix, search and enrich schema.Changes. type Parser interface { // FixChange fixes the changes according to the given statement. FixChange(d migrate.Driver, stmt string, changes schema.Changes) (schema.Changes, error) // ColumnFilledBefore checks if the column was filled with values before the given position // in the file. For example: // // UPDATE SET = // UPDATE
SET = WHERE IS NULL // ColumnFilledBefore(migrate.File, *schema.Table, *schema.Column, int) (bool, error) } // drivers specific fixers. var drivers sync.Map // Register a fixer with the given name. func Register(name string, f Parser) { drivers.Store(name, f) } // ParserFor returns a ChangesFixer for the given driver. func ParserFor(name string) Parser { f, ok := drivers.Load(name) if ok { return f.(Parser) } return nil } func init() { Register(mysql.DriverName, &myparse.Parser{}) Register(postgres.DriverName, &pgparse.Parser{}) Register(sqlite.DriverName, &sqliteparse.FileParser{}) } atlas-0.7.2/cmd/atlas/internal/update/000077500000000000000000000000001431455511600175765ustar00rootroot00000000000000atlas-0.7.2/cmd/atlas/internal/update/update.go000066400000000000000000000101121431455511600214020ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package update import ( "encoding/json" "fmt" "io" "io/ioutil" "net/http" "os" "path/filepath" "time" "github.com/mitchellh/go-homedir" "golang.org/x/mod/semver" ) type ( // Store is where the latest release data is stored on CLI host. Store struct { Version string `json:"version"` URL string `json:"url"` CheckedAt time.Time `json:"checkedat"` } // LatestRelease holds the required fields from https://api.github.com/repos/ariga/atlas/releases/latest. LatestRelease struct { Version string `json:"tag_name"` URL string `json:"html_url"` } ) const ( // AtlasNoUpdateNotifier when enabled it cancels checking for update AtlasNoUpdateNotifier = "ATLAS_NO_UPDATE_NOTIFIER" ) // Check implements a notification to the user when a later release is available // 1. Check release file ~/.atlas/release.json for latest known release and poll time // 2. 
If last poll was more than 24h, poll GitHub public API https://docs.github.com/en/rest/reference/releases#get-the-latest-release // 3. Store the latest release metadata // 4. If current build Version, that is not development, is lower than the latest release, notify user func Check(version string, logF func(i ...any)) { if !enabled(version) { return } p, err := homedir.Expand("~/.atlas") if err != nil { return } ok, message, err := shouldUpdate(version, p, latestReleaseFromGithub) if err != nil || !ok { return } logF(message) } func enabled(version string) bool { if _, ok := os.LookupEnv(AtlasNoUpdateNotifier); ok { return false } if _, ok := os.LookupEnv("GITHUB_ACTIONS"); ok { return false } if ok := semver.IsValid(version); !ok { return false } return true } func shouldUpdate(version string, path string, latestReleaseF func() (*LatestRelease, error)) (bool, string, error) { r := localStore(path) if shouldSkip(r) { return false, "", nil } l, err := latestReleaseF() if err != nil { return false, "", err } if err := saveStore(path, l, time.Now()); err != nil { return false, "", err } if semver.Compare(version, l.Version) != -1 { return false, "", nil } return true, fmt.Sprintf("A new version of Atlas is available (%s):\n%s", l.Version, l.URL), nil } func latestReleaseFromGithub() (*LatestRelease, error) { req, err := http.NewRequest(http.MethodGet, "https://api.github.com/repos/ariga/atlas/tags", nil) if err != nil { return nil, err } req.Header.Set("Content-Type", "application/json; charset=utf-8") req.Header.Set("Accept", "application/vnd.github.v3+json") // https://docs.github.com/en/rest/overview/resources-in-the-rest-api#user-agent-required req.Header.Set("User-Agent", "Ariga-Atlas-CLI") resp, err := http.DefaultClient.Do(req) if err != nil { return nil, err } b, err := io.ReadAll(resp.Body) if err != nil { return nil, err } var tags []tag if err := json.Unmarshal(b, &tags); err != nil { return nil, err } var max tag for _, tag := range tags { if 
!semver.IsValid(tag.Name) { continue } if semver.Compare(tag.Name, max.Name) > 0 { max = tag } } return &LatestRelease{ Version: max.Name, URL: fmt.Sprintf("https://github.com/ariga/atlas/releases/tag/%s", max.Name), }, nil } func shouldSkip(r *Store) bool { if r == nil { return false } return time.Since(r.CheckedAt).Hours() < 24 } func localStore(path string) *Store { b, err := ioutil.ReadFile(fileLocation(path)) if err != nil { return nil } var s Store if err := json.Unmarshal(b, &s); err != nil { return nil } return &s } func saveStore(path string, l *LatestRelease, t time.Time) error { s := Store{Version: l.Version, URL: l.URL, CheckedAt: t} err := os.MkdirAll(path, 0755) if err != nil { return err } b, err := json.Marshal(&s) if err != nil { return err } return ioutil.WriteFile(fileLocation(path), b, 0600) } func fileLocation(p string) string { return filepath.Join(p, "release.json") } type tag struct { Name string `json:"name"` } atlas-0.7.2/cmd/atlas/internal/update/update_test.go000066400000000000000000000060551431455511600224540ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package update import ( "encoding/json" "fmt" "io/ioutil" "testing" "time" "github.com/stretchr/testify/require" ) func TestCheckForUpdate(t *testing.T) { url := "https://github.com/ariga/atlas/releases/tag/test" tests := []struct { name string currentVersion string latestVersion string store *Store latestRelease *LatestRelease shouldNotify bool message string }{ { name: "stale store fetch and update newer version", currentVersion: "v1.2.3", latestVersion: "v2.2.3", store: &Store{Version: "v1.2.3", URL: url, CheckedAt: time.Now().Add(-25 * time.Hour)}, latestRelease: &LatestRelease{Version: "v2.2.3", URL: url}, shouldNotify: true, message: fmt.Sprintf("A new version of Atlas is available (v2.2.3):\n%s", url), }, { name: "fresh store do nothing", currentVersion: "v1.2.3", latestVersion: "v2.2.3", store: &Store{Version: "v1.2.3", URL: url, CheckedAt: time.Now().Add(-23 * time.Hour)}, shouldNotify: false, }, { name: "no store fetch and update newer version", currentVersion: "v1.2.3", latestVersion: "v2.2.3", latestRelease: &LatestRelease{Version: "v2.2.3", URL: url}, shouldNotify: true, message: fmt.Sprintf("A new version of Atlas is available (v2.2.3):\n%s", url), }, { name: "no store fetch and update newer minor version", currentVersion: "v1.2.3", latestVersion: "v1.3.3", latestRelease: &LatestRelease{Version: "v2.2.3", URL: url}, shouldNotify: true, message: fmt.Sprintf("A new version of Atlas is available (v2.2.3):\n%s", url), }, { name: "no store fetch and update newer patch version", currentVersion: "v1.2.3", latestVersion: "v1.2.4", latestRelease: &LatestRelease{Version: "v2.2.3", URL: url}, shouldNotify: true, message: fmt.Sprintf("A new version of Atlas is available (v2.2.3):\n%s", url), }, { name: "no store fetch and update newer patch version - canary", currentVersion: "v1.2.3-6539f2704b5d-canary", latestVersion: "v1.2.4", latestRelease: &LatestRelease{Version: "v2.2.3", URL: url}, shouldNotify: true, message: fmt.Sprintf("A new version of Atlas is 
available (v2.2.3):\n%s", url), }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { p := t.TempDir() if tt.store != nil { b, err := json.Marshal(tt.store) require.NoError(t, err) require.NoError(t, ioutil.WriteFile(fileLocation(p), b, 0600)) } var f func() (*LatestRelease, error) if tt.latestRelease != nil { f = func() (*LatestRelease, error) { return tt.latestRelease, nil } } ok, m, err := shouldUpdate(tt.currentVersion, p, f) require.NoError(t, err) require.Equal(t, tt.shouldNotify, ok) require.Equal(t, tt.message, m) }) } } atlas-0.7.2/cmd/atlas/main.go000066400000000000000000000015671431455511600157640ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package main import ( "context" "os" "os/signal" "ariga.io/atlas/cmd/atlas/internal/cmdapi" _ "ariga.io/atlas/cmd/atlas/internal/docker" _ "ariga.io/atlas/sql/mysql" _ "ariga.io/atlas/sql/mysql/mysqlcheck" _ "ariga.io/atlas/sql/postgres" _ "ariga.io/atlas/sql/postgres/postgrescheck" _ "ariga.io/atlas/sql/sqlite" _ "ariga.io/atlas/sql/sqlite/sqlitecheck" _ "github.com/go-sql-driver/mysql" _ "github.com/lib/pq" _ "github.com/mattn/go-sqlite3" ) func main() { ctx, _ := signal.NotifyContext(context.Background(), os.Interrupt, os.Kill) cmdapi.Root.SetOut(os.Stdout) err := cmdapi.Root.ExecuteContext(ctx) cmdapi.CheckForUpdate() if err != nil { os.Exit(1) } } atlas-0.7.2/doc/000077500000000000000000000000001431455511600133765ustar00rootroot00000000000000atlas-0.7.2/doc/md/000077500000000000000000000000001431455511600137765ustar00rootroot00000000000000atlas-0.7.2/doc/md/about.md000066400000000000000000000010431431455511600154300ustar00rootroot00000000000000--- title: About id: about slug: /about --- # About Atlas Atlas is an open-source project maintained by [Ariga Technologies Ltd](https://ariga.io), a tech company founded by the 
maintainers of [Ent](https://entgo.io). ### Reaching Out If you have a question or want to chat with the Atlas community, join our [Discord Server](https://discord.com/invite/QhsmBAWzrC) or [open an issue](https://github.com/ariga/atlas/issues) on our [GitHub Repo](https://github.com/ariga/atlas).atlas-0.7.2/doc/md/atlas-schema/000077500000000000000000000000001431455511600163405ustar00rootroot00000000000000atlas-0.7.2/doc/md/atlas-schema/input.md000066400000000000000000000047051431455511600200270ustar00rootroot00000000000000--- id: input-variables slug: /atlas-schema/input-variables title: Input Variables --- In some cases, it is desirable to reuse an Atlas HCL document in different contexts. For example, many organizations manage a multi-tenant architecture where the same database schema is replicated per tenant. For this reason, the Atlas DDL supports input variables. Input variables are defined using the `variable` block: ```hcl variable "comment" { type = string // | int | bool default = "rotemtam" } ``` Once defined, their value can be referenced using `var.`: ```hcl schema "main" { comment = var.comment } ``` Finally, input variables are passed to Atlas in the `schema apply` command using the `--var` flag: ```shell atlas schema apply -u ... -f atlas.hcl --var comment="hello" ``` If a variable is not set from the command line, Atlas tries to use its default value. If no default value is set, an error is returned: ```text schemahcl: failed decoding: input value "tenant" expected but missing ``` ### Variable schema names Returning to the use case we described above, let's see how we can use input variables to manage a multi-tenant architecture. First, we define our schema in a file named `multi.hcl`: ```hcl title="multi.hcl" // Define the input variable that contains the tenant name. variable "tenant" { type = string } // Define the schema, "tenant" here is a placeholder for the final // schema name that will be defined at runtime. 
schema "tenant" { // Reference to the input variable. name = var.tenant } table "users" { // Refer to the "tenant" schema. It's actual name will be // defined at runtime. schema = schema.tenant column "id" { type = int } } ``` Now suppose we have two tenants, `jerry` and `george`. We can apply the same schema twice: Once for Jerry: ```text atlas schema apply -u mysql://user:pass@localhost:3306/ --schema jerry --var tenant=jerry ``` Observe the generated queries apply to the `jerry` schema: ```text -- Planned Changes: -- Add new schema named "jerry" CREATE DATABASE `jerry` -- Create "users" table CREATE TABLE `jerry`.`users` (`id` int NOT NULL) ✔ Apply ``` And again for George: ```text atlas schema apply -u mysql://user:pass@localhost:3306/ --schema george --var tenant=george ``` The generated queries create the `george` schema: ```text -- Planned Changes: -- Add new schema named "george" CREATE DATABASE `george` -- Create "users" table CREATE TABLE `george`.`users` (`id` int NOT NULL) ✔ Apply ``` atlas-0.7.2/doc/md/atlas-schema/projects.md000066400000000000000000000102201431455511600205060ustar00rootroot00000000000000--- title: Project Structure id: projects slug: /atlas-schema/projects --- ### Project Files Project files provide a convenient way to describe and interact with multiple environments when working with Atlas. A project file is a file named `atlas.hcl` and contains one or more `env` blocks. For example: ```hcl // Define an environment named "local" env "local" { // Declare where the schema definition resides. // Also supported: // src = "./dir/with/schema" // src = ["multi.hcl", "file.hcl"] src = "./project/schema.hcl" // Define the URL of the database which is managed in // this environment. url = "mysql://localhost:3306" // Define the URL of the Dev Database for this environment // See: https://atlasgo.io/concepts/dev-database dev = "mysql://localhost:3307" // The schemas in the database that are managed by Atlas. 
schemas = ["users", "admin"] } env "dev" { // ... a different env } ``` Once defined, a project's environment can be worked against using the `--env` flag. For example: ```shell atlas schema apply --env local ``` Will run the `schema apply` command against the database that is defined for the `local` environment. ### Projects with Versioned Migrations Environments may declare a `migration` block to configure how versioned migrations work in the specific environment: ```hcl env "local" { // .. migration { // URL where the migration directory resides. Only filesystem directories // are currently supported but more options will be added in the future. dir = "file://migrations" // Format of the migration directory: atlas | flyway | liquibase | goose | golang-migrate format = atlas } } ``` Once defined, `migrate` commands can use this configuration, for example: ```shell atlas migrate validate --env local ``` Will run the `migrate validate` command against the Dev Database defined in the `local` environment. ### Configure Migration Linting Project files may declare `lint` blocks to configure how migration linting run in a specific environment or globally. ```hcl lint { destructive { // By default, destructive changes cause migration linting to error // on exit (code 1). Setting `error` to false disables this behavior. error = false } // Custom logging can be enabled using the `log` attribute. log = < In MySQL and MariaDB, the `schema` resource can contain the `charset` and `collate` attributes. Read more about them in [MySQL](https://dev.mysql.com/doc/refman/8.0/en/charset.html) or [MariaDB](https://mariadb.com/kb/en/setting-character-sets-and-collations/) websites. ```hcl # Schema with attributes. schema "market" { charset = "utf8mb4" collate = "utf8mb4_0900_ai_ci" comment = "A schema comment" } # Schema without attributes. 
schema "orders" {} ``` ```hcl schema "public" { comment = "A schema comment" } schema "private" {} ``` Atlas does not support [attached databases](https://www.sqlite.org/lang_attach.html), and support only the default database (i.e. `main`). ```hcl schema "main" {} ``` ## Table A `table` describes a table in a SQL database. A table hold its columns, indexes, constraints, and additional attributes that are supported by the different drivers. ```hcl table "users" { schema = schema.public column "id" { type = int } column "name" { type = varchar(255) } column "manager_id" { type = int } primary_key { columns = [ column.id ] } index "idx_name" { columns = [ column.name ] unique = true } foreign_key "manager_fk" { columns = [column.manager_id] ref_columns = [column.id] on_delete = CASCADE on_update = NO_ACTION } } ``` ### Check A `check` is a child resource of a `table` that describes a `CHECK` constraint. ```hcl table "products" { column "price" { type = float } check "positive price" { expr = "price > 0" } } ``` ### Partitions Table partitioning refers to splitting logical large tables into smaller physical ones. :::note Atlas currently only supports PostgreSQL. Support for the remaining dialects will be added in future versions. ::: ```hcl table "logs" { schema = schema.public column "date" { type = date } column "text" { type = integer } partition { type = RANGE columns = [column.date] } } table "metrics" { schema = schema.public column "x" { type = integer } column "y" { type = integer } partition { type = RANGE by { column = column.x } by { expr = "floor(y)" } } } ``` ### Table Qualification In some cases, an Atlas DDL document may contain multiple tables of the same name. This usually happens when the same table name appears in two different schemas. In these cases, the table names must be disambiguated by using resource [qualifers](/guides/ddl.md#qualifiers). 
The following document describes a database that contains two schemas named `a` and `b`, and both of them contain a table named `users`. ```hcl schema "a" {} schema "b" {} table "a" "users" { schema = schema.a // .. columns } table "b" "users" { schema = schema.b // .. columns } ``` ## Column A `column` is a child resource of a `table`. ```hcl column "name" { type = text null = false } column "age" { type = integer default = 42 } column "active" { type = tinyint(1) default = true } ``` #### Properties | Name | Kind | Type | Description | |---------|-----------|-------------------------|------------------------------------------------------------| | null | attribute | bool | Defines whether the column is nullable. | | type | attribute | *schemahcl.Type | Defines the type of data that can be stored in the column. | | default | attribute | *schemahcl.LiteralValue | Defines the default value of the column. | ### Generated Columns Generated columns are columns whose their values are computed using other columns or by deterministic expressions. ```hcl table "users" { schema = schema.test column "a" { type = int } column "b" { type = int # In MySQL, generated columns are VIRTUAL by default. as = "a * 2" } column "c" { type = int as { expr = "a * b" type = STORED } } } ``` ```hcl table "users" { schema = schema.test column "a" { type = int } column "b" { type = int # In PostgreSQL, generated columns are STORED by default. as = "a * 2" } column "c" { type = int as { expr = "a * b" type = STORED } } } ``` ```hcl table "users" { schema = schema.test column "a" { type = int } column "b" { type = int # In SQLite, generated columns are VIRTUAL by default. as = "a * 2" } column "c" { type = int as { expr = "a * b" type = STORED } } } ``` :::info Note, it is recommended to use the [`--dev-url`](../concepts/dev-database) option when generated columns are used. 
::: ## Column Types The SQL dialects supported by Atlas (Postgres, MySQL, MariaDB, and SQLite) vary in the types they support. At this point, the Atlas DDL does not attempt to abstract away the differences between various databases. This means that the schema documents are tied to a specific database engine and version. This may change in a future version of Atlas as we plan to add "Virtual Types" support. This section lists the various types that are supported in each database. For a full list of supported column types, [click here](sql-types.md). ## Primary Key A `primary_key` is a child resource of a `table`, and it defines the table's primary key. #### Example ```hcl primary_key { columns = [column.id] } ``` #### Properties | Name | Kind | Type | Description | |---------|-----------|--------------------------|----------------------------------------------------------------| | columns | resource | reference (list) | A list of references to columns that comprise the primary key. | ## Foreign Key Foreign keys are child resources of a `table`, and it defines some columns in the table as references to columns in other tables. #### Example ```hcl foreign_key "manager_fk" { columns = [column.manager_id] ref_columns = [column.id] on_delete = CASCADE on_update = NO_ACTION } ``` #### Referencing Qualified Tables If a foreign key references a column in a [qualified](#table-qualification) table, it is referenced using `table...column.`: ```hcl {3} foreign_key "manager_fk" { columns = [column.manager_id] ref_columns = [table.other_schema.users.column.id] on_delete = CASCADE on_update = NO_ACTION } ``` #### Properties | Name | Kind | Type | Description | |-------------|-----------|------------------------|-------------------------------------------| | columns | attribute | reference (list) | The columns that reference other columns. | | ref_columns | attribute | reference (list) | The referenced columns. 
| | on_update | attribute | schema.ReferenceOption | Defines what to do on update. | | on_delete | attribute | schema.ReferenceOption | Defines what to do on delete. | ## Index Indexes are child resources of a `table`, and it defines an index on the table. #### Example ```hcl index "idx_name" { columns = [ column.name ] unique = true } index "idx_name" { on { column = column.rank } on { column = column.score desc = true } unique = true } index "idx_name" { type = GIN columns = [column.data] } index "idx_range" { type = BRIN columns = [column.range] page_per_range = 128 } index "idx_include" { columns = [column.range] include = [column.version] } ``` #### Properties | Name | Kind | Type | Description | |-----------|-----------|-------------------------|----------------------------------------------------------------| | unique | attribute | boolean | Defines whether a uniqueness constraint is set on the index. | | type | attribute | IndexType (enum) | Defines the index type. e.g. `HASH`, `GIN`, `FULLTEXT`. | | columns | attribute | reference (list) | The columns that comprise the index. | | on | resource | schema.IndexPart (list) | The index parts that comprise the index. | | options | attribute | schema.Attr | Additional driver specific attributes. e.g. `page_per_range` | ### Index Expressions Index expressions allow setting indexes over functions or computed expressions. Supported by PostgreSQL, SQLite and MySQL8. ```hcl {9-16} table "t" { schema = schema.test column "c1" { type = int } column "c2" { type = int } index "i" { on { expr = "c1 - c2" } on { expr = "c2 - c1" } } } ``` :::info Note, it is recommended to use the [`--dev-url`](../concepts/dev-database) option when index expressions are used. ::: ### Partial Indexes [Partial indexes](https://www.postgresql.org/docs/current/indexes-partial.html) allow setting indexes over subset of the table. Supported by PostgreSQL and SQLite. 
```hcl {11} table "t" { schema = schema.public column "b" { type = bool } column "c" { type = int } index "i" { columns = [column.c] where = "b AND c > 0" } } ``` :::info Note, it is recommended to use the [`--dev-url`](../concepts/dev-database) option when partial indexes are used. ::: ### Index Prefixes [Index prefixes](https://dev.mysql.com/doc/refman/8.0/en/column-indexes.html#column-indexes-prefix) allow setting an index on the first `N` characters of string columns. Supported by MySQL and MariaDB. ```hcl {9} table "users" { schema = schema.test column "name" { type = varchar(255) } index "user_name" { on { column = column.name prefix = 128 } } } ``` ## Comment The `comment` attribute is an attribute of `schema`, `table`, `column`, and `index`. ```hcl schema "public" { comment = "A schema comment" } table "users" { schema = schema.public column "name" { type = text comment = "A column comment" } index "name_idx" { columns = [column.name] } comment = "A table comment" } ``` ## Charset and Collation The `charset` and `collate` are attributes of `schema`, `table` and `column` and supported by MySQL, MariaDB and PostgreSQL. Read more about them in [MySQL](https://dev.mysql.com/doc/refman/8.0/en/charset.html), [MariaDB](https://mariadb.com/kb/en/setting-character-sets-and-collations/) and [PostgreSQL](https://www.postgresql.org/docs/current/collation.html) websites. ```hcl schema "public" { charset = "utf8mb4" collate = "utf8mb4_0900_ai_ci" } table "products" { column "name" { type = text collate = "binary" } collate = "utf8_general_ci" } ``` ```hcl schema "public" {} table "products" { column "name" { type = text collate = "es_ES" } } ``` ## Auto Increment `AUTO_INCREMENT` and `IDENTITY` columns are attributes of the `column` and `table` resource, and can be used to generate a unique identity for new rows. In MySQL/MariaDB the `auto_increment` attribute can be set on columns and tables. 
```hcl table "users" { schema = schema.public column "id" { null = false type = bigint auto_increment = true } primary_key { columns = [column.id] } } ``` The `auto_increment` column can be set on the table to configure a start value other than 1. ```hcl table "users" { schema = schema.public column "id" { null = false type = bigint auto_increment = true } primary_key { columns = [column.id] } auto_increment = 100 } ``` PostgreSQL supports `serial` columns and the `generated as identity` syntax for versions >= 10. ```hcl table "users" { schema = schema.public column "id" { null = false type = int identity { generated = ALWAYS start = 10 increment = 10 } } primary_key { columns = [column.id] } } ``` SQLite allows configuring [`AUTOINCREMENT`](https://www.sqlite.org/autoinc.html) columns using the `auto_increment` attribute. ```hcl table "users" { schema = schema.main column "id" { null = false type = integer auto_increment = true } primary_key { columns = [column.id] } } ``` atlas-0.7.2/doc/md/cloud/000077500000000000000000000000001431455511600151045ustar00rootroot00000000000000atlas-0.7.2/doc/md/cloud/getting-started.md000066400000000000000000000234511431455511600205400ustar00rootroot00000000000000--- id: getting-started title: Getting Started with Ariga Cloud sidebar_label: Getting Started --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import Discord from '../../website/src/assets/icons/discord-white.svg' [Ariga Cloud](https://ariga.cloud) is an online platform that allows developers to view their Atlas projects and track their [Atlas GitHub Action](/integrations/github-actions) CI runs. The cloud platform gives full visibility into your Atlas schema, by displaying an entity relation diagram (ERD) that shows the schema changes, as well as the entire database schema. ### Signing Up ![Login](https://release.ariga.io/images/assets/login1.png) 1. 
To get started with [Ariga Cloud](https://ariga.cloud/), create an account by clicking 'sign up' on the homepage. 2. In the sign up screen, enter your work email and choose a password. 3. Next, you will receive an email to verify your account. From your email, [Ariga Cloud](https://ariga.cloud/) will open in a new tab, and you will need to sign in to access your account. 4. Once signed in, you will be prompted to create an organization. After creating the organization, you will be able to invite team members to join. Choose a meaningful name for the organization, as it will also be your subdomain. For example, "Acme Corp" will be available at "acme-corp.ariga.cloud". :::note Currently, the system only allows users to sign up for **one** organization. If you wish to create multiple organizations under the same user, you can create a [task-specific email address](https://support.google.com/a/users/answer/9308648?hl=en) (for Google users only). Create multiple emails that all link back to your regular address by adding a plus sign and any word before the @ sign in your address. ::: ### Connecting to the Atlas GitHub action At first you will notice that your projects and CI runs pages are empty. In order to connect the organization to your GitHub repository, you will need to setup the Atlas GitHub action on your repository by following these steps: :::note If you already have the Atlas GitHub action set up, you may skip step 4. In step 5, only add `ariga-token: ${{ secrets.ARIGA_TOKEN }}` to your yaml file. ::: 1. From the Settings page, generate an access token under 'Tokens'. 2. On your GitHub repo, under the 'Settings' section, click on 'Secrets' > 'Actions' to create a new repository secret. 3. Name your secret (for example, ARIGA_TOKEN) and paste the generated token from step 2. 4. Install the Atlas GitHub Action by adding a file named `.github/workflows/atlas-ci.yaml` to your repo. 5. 
Based on the type of database you are using, copy the following code into the workflow definition file. Set up the `ariga-token` input parameter to the secret name you chose in the previous step, and ensure your mainline branch and migration directory path are configured correctly: ```yaml name: Atlas CI on: # Run whenever code is changed in the master branch, # change this to your root branch. push: branches: // highlight-next-line - master # Run on PRs where something changed under the `path/to/migration/dir/` directory. pull_request: paths: // highlight-next-line - 'path/to/migration/dir/*' jobs: lint: services: # Spin up a mysql:8.0.29 container to be used as the dev-database for analysis. mysql: image: mysql:8.0.29 env: MYSQL_ROOT_PASSWORD: pass MYSQL_DATABASE: test ports: - "3307:3306" options: >- --health-cmd "mysqladmin ping -ppass" --health-interval 10s --health-start-period 10s --health-timeout 5s --health-retries 10 runs-on: ubuntu-latest steps: - uses: actions/checkout@v3.0.1 with: fetch-depth: 0 # Mandatory unless "latest" is set below. - uses: ariga/atlas-action@v0 with: // highlight-next-line dir: 'path/to/migrations' dir-format: atlas # Or: golang-migrate, goose, dbmate, flyway, liquibase dev-url: mysql://root:pass@localhost:3307/test // highlight-next-line ariga-token: ${{ secrets.ARIGA_TOKEN }} ``` ```yaml name: Atlas CI on: # Run whenever code is changed in the master branch, # change this to your root branch. push: branches: // highlight-next-line - master # Run on PRs where something changed under the `path/to/migration/dir/` directory. pull_request: paths: // highlight-next-line - 'path/to/migration/dir/*' jobs: lint: services: # Spin up a postgres:14 container to be used as the dev-database for analysis. 
postgres14: image: postgres:14 env: POSTGRES_DB: test POSTGRES_PASSWORD: pass ports: - 5430:5432 options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 runs-on: ubuntu-latest steps: - uses: actions/checkout@v3.0.1 with: fetch-depth: 0 # Mandatory unless "latest" is set below. - uses: ariga/atlas-action@v0 with: // highlight-next-line dir: 'path/to/migrations' dir-format: atlas # Or: golang-migrate, goose, dbmate, flyway, liquibase dev-url: postgres://postgres:pass@localhost:5430/test?sslmode=disable // highlight-next-line ariga-token: ${{ secrets.ARIGA_TOKEN }} ``` ```yaml name: Atlas CI on: # Run whenever code is changed in the master branch, # change this to your root branch. push: branches: // highlight-next-line - master # Run on PRs where something changed under the `path/to/migration/dir/` directory. pull_request: paths: // highlight-next-line - 'path/to/migration/dir/*' jobs: lint: services: # Spin up a maria:10.7 container to be used as the dev-database for analysis. maria107: image: mariadb:10.7 env: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass ports: - 4306:3306 options: >- --health-cmd "mysqladmin ping -ppass" --health-interval 10s --health-start-period 10s --health-timeout 5s --health-retries 10 runs-on: ubuntu-latest steps: - uses: actions/checkout@v3.0.1 with: fetch-depth: 0 # Mandatory unless "latest" is set below. - uses: ariga/atlas-action@v0 with: // highlight-next-line dir: 'path/to/migrations' dir-format: atlas # Or: golang-migrate, goose, dbmate, flyway, liquibase dev-url: maria://root:pass@localhost:4306/test // highlight-next-line ariga-token: ${{ secrets.ARIGA_TOKEN }} ``` 6. After merging the workflow to your mainline branch, the workflow will be triggered. 7. Refresh Ariga Cloud and your project will appear! ![Setup Projects](https://release.ariga.io/images/assets/setup-projects.png) ### Viewing CI Runs In the system, you can view all the CI runs that were triggered by the Atlas GitHub workflow. 
Each run includes the following: - A summary of the run. - SQL statements that were analyzed. - An ERD that shows the changes made to the schema, as well as a full view. A run can complete in one of three ways: ![Successful Run](https://release.ariga.io/images/assets/successful-run.png) The CI ran successfully and no errors or issues were found in your SQL statements or Atlas sum file. ![Issues Found Run](https://release.ariga.io/images/assets/issues-found.png) In cases where your SQL statements _might_ cause a failure in production, the CI run will be labeled as 'issues found'. In this example, we can see that the column `name` was created as non-nullable. The CI is letting us know that this has a chance of causing a failure, because if there is a row that exists in this table that has a null `name` value, this migration will for a fact fail in production. The report also makes sure to reference the specific data-dependent check that was found [MF103](https://atlasgo.io/lint/analyzers#MF103), in this example). ![Failed Run](https://release.ariga.io/images/assets/failed-run2.png) The CI run can fail for multiple reasons: incorrect SQL statements, wrong configuration, and more. In this example, we can see the CI has failed due to an SQL statement that attempts to drop a table. Because this is dangerous and will result in loss of data, the CI will automatically fail any `drop` statements. However, users can disable this by configuring the destructive analyzer in the [`atlas.hcl`](https://atlasgo.io/atlas-schema/projects) file: ```hcl title="atlas.hcl" lint { //highlight-start destructive { error = false } //highlight-end } ``` ### Inviting Members Under 'Settings' > 'Members', you can invite team members to your organization. These members will receive an email with a link to Ariga Cloud, and will be required to sign up with the same email in order to access the organization. 
### Regenerating Tokens It is possible to regenerate the access token, however once you do so the old token will be **invalidated**. When choosing to regenerate the token, you must remember to copy the new one into your GitHub project's 'Secrets'. :::info For more help, reach out to us on our [Discord server](https://discord.gg/zZ6sWVg6NT). ::: atlas-0.7.2/doc/md/concepts/000077500000000000000000000000001431455511600156145ustar00rootroot00000000000000atlas-0.7.2/doc/md/concepts/dev.md000066400000000000000000000117201431455511600167150ustar00rootroot00000000000000--- title: Dev Database id: dev-database slug: /concepts/dev-database --- ## Introduction Atlas uses the concept of "Dev Database" to provide extra safety and correctness to the migration process. a development database (a twin environment) to validate schemas, simulate migrations and calculate the state of the migration directory by replaying the historical changes. Let's go over a few examples to explain the benefits of using a dev/twin database. For a one-time use Atlas can spin up an ephemeral local docker container for you with a special [docker driver](concepts/url.mdx). :::info Atlas cleans up after itself! You can use the same instance of a "Dev Database" for multiple environments, as long as they are not accessed concurrently. ::: ## Validation Suppose we want to the add the following `CHECK` constraint to the table below: ```hcl title="test.hcl" {6-8} table "t" { schema = schema.test column "c" { type = int } check "ck" { expr = "c <> d" } } ``` After running [`schema apply`](reference.md#atlas-schema-apply), we get the following error because the `CHECK` constraint is invalid, as column `d` does not exist. 
```shell $ atlas schema apply --url "mysql://root:pass@:3308/test" -f test.hcl ``` ```text -- Planned Changes: -- Modify "t" table ALTER TABLE `test`.`t` ADD CONSTRAINT `ck` CHECK (c <> d), DROP COLUMN `c1`, ADD COLUMN `c` int NOT NULL ✔ Apply Error: modify "t" table: Error 1054: Unknown column 'd' in 'check constraint ck expression' exit status 1 ``` Atlas cannot predict such errors without applying the schema file on the database, because some cases require parsing and compiling SQL expressions, traverse their AST and validate them. This is already implemented by the database engine. Migration failures can leave the database in a broken state. Some databases, like MySQL, do not support transactional migrations due to [implicit COMMIT](https://dev.mysql.com/doc/refman/8.0/en/implicit-commit.html). However, this can be avoided using the `--dev-url` option. Passing this to `schema apply` will first create and validate the desired state (the HCL schema file) on temporary named-databases (schemas), and only then continue to `apply` the changes if it passed successfully. ```shell $ atlas schema apply --url "mysql://root:pass@:3308/test" -f test.hcl --dev-url="mysql://root:pass@:3308/test" ``` ```text Error: create "t" table: Error 3820: Check constraint 'ck' refers to non-existing column 'd'. exit status 1 ``` ## Diffing Atlas adopts the declarative approach for maintaining the schemas desired state, but provides two ways to manage and apply changes on the database: `schema apply` and `migrate diff`. In both commands, Atlas compares the "current", and the "desired" states and suggests a migration plan to migrate the "current" state to the "desired" state. For example, the "current" state can be an inspected database or a migration directory, and the "desired" state can be an inspected database, or an HCL file. Schemas that are written in HCL files are defined in natural form by humans. However, databases store schemas in normal form (also known as canonical form). 
Therefore, when Atlas compares two different forms it may suggest incorrect or unnecessary schema changes, and using the `--dev-url` option can solve this (see the above section for more in-depth example). Let's see it in action, by adding an index-expression to our schema. ```hcl title="test.hcl" {6-10} table "t" { schema = schema.test column "c" { type = varchar(32) } index "i" { on { expr = "upper(concat('c', c))" } } } ``` ```shell $ atlas schema apply --url "mysql://root:pass@:3308/test" -f test.hcl ``` ```text -- Planned Changes: -- Modify "t" table ALTER TABLE `test`.`t` ADD INDEX `i` ((upper(concat('c', c)))) ✔ Apply ``` We added a new index-expression to our schema, but using `schema inspect` will show our index in its normal form. ```shell $ atlas schema inspect --url "mysql://root:pass@:3308/test" ``` ```hcl {7-11} table "t" { schema = schema.test column "c" { null = false type = varchar(32) } index "i" { on { expr = "upper(concat(_utf8mb4'c',`c`))" } } } ``` Therefore, running `schema apply` again will suggest unnecessary schema changes. ```shell $ atlas schema apply --url "mysql://root:pass@:3308/test" -f test.hcl ``` ```text -- Planned Changes: -- Modify "t" table ALTER TABLE `test`.`t` DROP INDEX `i` -- Modify "t" table ALTER TABLE `test`.`t` ADD INDEX `i` ((upper(concat('c', c)))) ✔ Abort ``` Similarly to the previous example, we will use the `--dev-url` option to solve this. ```shell $ atlas schema apply --url "mysql://root:pass@:3308/test" -f test.hcl --dev-url="mysql://root:pass@:3307/test" ``` ```text Schema is synced, no changes to be made ``` Hooray! Our desired schema is synced and no changes have to be made. :::info Atlas cleans up after itself! You can use the same instance of a "Dev Database" for multiple environments, as long as they are not accessed concurrently. 
::: atlas-0.7.2/doc/md/concepts/migration-directory-integrity.md000066400000000000000000000064071431455511600241540ustar00rootroot00000000000000--- id: migration-directory-integrity slug: /concepts/migration-directory-integrity title: Migration Directory Integrity File --- Applying changes to a database schema in the wrong order can be dangerous. For this reason, Atlas is built on a workflow that enforces a _linear history_ using a _migration directory integrity file_. ### The problem Suppose you have multiple teams working on a system simultaneously, all of which need to make changes to the database schema from time to time. Unless they somehow coordinate, they may end up with a broken migration directory. Consider what would happen if Team A and B, working in parallel, both merge a migration that creates a new table named `inventory`. This is illustrated in this diagram: ![Teams committing migrations in parallel. Source: Ent blog](https://entgo.io/images/assets/migrate/no-conflict.svg) Something like this might happen: 1. Team A creates a feature branch, committing a migration creating the `inventory` table. 2. Team B creates a second feature branch, also creating a table by the same name. 3. Both branches pass code-review and continuous integration. 4. Team A's branch is merged to the mainline branch. 5. Team B's branch is merged. 6. When both changes are deployed, the first one to run will succeed and the second will fail. This will happen in an arbitrary order (migrations are run in lexicographic order, usually set by the timestamp on the developer's workstation when generating them). ### The solution Recovering from a failed migration is quite a headache, so wouldn't it be great to prevent this from ever happening? Code conflicts are usually detected by source-control systems (such as Git) when the same line in the same file is modified by two different commits. 
In our case, no such conflict happens because migrations are typically described in a separate file for each migration. Atlas's engine offers a way to prevent concurrent creation of new migration files and guards against accidental changes in the migration history we call *Migration Directory Integrity File*. This file is simply another file in your migration directory called `atlas.sum` and looks something like: ```text h1:KRFsSi68ZOarsQAJZ1mfSiMSkIOZlMq4RzyF//Pwf8A= 20220318104614_team_A.sql h1:EGknG5Y6GQYrc4W8e/r3S61Aqx2p+NmQyVz/2m8ZNwA= ``` The `atlas.sum` file contains the checksum of each migration file (implemented by a reverse, one branch merkle hash tree), and a sum of all files. Adding new files results in a change to the sum file, which will raise merge conflicts in most version control systems. ### How does this mechanism prevent situations like the one we described above? The migration directory integrity file is updated automatically whenever a new migration is created. Therefore, after Team A merged their changes to the mainline branch, Team B would not be able to do so without dealing with the changes landed by Team B. Because of the merge conflict on the `atlas.sum` file, in order to land their changes, Team B would need to: 1. Merge the latest changes into their branch 2. Resolve any conflicts on the database schema (and application) level 3. Re-compute the `atlas.sum` file (using the [`atlas migrate hash`](/cli-reference#atlas-migrate-hash)) command. 4. Merge their changes to the mainline branch. atlas-0.7.2/doc/md/concepts/url.mdx000066400000000000000000000043321431455511600171320ustar00rootroot00000000000000--- title: URLs id: concepts-url slug: /concepts/url --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; ## What is a URL? Uniform Resource Locators, or URLs are a standardized method for specifying a location of a resource. 
All the component parts of a URL are shown below: ``` driver://[username[:password]@]address/[schema|database][?param1=value1&...¶mN=valueN] ``` An example might look like: ``` mysql://localhost mysql://user:pass@localhost mysql://user:pass@localhost:3306/database ``` ``` maria://localhost maria://user:pass@localhost maria://user:pass@localhost:3306/database ``` ``` postgres://localhost:5432/database postgres://localhost:5432/database?search_path=schema postgres://postgres:pass@0.0.0.0:5432/database?sslmode=disable ``` ``` sqlite://file.db sqlite://file?cache=shared&mode=memory ``` Atlas can spin up an ephemeral local docker container for you by specifying a special URL like below. This can be useful if you need a [dev database](../concepts/dev.md) for schema validation or diffing. However, some images like `mysql` / `mariadb` take quite some time to "boot", before they are ready to be used. For a smoother developing experience consider spinning up a longer lived container by yourself. ``` docker://postgres docker://mysql/8 docker://mariadb/10.8.2-rc-focal ``` ## SSL/TLS Mode The default SSL mode for Postgres is `required`. Please follow the [Postgres documentation](https://www.postgresql.org/docs/current/libpq-ssl.html) for configuring your SSL connection for your database, or set SSL mode to `disable` with the search parameter `?sslmode=disable`. For local databases, disabling SSL is appropriate when inspecting and applying schema changes. MySQL does not require TLS by default. However, you can require TLS with the `?tls=true` search parameter. atlas-0.7.2/doc/md/concepts/workflows.md000066400000000000000000000146111431455511600201760ustar00rootroot00000000000000--- title: Declarative vs Versioned Workflows id: workflows slug: /concepts/declarative-vs-versioned --- This section introduces two types of workflows that are supported by Atlas to manage database schemas: _declarative_ and _versioned_ migrations. 
### Declarative Migrations The declarative approach has become increasingly popular with engineers nowadays because it embodies a convenient separation of concerns between application and infrastructure engineers. Application engineers describe _what_ (the desired state) they need to happen, and infrastructure engineers build tools that plan and execute ways to get to that state (_how_). This division of labor allows for great efficiencies as it abstracts away the complicated inner workings of infrastructure behind a simple, easy to understand API for the application developers and allows for specialization and development of expertise to pay off for the infra people. With declarative migrations, the desired state of the database schema is given as input to the migration engine, which plans and executes a set of actions to change the database to its desired state. For example, suppose your application uses a small SQLite database to store its data. In this database, you have a `users` table with this structure: ```hcl schema "main" {} table "users" { schema = schema.main column "id" { type = int } column "greeting" { type = text } } ``` Now, suppose that you want to add a default value of `"shalom"` to the `greeting` column. Many developers are not aware that it isn't possible to modify a column's default value in an existing table in SQLite. Instead, the common practice is to create a new table, copy the existing rows into the new table and drop the old one after. 
Using the declarative approach, developers can change the default value for the `greeting` column: ```hcl {10} schema "main" {} table "users" { schema = schema.main column "id" { type = int } column "greeting" { type = text default = "shalom" } } ``` And have Atlas's engine devise a plan similar to this: ```sql -- Planned Changes: -- Create "new_users" table CREATE TABLE `new_users` (`id` int NOT NULL, `greeting` text NOT NULL DEFAULT 'shalom') -- Copy rows from old table "users" to new temporary table "new_users" INSERT INTO `new_users` (`id`, `greeting`) SELECT `id`, IFNULL(`greeting`, 'shalom') AS `greeting` FROM `users` -- Drop "users" table after copying rows DROP TABLE `users` -- Rename temporary table "new_users" to "users" ALTER TABLE `new_users` RENAME TO `users` ``` ### Versioned Migrations As the database is one of the most critical components in any system, applying changes to its schema is rightfully considered a dangerous operation. For this reason, many teams prefer a more imperative approach where each change to the database schema is checked-in to source control and reviewed during code-review. Each such change is called a "migration", as it migrates the database schema from the previous version to the next. To support this kind of requirement, many popular database schema management tools such as [Flyway](https://flywaydb.org/), [Liquibase](https://liquibase.org/) or [golang-migrate](https://github.com/golang-migrate/migrate) support a workflow that is commonly called "versioned migrations". In addition to the higher level of control which is provided by versioned migrations, applications are often deployed to multiple remote environments at once. These environments, are not controlled (or even accessible) by the development team. In such cases, declarative migrations, which rely on a network connection to the target database and on human approval of migrations plans in real-time, are not a feasible strategy. 
With versioned migrations (sometimes called "change-based migrations") instead of describing the desired state ("what the database should look like"), developers describe the changes themselves ("how to reach the state"). Most of the time, this is done by creating a set of SQL files containing the statements needed. Each of the files is assigned a unique version and a description of the changes. Tools like the ones mentioned earlier are then able to interpret the migration files and to apply (some of) them in the correct order to transition to the desired database structure. The benefit of the versioned migrations approach is that it is explicit: engineers know _exactly_ what queries are going to be run against the database when the time comes to execute them. Because changes are planned ahead of time, migration authors can control precisely how to reach the desired schema. If we consider a migration as a plan to get from state A to state B, oftentimes multiple paths exist, each with a very different impact on the database. To demonstrate, consider an initial state which contains a table with two columns: ```sql CREATE TABLE users ( id int, name varchar(255) ); ``` Suppose our desired state is: ```sql CREATE TABLE users ( id int, user_name varchar(255) ); ``` There are at least two ways get from the initial to the desired state: * Drop the `name` column and create a new `user_name` column. * Alter the name of the `name` column to `user_name`. Depending on the context, either may be the desired outcome for the developer planning the change. With versioned migrations, engineers have the ultimate confidence of what change is going to happen which may not be known ahead of time in a _declarative_ approach. ### Migration Authoring The downside of the _versioned migration_ approach is, of course, that it puts the burden of planning the migration on developers. 
This requires a certain level of expertise that is not always available to every engineer, as we demonstrated in our example of setting a default value in a SQLite database above. As part of the Atlas project we advocate for a third combined approach that we call "Versioned Migration Authoring". Versioned Migration Authoring is an attempt to combine the simplicity and expressiveness of the declarative approach with the control and explicitness of versioned migrations. With versioned migration authoring, users still declare their desired state and use the Atlas engine to plan a safe migration from the existing to the new state. However, instead of coupling planning and execution, plans are instead written into normal migration files which can be checked into source control, fine-tuned manually and reviewed in regular code review processes. atlas-0.7.2/doc/md/contributing.md000066400000000000000000000113351431455511600170320ustar00rootroot00000000000000--- title: Contributing id: contributing slug: /contributing --- ### How to Contribute Atlas is a community project, we welcome contributions of all kinds and sizes! Here are some ways in which you can help: * File well-written and descriptive bug reports or feature requests in the [Issues page](https://github.com/ariga/atlas/issues). * Tweet about your experience with Atlas on [Twitter](https://twitter.com), don't forget to mention [@ariga_io](https://twitter.com/ariga_io) and link to [atlasgo.io](https://atlasgo.io) if you do. * Write educational content on your personal blog or websites such as [dev.to](https://dev.to) or [Medium](https://medium.com). If you do, don't hesitate to reach out to us via Discord (link below) for help proof-reading your text and using our social-media channels for distributing it to readers. * Join our [Discord Server](https://discord.com/invite/QhsmBAWzrC) to answer questions of other users and find out other ways in which you can contribute by talking to the community there! 
* Contribute bug-fixes or new features to the [codebase](https://github.com/ariga/atlas). ### Contributing code to Atlas As we are still starting out, we don't have an official code-style or guidelines on composing your code. As general advice, read through the area of the code that you are modifying and try to keep your code similar to what others have written in the same place. #### Code-generation Some of the code in the Atlas repository is generated. The CI process verifies that all generated files are checked in by running `go generate ./...` and then running `git status --porcelain`. Therefore, before committing changes to Atlas, please run: ```shell go generate ./... ``` #### Linting Your code will be linted using `golangci-lint` during CI. To install it locally, [follow this guide](https://golangci-lint.run/usage/install/#local-installation). To run it locally: ```shell golangci-lint run ``` #### Formatting Format your code using the standard `fmt` command: ```shell go fmt ./... ``` #### Unit-tests Your code should be covered in unit-tests, see the codebase for examples. To run tests: ```shell go test ./... ``` #### Integration tests Some features, especially those that interact directly with a database must be verified in an integration test. There is extensive infrastructure for integration tests under `internal/integration/` that runs tests under a matrix of database dialect (Postgres, MySQL, etc.) and versions. To run the integration tests, first use the `docker-compose.yml` file to spin up databases to test against: ```shell cd internal/integration docker-compose up -d ``` Then run the tests, from within the `integration` directory: ```shell go test ./... ``` ### Contributing documentation The Atlas documentation website is generated from the project's main [GitHub repo](https://github.com/ariga/atlas). Follow this short guide to contribute documentation improvements and additions: #### Setting Up 1. 
[Locally fork and clone](https://docs.github.com/en/github/getting-started-with-github/quickstart/fork-a-repo) the [repository](https://github.com/ariga/atlas). 2. The documentation site uses [Docusaurus](https://docusaurus.io/). To run it you will need [Node.js installed](https://nodejs.org/en/). 3. Install the dependencies: ```shell cd doc/website && npm install ``` 4. Run the website in development mode: ```shell cd doc/website && npm start ``` 5. Open your browser at [http://localhost:3000](http://localhost:3000). #### General Guidelines * Documentation files are located in `doc/md`; they are [Markdown-formatted](https://en.wikipedia.org/wiki/Markdown) with "front-matter" style annotations at the top. [Read more](https://docusaurus.io/docs/docs-introduction) about Docusaurus's document format. * Atlas uses [Golang CommitMessage](https://github.com/golang/go/wiki/CommitMessage) formats to keep the repository's history nice and readable. As such, please use a commit message such as: ```text doc/md: adding a guide on contribution of docs to atlas ``` #### Adding New Documents 1. Add a new Markdown file in the `doc/md` directory, for example `doc/md/writing-docs.md`. 2. The file should be formatted as such: ```markdown --- id: writing-docs title: Writing Docs --- ... ``` Where `id` should be a unique identifier for the document, and should be the same as the filename without the `.md` suffix, and `title` is the title of the document as it will appear in the page itself and any navigation element on the site. 3. 
If you want the page to appear in the documentation website's sidebar, add a `doc` block to `website/sidebars.js`, for example: ```diff { type: 'doc', id: 'writing-docs', }, + { + type: 'doc', + id: 'contributing', + }, ```atlas-0.7.2/doc/md/declarative/000077500000000000000000000000001431455511600162615ustar00rootroot00000000000000atlas-0.7.2/doc/md/declarative/apply.md000066400000000000000000000040001431455511600177220ustar00rootroot00000000000000--- id: apply slug: /declarative/apply title: Declarative schema migrations --- With Atlas, users do not need to plan database schema changes themselves. Instead of figuring out the correct SQL statements to get their database to the desired state, Atlas supports a kind of workflow that we call _declarative schema migration_. With declarative schema migrations the user provides a connection string to the target database and the desired schema and Atlas does all of the planning. [Read more about declarative workflows](/concepts/declarative-vs-versioned) ### Auto-approval Before executing the migration against the target database, Atlas will print the SQL statements that it is going to run and prompt the user for approval. Users that wish to automatically approve may run the `schema apply` command with the `--auto-approve` flag. ### Dry-runs In order to skip the execution of the SQL queries against the target database, users may provide the `--dry-run` flag. When invoked with this flag, Atlas will connect to the target database, inspect its current state, calculate the diff between the provided desired schema and print out a series of SQL statements to reconcile any gaps between the inspected and desired schemas. ### Dev-database When storing schema definitions, many database engines perform some form of normalization. That is, despite us providing a specific definition of some aspect of the schema, the database will store it in another, equivalent form. 
This means in certain situations it may appear to Atlas as if some diff exists between the desired and inspected schemas, whereas in reality there is none. To overcome these situations, users may use the `--dev-url` flag to provide Atlas with a connection string to a _Dev-Database_. This database is used to normalize the schema prior to planning migrations and for simulating changes to ensure their applicability before execution. [Read more about Dev-Databases](/concepts/dev-database) ### Reference [CLI Command Reference](/cli-reference#atlas-schema-apply)atlas-0.7.2/doc/md/declarative/diff.mdx000066400000000000000000000053071431455511600177100ustar00rootroot00000000000000--- id: diff slug: /declarative/diff title: Declarative schema migrations --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; It is sometimes useful to be able to calculate the diff between two databases. For instance, as you are developing you may want to calculate how to move from an existing database to some other state that you are interested in. Alternatively, you may be diagnosing some issue and want to verify there is no difference between a local copy of a schema and a remote one. To accommodate these types of use-cases, Atlas supports the `schema diff` command. This command connects to two given databases, inspects them, calculates the difference in their schemas, and prints a plan of SQL statements to migrate the "from" database to the schema of the "to" database. ### Flags * `--from` (required) - the [URL](/concepts/url) of the database to calculate the diff from. * `--to` (required) - the [URL](/concepts/url) of the database to calculate the diff to. 
### Examples Diff to a remote database: ``` atlas schema diff --from "mysql://user:pass@localhost:3306" --to "mysql://user:pass@remote:3306" ``` Diff to a remote schema ``` atlas schema diff --from "mysql://user:pass@localhost:3306/schema" --to "mysql://user:pass@remote:3306/schema" ``` Diff to another schema in the same database ``` atlas schema diff --from "mysql://user:pass@localhost:3306/schema" --to "mysql://user:pass@localhost:3306/other" ``` Diff to a remote database: ``` atlas schema diff --from "maria://user:pass@localhost:3306" --to "maria://user:pass@remote:3306" ``` Diff to a remote schema ``` atlas schema diff --from "maria://user:pass@localhost:3306/schema" --to "maria://user:pass@remote:3306/schema" ``` Diff to another schema in the same database ``` atlas schema diff --from "maria://user:pass@localhost:3306/schema" --to "maria://user:pass@localhost:3306/other" ``` Diff to a remote database: ``` atlas schema diff --from "postgres://localhost:5432/database" --to "postgres://remote:5432/database" ``` Diff to a remote schema: ``` atlas schema diff --from "postgres://localhost:5432/database?search_path=schema" --to "postgres://remote:5432/database?search_path=schema" ``` Diff to another schema in the same database ``` atlas schema diff --from "postgres://localhost:5432/database?search_path=schema" --to "postgres://localhost:5432/database?search_path=other" ``` ### Reference [CLI Command Reference](/cli-reference#atlas-schema-diff)atlas-0.7.2/doc/md/declarative/inspect.mdx000066400000000000000000000102501431455511600204360ustar00rootroot00000000000000--- id: inspect slug: /declarative/inspect title: Inspecting existing schemas with Atlas --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; ### Automatic Schema Inspection Many projects begin with an existing database that users wish to start managing with Atlas. 
In this case, instead of having developers learn the [Atlas Language](/atlas-schema/sql-resources) and reverse engineer a schema definition file that precisely describes the existing database, Atlas supports _automatic schema inspection_. With automatic schema inspection, users simply provide Atlas with a connection string to their target database and Atlas prints out a schema definition file in the Atlas language that they can use as the starting point for working with this database. ### Flags When using `schema inspect` to inspect an existing database, users may supply multiple parameters: * `--url` (required, `-u` accepted as well) - the [URL](/concepts/url) of database to be inspected. * `--schema` (optional, may be supplied multiple times) - schemas to inspect within the target database. * `--exclude` (optional, may be supplied multiple times) - filter out resources matching the given glob pattern. ### Examples #### Entire Database ``` atlas schema inspect -u "mysql://localhost" atlas schema inspect -u "mysql://user:pass@localhost:3306" ``` ``` atlas schema inspect -u "maria://localhost" atlas schema inspect -u "maria://user:pass@localhost:3306" ``` ``` atlas schema inspect -u "postgres://localhost:5432/database" atlas schema inspect -u "postgres://postgres:pass@0.0.0.0:5432/database?sslmode=disable" ``` ``` atlas schema inspect -u "sqlite://file.db" atlas schema inspect -u "sqlite://file?cache=shared&mode=memory" ``` #### Single Schema ``` atlas schema inspect -u "mysql://localhost/schema" atlas schema inspect -u "mysql://user:pass@localhost:3306/schema" ``` ``` atlas schema inspect -u "maria://localhost/schema" atlas schema inspect -u "maria://user:pass@localhost:3306/schema" ``` ``` atlas schema inspect -u "postgres://localhost:5432/database?search_path=schema" atlas schema inspect -u "postgres://postgres:pass@0.0.0.0:5432/database?sslmode=disable&sslmode=disable" ``` #### Multiple Schemas ``` atlas schema inspect -u "mysql://localhost" --schema schema1 
--schema schema2 atlas schema inspect -u "mysql://user:pass@localhost:3306" -s schema1,schema2 ``` ``` atlas schema inspect -u "maria://localhost" --schema schema1 --schema schema2 atlas schema inspect -u "maria://user:pass@localhost:3306" -s schema1,schema2 ``` ``` atlas schema inspect -u "postgres://localhost:5432/database" --schema schema1 --schema schema2 atlas schema inspect -u "postgres://postgres:pass@0.0.0.0:5432/database?sslmode=disable" -s schema1,schema2 ``` #### Exclude Schemas ``` atlas schema inspect -u "mysql://localhost" --exclude "internal" atlas schema inspect -u "mysql://localhost" --exclude "schema_*" ``` #### Exclude Tables ``` atlas schema inspect -u "mysql://localhost" --exclude "*.prefix_*" atlas schema inspect -u "mysql://localhost" --exclude "schema.table" ``` #### Exclude Table Resources ``` atlas schema inspect -u "mysql://localhost" --exclude "*.*.prefix_*" atlas schema inspect -u "mysql://localhost" --exclude "public.*.c1" ``` ### Reference [CLI Command Reference](/cli-reference#atlas-schema-inspect) atlas-0.7.2/doc/md/deployment/000077500000000000000000000000001431455511600161565ustar00rootroot00000000000000atlas-0.7.2/doc/md/deployment/intro.md000066400000000000000000000102671431455511600176410ustar00rootroot00000000000000--- id: deployment slug: /deployment/intro title: Deployment --- ## Introduction :::info As our team is working on completely rewriting The Management UI, support for it has been temporarily removed in the most recent version of Atlas. To explore the UI in previous versions, download [v0.3.7](https://github.com/ariga/atlas/releases/tag/v0.3.7) or earlier. ::: Teams using Atlas to manage their databases can benefit from deploying the Atlas Management UI to their cloud environment. The Management UI can be used to gain visibility of your team's database schemas as well as provide an intuitive interface for planning and running database migrations using Atlas's core engine. 
In addition, the Management UI automatically tracks and audits all changes using the "Activity & History" feature. The Atlas Management UI is designed to be a long-running process. It is therefore recommended running it against a persistent database that will make sure your team's schemas, database credentials, migration history and more are not lost in case you need to restart the service. Atlas uses [tink](https://developers.google.com/tink), a battle-tested encryption library created at Google, to encrypt all sensitive information. Following recommendations from the developers of tink, Atlas uses [AEAD](https://developers.google.com/tink/aead?hl=en) encryption with an AES256_GCM type key. In this section we will review two deployment and usage options: 1. Deploying on a VM/EC2 instance with persistent storage. 2. Deploying using an official Atlas Docker container. ### Persistent Storage When serving Atlas on a VM/EC2, you will need to decide on which persistent storage you would like Atlas to store the configuration data. For now, you can choose any of the supported databases: 1. SQLite (file) (in-memory is also supported but does not require encryption) 2. MySQL 3. TiDB 4. MariaDB 5. PostgresDB An example for serving Atlas with MySQL persistent storage: ``` atlas serve --storage "mysql://root:pass@localhost:3306/atlas" ``` On your first run, Atlas will generate an encryption key and store it under ```$HOME/.atlas/keyset.json```. This (private) file should not be deleted, otherwise Atlas will not be able to restore your configuration data. ### Docker Atlas has an official docker that is updated with the latest and tagged versions, which can be found on [Docker Hub](https://hub.docker.com/r/arigaio/atlas). You can use this image to run the CLI and to serve the Atlas UI. 
To run Atlas in served and persisted mode inside a container: ``` docker run -v $HOME/.atlas/keyset.json:/root/.atlas/keyset.json -p 5800:5800 arigaio/atlas:latest serve --addr :5800 --storage "mysql://root:pass@tcp(host.docker.internal:3306)/atlas" ``` Let's review this command step by step: 1. ```docker run``` - running a command. 2. ```-v $HOME/.atlas/keyset.json:/root/.atlas/keyset.json``` binds a secret key from a persisted host into the docker container. If you don't have a key, use an empty file and Atlas will generate one for you. 3. ``` -p 5800:5800``` binds an exposed serving port for connecting with a web client. 4. ```5800:5800 arigaio/atlas:latest``` use the latest atlas image, for a versioned release use a specific tag such as ```arigaio/atlas:0.3.6```. 5. ```serve --addr :5800 --storage "mysql://root:pass@tcp(host.docker.internal:3306)/atlas"``` serve Atlas on port 5800 with a persistent MySQL Database. ### Encryption On its first run, Atlas generates a keyset.json file under `$HOME/.atlas/` containing an encryption keyset for you. Do not lose this file! Without this file you cannot later use any database credentials that save to Atlas. 
If you want to generate this key yourself, you can [install Tinkey](https://developers.google.com/tink/install-tinkey), Tink's official CLI, and use it to generate a keyset: ```shell brew tap google/tink https://github.com/google/tink brew install tinkey tinkey create-keyset --key-template AES256_GCM --out ~/.atlas/keyset.json ``` Alternatively, you can use [rotemtam/tinkey](https://hub.docker.com/r/rotemtam/tinkey), an unofficial Docker image that wraps the official binary distribution: ```shell docker run --rm rotemtam/tinkey create-keyset --key-template AES256_GCM > ~/.atlas/keyset.json ``` atlas-0.7.2/doc/md/getting-started/000077500000000000000000000000001431455511600171035ustar00rootroot00000000000000atlas-0.7.2/doc/md/getting-started/02-inspecting.mdx000066400000000000000000000135151431455511600222040ustar00rootroot00000000000000--- id: getting-started-inspection title: Inspecting Schemas slug: /cli/getting-started/inspection --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; Atlas features a Data Definition Language (DDL) that has an [HCL-syntax](https://github.com/hashicorp/hcl) for defining the desired state of database schemas. In this section we will learn how to use the Atlas CLI to inspect an existing database and write it's schema in HCL to a file. Inspection is done via the `atlas schema inspect` command. 
To learn about its parameters, run: ```shell atlas schema inspect --help ``` ### Inspecting our database To inspect our locally-running MySQL instance from the [previous](/getting-started/) section, use the `-u` flag and write output to a file named `schema.hcl`: ```shell atlas schema inspect -u "mysql://root:pass@localhost:3306/example" > schema.hcl ``` Then, view the contents of the file: ```hcl title="schema.hcl" schema "example" { charset = "utf8mb4" collate = "utf8mb4_0900_ai_ci" } ``` ```shell atlas schema inspect -u "maria://root:pass@localhost:3306/example" > schema.hcl ``` Then, view the contents of the file: ```hcl title="schema.hcl" schema "example" { charset = "utf8mb4" collate = "utf8mb4_0900_ai_ci" } ``` ```shell atlas schema inspect -u "postgres://postgres:pass@localhost:5432/example?sslmode=disable" > schema.hcl ``` Then, view the contents of the file: ```hcl title="schema.hcl" schema "public" { } ``` ```shell atlas schema inspect -u "sqlite://file.db" > schema.hcl ``` Then, view the contents of the file: ```hcl title="schema.hcl" schema "main" { } ``` As you can see, Atlas inspected our (empty) database and wrote an Atlas HCL document containing only a [Schema](../atlas-schema/sql.mdx#schema) resource. Next, let's create some tables in our SQL database and see how they are reflected in the inspected Atlas HCL document. 
### Modifying our database schema manually In your database command-line prompt, create the tables: ```sql CREATE table users ( id int PRIMARY KEY, name varchar(100) ); CREATE TABLE blog_posts ( id int PRIMARY KEY, title varchar(100), body text, author_id int, FOREIGN KEY (author_id) REFERENCES users(id) ); ``` Observe that the tables are created successfully: ```text Query OK, 0 rows affected (0.02 sec) ``` Our schema represents a highly simplified blogging system with a `users` table for the authors and a `blog_posts` table for the contents: ![Blog ERD](https://atlasgo.io/uploads/images/blog-erd.png) ### Inspecting table schemas with Atlas CLI Next, let's re-run our inspection command: ```shell atlas schema inspect -u "mysql://root:pass@localhost:3306/example" > schema.hcl ``` ```shell atlas schema inspect -u "maria://root:pass@localhost:3306/example" > schema.hcl ``` ```shell atlas schema inspect -u "postgres://postgres:pass@localhost:5432/example?sslmode=disable" > schema.hcl ``` ```shell atlas schema inspect -u "sqlite://file.db" > schema.hcl ``` Browse through the updated contents of the file, it contains 3 blocks representing our schema as before, and two new blocks representing the `users` and `blog_posts` tables. Consider the following block: ```hcl table "users" { schema = schema.example column "id" { null = false type = int } column "name" { null = true type = varchar(100) } primary_key { columns = [column.id] } } ``` This block represents a [Table](../atlas-schema/sql.mdx#table) resource with `id`, and `name` columns. The `schema` field references the `example` schema that is defined elsewhere in this document. In addition, the `primary_key` sub-block defines the `id` column as the primary key for the table. In SQL databases, columns usually have a type attribute that defines the kind of data that can be stored in that column. Atlas strives to mimic the syntax of the database that the user is working against. 
In this case, the type for the `id` column is `int`, and `varchar(100)` for the `name` column. To see the full list of supported types, [click here](../atlas-schema/sql-types.md). Next, consider this block: ```hcl table "blog_posts" { schema = schema.example column "id" { null = false type = int } column "title" { null = true type = varchar(100) } column "body" { null = true type = text } column "author_id" { null = true type = int } primary_key { columns = [column.id] } foreign_key "blog_posts_ibfk_1" { columns = [column.author_id] ref_columns = [table.users.column.id] on_update = NO_ACTION on_delete = NO_ACTION } index "author_id" { unique = false columns = [column.author_id] } } ``` This block represents the `blog_posts` table. In addition to the elements we saw in the `users` table, here we can find a [Foreign Key](../atlas-schema/sql.mdx#foreign-key) block, declaring that the `author_id` column references the `id` column on the `users` table. To learn more about the resource types that are available to describe SQL schemas, read the [SQL Syntax](../atlas-schema/sql.mdx) documentation. In the next section, we will see how we can modify our database's schema by applying a modified Atlas HCL file using the Atlas CLI. atlas-0.7.2/doc/md/getting-started/03-applying.mdx000066400000000000000000000133331431455511600216630ustar00rootroot00000000000000--- id: getting-started-apply title: Applying Schemas slug: /cli/getting-started/applying-schemas --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; ### Declarative migrations In the previous section, we learned how to inspect an existing database and write its schema as an Atlas DDL HCL file. In this section, we will learn how to use the Atlas CLI to modify a database's schema. To do this, we will use Atlas's `atlas schema apply` command which takes a _declarative_ approach, that is, we define the _desired_ end schema, and Atlas figures out a safe-way to alter the database to get there. 
Let's start by viewing the help text for the `apply` command: ```shell atlas schema apply --help ``` You can see that similar to the `inspect` command, the `-u` flag is used to define the URL to connect to the database, and an additional flag `-f` specifies the path to the file containing the desired schema. ### Adding new tables to our database Let's modify our simplified blogging platform schema from the previous step by adding a third table, `categories`. Each table will have an `id` and a `name`. In addition, we will create an association table `post_categories` which creates a many-to-many relationship between blog posts and categories: ![Blog ERD](https://atlasgo.io/uploads/images/blog-erd-2.png) First, let's store the existing schema in a file named `schema.hcl`: ```shell atlas schema inspect -u "mysql://root:pass@localhost:3306/example" > schema.hcl ``` ```shell atlas schema inspect -u "maria://root:pass@localhost:3306/example" > schema.hcl ``` ```shell atlas schema inspect -u "postgres://postgres:pass@localhost:5432/example?sslmode=disable" > schema.hcl ``` ```shell atlas schema inspect -u "sqlite://file.db" > schema.hcl ``` Next, add the following table definition to the file: ```hcl table "categories" { schema = schema.example column "id" { null = false type = int } column "name" { null = true type = varchar(100) } primary_key { columns = [column.id] } } ``` To add this table to our database, let's use the `atlas schema apply` command: ```shell atlas schema apply -u "mysql://root:pass@localhost:3306/example" -f schema.hcl ``` ```shell atlas schema apply -u "maria://root:pass@localhost:3306/example" -f schema.hcl ``` ```shell atlas schema apply -u "postgres://postgres:pass@localhost:5432/example?sslmode=disable" -f schema.hcl ``` ```shell atlas schema apply -u "sqlite://file.db" -f schema.hcl ``` Atlas plans a migration (schema change) for us and prompts us to approve it: ```text -- Planned Changes: -- Create "categories" table CREATE TABLE 
`example`.`categories` (`id` int NOT NULL, `name` varchar(100) NULL, PRIMARY KEY (`id`)) Use the arrow keys to navigate: ↓ ↑ → ← ? Are you sure?: ▸ Apply Abort ``` To apply the migration, press `ENTER`, and voila! ```text ✔ Apply ``` To verify that our new table was created, open the database command line tool from previous step and run: ```text mysql> show create table categories; +------------+------------------------------------------------------+ | Table | Create Table | +------------+------------------------------------------------------+ | categories | CREATE TABLE `categories` ( `id` int NOT NULL, `name` varchar(100) DEFAULT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci | +------------+------------------------------------------------------ 1 row in set (0.01 sec) ``` Amazing! Our new table was created. Next, let's define our association table, add the following block to our `schema.hcl` file: ```hcl table "post_categories" { schema = schema.example column "post_id" { type = int } column "category_id" { type = int } foreign_key "post_category_post" { columns = [column.post_id] ref_columns = [table.blog_posts.column.id] } foreign_key "post_category_category" { columns = [column.category_id] ref_columns = [table.categories.column.id] } } ``` This block defines the `post_categories` table with two columns `post_id` and `category_id`. In addition, two foreign-keys are created referencing the respective columns on the `blog_posts` and `categories` tables. 
Let's try to apply the schema again, this time with the updated schema: ```text atlas schema apply -u "mysql://root:pass@localhost:3306/example" -f schema.hcl -- Planned Changes: -- Create "post_categories" table CREATE TABLE `example`.`post_categories` (`post_id` int NOT NULL, `category_id` int NOT NULL, CONSTRAINT `post_category_post` FOREIGN KEY (`post_id`) REFERENCES `example`.`blog_posts` (`id`), CONSTRAINT `post_category_category` FOREIGN KEY (`category_id`) REFERENCES `example`.`categories` (`id`)) ✔ Apply ``` ### Conclusion In this section, we've seen how to use the `atlas schema apply` command to migrate the schema of an existing database to our desired state. atlas-0.7.2/doc/md/getting-started/getting-started.mdx000066400000000000000000000214111431455511600227210ustar00rootroot00000000000000--- id: getting-started title: Quick Introduction sidebar_label: Quick Introduction --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import Discord from '../../website/src/assets/icons/discord-white.svg' Atlas CLI is an open-source tool designed to help software engineers, DBAs and DevOps practitioners to manage their database schemas. Atlas users can use the [Atlas DDL](../atlas-schema/sql-resources) (data-definition language) to describe the desired database schema and use the command-line tool to plan and apply the migrations to their systems. ### Installation Get the latest release with [Homebrew](https://brew.sh/): ```shell brew install ariga/tap/atlas ``` Download latest release. ```shell curl -LO https://release.ariga.io/atlas/atlas-darwin-amd64-latest ``` Make the atlas binary executable. ```shell chmod +x ./atlas-darwin-amd64-latest ``` Move the atlas binary to a file location on your system PATH. ```shell sudo mv ./atlas-darwin-amd64-latest /usr/local/bin/atlas ``` ```shell sudo chown root: /usr/local/bin/atlas ``` Download latest release. 
```shell curl -LO https://release.ariga.io/atlas/atlas-linux-amd64-latest ``` Move the atlas binary to a file location on your system PATH. ```shell sudo install -o root -g root -m 0755 ./atlas-linux-amd64-latest /usr/local/bin/atlas ``` Download the [latest release](https://release.ariga.io/atlas/atlas-windows-amd64-latest.exe) and move the atlas binary to a file location on your system PATH. The binaries distributed in official releases are released under the [Ariga End User License](https://ariga.io/legal/atlas/eula). If you would like to build Atlas from source follow the instructions [here](https://atlasgo.io/cli-reference#building-from-source). ### Start a local database container For the purpose of this guide, we will start a local Docker container running MySQL. ```shell docker run --name atlas-db -p 3306:3306 -e MYSQL_ROOT_PASSWORD=pass -e MYSQL_DATABASE=example mysql ``` For this example, we will start off with the following database: ```sql CREATE table users ( id int PRIMARY KEY, name varchar(100) ); ``` Our schema represents a `users` table, where each user has an ID and a name. ### Inspecting our database Atlas features a Data Definition Language (DDL) that has an [HCL-syntax](https://github.com/hashicorp/hcl) for defining the desired state of database schemas. Inspection is done via the `atlas schema inspect` command. To inspect our locally-running MySQL instance, use the `-u` flag and write the output to a file named `schema.hcl`: ```shell atlas schema inspect -u "mysql://root:pass@localhost:3306/example" > schema.hcl ``` Open the `schema.hcl` file to view the Atlas schema that describes our database. ```hcl table "users" { schema = schema.example column "id" { null = false type = int } column "name" { null = true type = varchar(100) } primary_key { columns = [column.id] } } ``` This block represents a [table](../atlas-schema/sql.mdx#table) resource with `id`, and `name` columns. 
The `schema` field references the `example` schema that is defined elsewhere in this document. In addition, the `primary_key` sub-block defines the `id` column as the primary key for the table. Atlas strives to mimic the syntax of the database that the user is working against. In this case, the type for the `id` column is `int`, and `varchar(100)` for the `name` column. Now, consider we want to add a `blog_posts` table and have our schema represent a simplified blogging system. ![Blog ERD](https://atlasgo.io/uploads/images/blog-erd.png) Let's add the following to `schema.hcl`: ```hcl table "blog_posts" { schema = schema.example column "id" { null = false type = int } column "title" { null = true type = varchar(100) } column "body" { null = true type = text } column "author_id" { null = true type = int } primary_key { columns = [column.id] } foreign_key "author_fk" { columns = [column.author_id] ref_columns = [table.users.column.id] } index "author_id" { unique = false columns = [column.author_id] } } ``` In addition to the elements we saw in the `users` table, here we can find a [foreign key](../atlas-schema/sql.mdx#foreign-key) block, declaring that the `author_id` column references the `id` column on the `users` table. Now, let's apply these changes by running a migration. In Atlas, migrations can be applied in two types of workflows: _declarative_ and _versioned_. ### Declarative Migrations The declarative approach requires the user to define the _desired_ end schema, and Atlas provides a safe way to alter the database to get there. Let's see this in action. Continuing the example, in order to apply the changes to our database we will run the `apply` command: ```shell atlas schema apply \ -u "mysql://root:pass@localhost:3306/example" \ -f schema.hcl ``` Atlas presents the plan it created by displaying the SQL statements. 
For example, for a MySQL database we will see the following: ```console -- Planned Changes: -- Create "blog_posts" table // highlight-next-line-info CREATE TABLE `example`.`blog_posts` (`id` int NOT NULL, `title` varchar(100) NULL, `body` text NULL, `author_id` int NULL, PRIMARY KEY (`id`), INDEX `author_id` (`author_id`), CONSTRAINT `author_fk` FOREIGN KEY (`author_id`) REFERENCES `example`.`users` (`id`)) Use the arrow keys to navigate: ↓ ↑ → ← ? Are you sure?: ▸ Apply Abort ``` Apply the changes, and that's it! You have successfully run a declarative migration. You can reinspect the database by running the `inspect` command again to ensure that the changes have been made to the schema. ### Versioned Migrations Alternatively, the versioned migration workflow, sometimes called "change-based migrations", allows each change to the database schema to be checked into source control and reviewed during code-review. Users can still benefit from Atlas intelligently planning migrations for them, however they are not automatically applied. To start, we will calculate the difference between the _desired_ and _current_ state of the database by running the `atlas migrate diff` command. To run this command, we need to provide the necessary parameters: * `--dir` the URL to the migration directory, by default it is `file://migrations`. * `--to` the URL of the desired state, an HCL file or a database connection. * `--dev-url` a URL to a [Dev Database](/concepts/dev-database) that will be used to compute the diff. 
```shell atlas migrate diff create_blog_posts \ --dir="file://migrations" \ --to="file://schema.hcl" \ --dev-url="mysql://root:pass@:3306/test" ``` Run `ls migrations`, and you will notice that Atlas has created two files: ```sql -- create "blog_posts" table CREATE TABLE `example`.`blog_posts` (`id` int NOT NULL, `title` varchar(100) NULL, `body` text NULL, `author_id` int NULL, PRIMARY KEY (`id`), INDEX `author_id` (`author_id`), CONSTRAINT `author_fk` FOREIGN KEY (`author_id`) REFERENCES `example`.`users` (`id`)) ``` In addition to the migration directory, Atlas maintains a file name `atlas.sum` which is used to ensure the integrity of the migration directory and force developers to deal with situations where migration order or contents was modified after the fact. ```text h1:t1fEP1rSsGf1gYrYCjsGyEyuM0cnhATlq93B7h8uXxY= 20220811074144_create_blog_posts.sql h1:liZcCBbAn/HyBTqBAEVar9fJNKPTb2Eq+rEKZeCFC9M= ``` Now that we have our migration files ready, you can use your favorite migration tool to apply the changes generated by Atlas. ### Next Steps In this short tutorial we learned how to use Atlas to inspect databases, as well as use declarative and versioned migrations. Read more about the use-cases for the two approaches [here](/concepts/declarative-vs-versioned) to help you decide which workflow works best for you. :::info Need help getting started? We have a super friendly [#getting-started](https://discord.gg/8mvDUG22) channel on our community chat on Discord. For web-based, free, and fun (GIFs included) support: Join our Discord server :::atlas-0.7.2/doc/md/guides/000077500000000000000000000000001431455511600152565ustar00rootroot00000000000000atlas-0.7.2/doc/md/guides/ddl.md000066400000000000000000000142751431455511600163540ustar00rootroot00000000000000--- id: ddl slug: /guides/ddl title: Data Definition Language --- ## Introduction In the core of the Atlas project resides the Atlas Data Definition Language (DDL). 
The DDL is designed to capture an organization's data topologies and other aspects of its data infrastructure. In the design of the DDL, we put an emphasis on extensibility: As data topologies can contain a set of diverse data technologies, the language is designed to be modular with different extensions extending the types of resources and relationships that can be described using it. ## HCL The Atlas DDL currently supports an HCL syntax we call Atlas HCL. It is similar to other HCL based languages such as Terraform. ### Resources Documents written in the Atlas DDL usually describe _resources_. Resources are described as HCL blocks and have a type, and optionally a name. Consider this block: ```hcl user "rotemtam" { // ... } ``` This block describes a resource of type `user` with a name of `rotemtam`. ### Qualifiers In some cases, a document may contain multiple resources with the same name. To differentiate between the different resources, the Atlas DDL supports _qualifiers_, an additional label preceding the resource name: ```hcl person "dr" "jekyll" { } ``` This block describes a resource of type `person`, with a name of "jekyll" which is qualified by "dr". ### Attributes Resources can have named attributes with primitive types (string, boolean, integer or float) or lists of primitive values. For example: ```hcl user "rotemtam" { email = "rotem@atlasgo.io" active = true credits = 42 tags = [ "data", "infrastructure", "hcl" ] } ``` ### Children Resources can have child resources. For example: ```hcl user "rotemtam" { .. project "atlas" { started = 2021 metadata { category = "data" } } } ``` ### References Attributes can hold references to other resources. The address of any resource is `<type>.<name>`, recursively.
Suppose we have this block describing some HTTP service: ```hcl service "todolist" { port "http" { number = 8080 } } ``` If we want to reference the child "port" resource of the service we can use `service.todolist.port.http`: ```hcl server "production" { endpoint "todo" { path = "/todo" service_port = service.todolist.port.http } } ``` Attributes can hold references to other attributes. When a document is parsed the reference is replaced with the referenced value. The address of any attribute is `<type>.<name>.<attribute>`. ```hcl show "seinfeld" { id = 1 } show "friends" { id = 2 } playlist "comedy" { show_ids = [ show.seinfeld.id, // will equal 1 show.friends.id, // will equal 2 ] } ``` ### Reading with Go To read an Atlas HCL document with Go use the `EvalBytes` ([doc](https://pkg.go.dev/ariga.io/atlas/schemahcl#EvalBytes)) function from the `schemahcl` package: ```go func ExampleUnmarshal() { f := ` show "seinfeld" { writer "jerry" { full_name = "Jerry Seinfeld" } writer "larry" { full_name = "Larry David" } }` type ( Writer struct { ID string `spec:",name"` FullName string `spec:"full_name"` } Show struct { Name string `spec:",name"` Writers []*Writer `spec:"writer"` } ) var test struct { Shows []*Show `spec:"show"` } err := EvalBytes([]byte(f), &test, nil) if err != nil { panic(err) } seinfeld := test.Shows[0] fmt.Printf("the show %q has %d writers.", seinfeld.Name, len(seinfeld.Writers)) // Output: the show "seinfeld" has 2 writers. } ``` This function takes a byte slice, an empty interface, and a map of strings as arguments. The empty interface should be a pointer to a struct into which the `EvalBytes` function will read the values. The struct fields must be annotated with `spec` tags that define the mapping from HCL to the Go type. This mapping is discussed in the section about [Extensions](#extensions). The final map argument may contain [Input Values](/atlas-schema/input.md) to be passed as parameters of the evaluation.
### Writing with Go To encode a Go struct back into HCL, use the `schemahcl.Marshal` ([doc](https://pkg.go.dev/ariga.io/atlas/schemahcl#Marshal)) function: ```go func ExampleMarshal() { type ( Point struct { ID string `spec:",name"` X int `spec:"x"` Y int `spec:"y"` } ) var test = struct { Points []*Point `spec:"point"` }{ Points: []*Point{ {ID: "start", X: 0, Y: 0}, {ID: "end", X: 1, Y: 1}, }, } b, err := Marshal(&test) if err != nil { panic(err) } fmt.Println(string(b)) // Output: point "start" { // x = 0 // y = 0 // } // point "end" { // x = 1 // y = 1 // } } ``` ## Extensions Applications working with the Atlas DDL are expected to extend the Atlas language by defining their own type structs that objects can be handled in a type-safe way. The mapping between the extension struct fields and the configuration syntax is done by placing tags on the extension struct field using the `spec` key in the tag. To specify that a field should be mapped to the corresponding resource's name specify ",name" to the tag value. For example, ```go type Point struct { ID string `spec:",name"` X int `spec:"x"` Y int `spec:"y"` } ``` Would be able to capture a Resource defined in Atlas HCL as: ```hcl point "origin" { x = 100 y = 200 } ``` To operate correctly, struct extensions should be registered using the `schemahcl.Register` function: ```go schemahcl.Register("point", &Point{}) ``` Extension structs may implement the [Remainer](https://pkg.go.dev/ariga.io/atlas/schemahcl#Remainer) interface if they wish to store any attributes and children that are not matched by their tagged fields. As a convenience the `schemahcl` package exports a `DefaultExtension` type that can be embedded to support this behavior. ### Qualifiers In cases where resources may need to be qualified, a field of the target struct can be annotated with the `,qualifier` tag. 
For instance this struct: ```go type Person struct { Name string `spec:",name"` Title string `spec:",qualifier"` } ``` Can capture a qualified HCL resource such as: ```hcl person "dr" "jekyll" { } ``` atlas-0.7.2/doc/md/guides/guides.md000066400000000000000000000005361431455511600170640ustar00rootroot00000000000000--- title: Guides id: guides slug: /guides --- # Guides Welcome to the Atlas Guides. ## Databases ### MySQL [Generated Columns](mysql/generated-columns.md) ### PostgreSQL [Serial Type Columns](postgres/serial-columns.md) [Partial Indexes](postgres/partial-indexes.md) ## Migration tools [`golang-migrate`](migration-tools/golang-migrate.md)atlas-0.7.2/doc/md/guides/migration-tools/000077500000000000000000000000001431455511600204055ustar00rootroot00000000000000atlas-0.7.2/doc/md/guides/migration-tools/golang-migrate.md000066400000000000000000000174131431455511600236320ustar00rootroot00000000000000--- id: golang-migrate title: Automatic migration planning for golang-migrate slug: /guides/migration-tools/golang-migrate --- ## TL;DR * [`golang-migrate`](https://github.com/golang-migrate/migrate) is a popular database migration CLI tool and Go library that's widely used in the Go community. * [Atlas](https://atlasgo.io) is an open-source tool for inspecting, planning, linting and executing schema changes to your database. * Developers using `golang-migrate` can use Atlas to automatically plan schema migrations for them, based on the desired state of their schema instead of crafting them by hand. ## Automatic migration planning for golang-migrate Atlas can automatically plan database schema migrations for developers using `golang-migrate`. Atlas plans migrations by calculating the diff between the _current_ state of the database, and it's _desired_ state. For golang-migrate users, the current state can be thought of as the sum of all _up_ migrations in a migration directory. 
The desired state can be provided to Atlas via an Atlas schema [HCL file](https://atlasgo.io/atlas-schema/sql-resources) or as a connection string to a database that contains the desired schema. In this guide, we will show how Atlas can automatically plan schema migrations for golang-migrate users. ## Prerequisites * An existing project with a `golang-migrate` migrations directory. * Docker * Atlas ([installation guide](https://atlasgo.io/getting-started/#installation)) ## Dev database To plan a migration from the current to the desired state, Atlas uses a [Dev Database](/concepts/dev-database), which is usually provided by a locally running container with an empty database of the type you work with (such as MySQL or PostgreSQL). To spin up a local MySQL database that will be used as a dev-database in our example, run: ```text docker run --rm --name atlas-db-dev -d -p 3306:3306 -e MYSQL_DATABASE=dev -e MYSQL_ROOT_PASSWORD=pass mysql:8 ``` As reference for the next steps, the URL for the Dev Database will be: ```text mysql://root:pass@localhost:3306/dev ``` ## Migration directory integrity To ensure migration history is correct while multiple developers work on the same project in parallel Atlas enforces [migration directory integrity](/concepts/migration-directory-integrity) using a file name `atlas.sum`. For this example, we will assume your migrations are stored in a directory named `migrations` in your current working directory: To generate this file run: ```text atlas migrate hash --dir file://migrations ``` Observe a new file named `atlas.sum` was created in your migrations directory which contains a hash sum of each file in your directory as well as a total sum. 
For example: ```text h1:y6Zf8kAu98N0jAR+yemZ7zT91nUyECLWzxxR7GHJIAg= 1_init.down.sql h1:0zpQpoUZcacEatOD+DYXgYD1XvfWUC7EM+agXIRzKRU= 1_init.up.sql h1:kOM+4u8UsYvvjQMFYAo2hDv5rbx3Mdbh9GvhmbpS0Ig= ``` ## Convert your migrations directory to an Atlas schema file With Atlas, users can describe their desired schema using an [HCL-based configuration language](https://atlasgo.io/atlas-schema/sql-resources). As a new user coming from an existing project, you may not want to learn this new language and prefer that Atlas will generate a schema file that reflects your existing schema. :::info If you want to read the desired state from a database instead of an Atlas HCL schema file, have a look [here](/guides/migration-tools/golang-migrate#alternative-use-an-existing-database-as-the-desired-state). ::: Let's see how you can get your current schema from `golang-migrate` to the Atlas schema format. 1\. Open a MySQL shell inside our running container: ```text docker exec -it atlas-db-dev mysql -ppass ``` 2\. Create a new database named `migrate-current`: ```text CREATE DATABASE `migrate-current` ``` The database is created successfully: ```text Query OK, 1 row affected (0.01 sec) ``` 3\. Close the shell and run `golang-migrate` to run all migrations on our new database: ```text migrate -source file://migrations -database 'mysql://root:pass@tcp(localhost:3306)/migrate-current' up ``` All migrations are executed successfully: ```text 1/u init (35.601678ms) ``` 4\. Next, use Atlas's `schema inspect` command to write a file named `schema.hcl` with an HCL representation of your migration directory. Notice that we exclude the `schema_migrations` table which contains `golang-migrate`'s revision history.
```text atlas schema inspect -u mysql://root:pass@localhost:3306/migrate-current --exclude '*.schema_migrations' > schema.hcl ``` Observe that a new file named `schema.hcl` was created: ```hcl table "users" { schema = schema.migrate-current column "id" { null = false type = int } column "name" { null = true type = varchar(100) } primary_key { columns = [column.id] } } schema "migrate-current" { charset = "utf8mb4" collate = "utf8mb4_0900_ai_ci" } ``` ## Plan a new migration Next, let's modify the desired state of our database by modifying the `schema.hcl` file to add a new table `blog_posts` that has a foreign key pointing to the existing `users` table: ```hcl table "blog_posts" { schema = schema.migrate-current column "id" { type = int } column "title" { type = varchar(255) } column "body" { type = text } column "author_id" { type = int } foreign_key "blog_author_fk" { columns = [column.author_id] ref_columns = [table.users.column.id] } } ``` Now, let's use Atlas's `migrate diff` command to plan a migration from the current state as it exists in the migrations directory to the desired state that is defined by the `schema.hcl` file: ```text atlas migrate diff --dir file://migrations --dev-url mysql://root:pass@localhost:3306/dev --to file://schema.hcl --dir-format golang-migrate add_blog_posts ``` Notice that we used the `dir-format` flag to specify that we're using `golang-migrate` as the directory format. Hooray! Two new files were created in the migrations directory: ```text {5-6} . 
├── migrations │ ├── 1_init.down.sql │ ├── 1_init.up.sql │ ├── 20220922123326_add_blog_posts.down.sql │ ├── 20220922123326_add_blog_posts.up.sql │ └── atlas.sum └── schema.hcl ``` An up migration: ```sql -- create "blog_posts" table CREATE TABLE `blog_posts` (`id` int NOT NULL, `title` varchar(255) NOT NULL, `body` text NOT NULL, `author_id` int NOT NULL, INDEX `blog_author_fk` (`author_id`), CONSTRAINT `blog_author_fk` FOREIGN KEY (`author_id`) REFERENCES `users` (`id`) ON UPDATE NO ACTION ON DELETE NO ACTION) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; ``` And a down migration: ```sql -- reverse: create "blog_posts" table DROP TABLE `blog_posts`; ``` ## Alternative: use an existing database as the desired state In some cases, it is convenient to use the schema of an existing database as the desired state for your project, instead of defining it in HCL. Atlas's `migrate diff` command can plan a migration from your current migration directory state to an existing schema. Suppose such a database was available at `mysql://root:pass@some.db.io:3306/db`, a migration to the state of that database could be planned by running: ```text atlas migrate diff --dir file://migrations --dev-url mysql://root:pass@localhost:3306/dev --to mysql://root:pass@some.db.io:3306/db --dir-format golang-migrate migration_name ``` ## Conclusion We began our demo by explaining how to set up a dev-database and `atlas.sum` file for your project. Next, we showed how to use Atlas's `schema inspect` command to extract the current desired schema of your project from an existing migration directory. Finally, we showed how to automatically plan a schema migration by modifying the desired schema definition and using Atlas's `migrate diff` command. Have questions? Feedback? Find our team [on our Discord server](https://discord.gg/zZ6sWVg6NT). 
atlas-0.7.2/doc/md/guides/mysql/000077500000000000000000000000001431455511600164235ustar00rootroot00000000000000atlas-0.7.2/doc/md/guides/mysql/generated-columns.md000066400000000000000000000213601431455511600223630ustar00rootroot00000000000000--- id: generated-columns title: Generated Columns in MySQL with Atlas slug: /guides/mysql/generated-columns --- **MySQL** is a popular open-source relational database. **Generated columns** are a feature of MySQL that allows you to define tables with columns whose value is a function of the value stored in other columns; without requiring complex expressions in `SELECT`, `INSERT` or `UPDATE` queries. ## What are Generated Columns? Generated columns are columns that contain values calculated by expressions which can be dependent on other columns; in a similar manner to formulas in a spreadsheet. There are two types of generated columns in MySQL: _Stored_ and _Virtual_. ### Stored Generated Columns Stored generated columns are stored and evaluated when a row is inserted or updated. As a result, stored generated columns use disk space in addition to CPU cycles during the execution of `INSERT` and `UPDATE` statements. ### Virtual Generated Columns Virtual generated columns are not stored, and only evaluated when a row is read _(after BEFORE [triggers](https://dev.mysql.com/doc/refman/5.7/en/trigger-syntax.html))_. As a result, virtual generated columns take no storage at the cost of CPU cycles for `SELECT` statements. ### Limitations of Generated Columns Generated column expressions must be deterministic which means that — given the same input — an expression must always produce the same output. As a result, generated columns can not be used with stored variables, functions, procedures, and subqueries; which could cause the output to be non-deterministic. Following this constraint, generated columns can not be used to generate random values. 
On the other hand, a generated column may reference any non-generated column _regardless_ of its position within the table row and any other generated column within the same table row, as long as those columns are declared before the generated column. ## When to use Generated Columns? Generated columns should be used whenever you want to create a column with a value that can be directly determined from the values of other columns in the same row. In simpler words, for data that is dependent on other data. This saves the developer from complex application code that is prone to errors on `SELECT`, `INSERT` and `UPDATE` statements. It also ensures that data which must be consistent, stays consistent. **MySQL Syntax for a Generated Column** ```sql column_name data_type [GENERATED ALWAYS] AS (expr) [VIRTUAL | STORED] [NOT NULL | NULL] [UNIQUE [KEY]] [[PRIMARY] KEY] [COMMENT 'string'] ``` ### Using Stored Generated Columns Stored generated columns should be used for data _(in a table)_ that is read more frequently than it is updated. This saves CPU cycles while reading rows _(via `SELECT`)_. Stored generated columns should also be used when you want to use the column in the table primary key or use it as a foreign key constraint. Alternatively, use stored generated columns as a cache for complex conditions that are costly to calculate. #### Example The following example declares a stored generated column in a table that stores the base and height of a triangle in the `base` and `height` column, then computes its area in `area` _(when triangles are inserted or updated)_. ```sql CREATE TABLE triangles ( base DOUBLE, height DOUBLE, area DOUBLE AS (base * height * 1/2) STORED ); ``` ### Using Virtual Generated Columns Virtual generated columns should be used for computed data _(in a table)_ that is updated more frequently than it is read or computed data that is expensive to store on disk _(via `INSERT` or `UPDATE`)_. 
Since values are calculated on the fly, virtual generated columns are perfect for table columns that will have a new value for every `SELECT` statement. If you use the _InnoDB Storage Engine_, secondary indexes can be defined on virtual columns. #### Example The following example declares a virtual generated column in a table that stores the price and amount of products sold in the `price` and `quantity` column, then computes its `revenue` _(when products are read)_. ```sql CREATE TABLE products ( price DOUBLE, quantity INT, revenue DOUBLE AS (price * quantity) VIRTUAL ); ``` ## Managing Generated Columns is easy with Atlas Managing generated columns and database schemas in MySQL is confusing and error-prone. [Atlas](https://atlasgo.io) is an open-source tool that allows you to manage your database using a simple declarative syntax (similar to Terraform). Instead of creating complex SQL statements that break upon schema migration, we will implement generated columns using Atlas. ### Getting started with Atlas Install the latest version of Atlas using the [Guide to Setting Up Atlas](/cli/getting-started/setting-up). ### Generated Column Syntax in Atlas Use `as` in a column in a table to declare a MySQL generated column. For examples with other databases, read the [Atlas Generated Columns DDL](/atlas-schema/sql.mdx#generated-columns). ```hcl column "name" { type = data_type as { expr = expression type = [STORED | VIRTUAL] } } ``` ### Implementing Stored Generated Columns with Atlas The following example declares a stored generated column in a table that stores the lengths of the sides of right-triangles in the `a` and `b` column, then computes the hypotenuse in `c` _(when triangles are inserted or updated)_. 
```hcl table "triangles" { schema = schema.example column "a" { type = numeric } column "b" { type = numeric } column "hypotenuse" { type = numeric as { expr = "SQRT(a * a + b * b)" type = STORED } } } ``` Guarantee the table is created by applying the schema to the database. ``` atlas schema apply -u "mysql://root:pass@localhost:3306/example" -f atlas.hcl ``` Approve the schema migration plan that Atlas creates for you _(if applicable)_. ``` -- Planned Changes: -- Create "triangles" table CREATE TABLE "example"."triangles" ("a" numeric NOT NULL, "b" numeric NOT NULL, "hypotenuse" numeric NOT NULL GENERATED ALWAYS AS (SQRT(a * a + b * b)) STORED) Use the arrow keys to navigate: ↓ ↑ → ← ? Are you sure?: > Apply Abort ``` Insert triangles into the table. ```sql INSERT INTO triangles (a, b) VALUES (1,1); INSERT INTO triangles (a, b) VALUES (3,4); INSERT INTO triangles (a, b) VALUES (6,8); ``` Select all the triangles in the table using `SELECT * FROM triangles` to receive a table with the following output. | a | b | hypotenuse | | :--- | :--- | :----------------- | | 1 | 1 | 1.4142135623730951 | | 3 | 4 | 5 | | 6 | 8 | 10 | ### Implementing Virtual Generated Columns with Atlas The following example declares a virtual generated column in a table that stores the first and last name of a person, and computes the full name of the person _(when people are selected)_. ```hcl table "people" { schema = schema.example column "first_name" { type = varchar(255) } column "last_name" { type = varchar(255) } column "full_name" { type = varchar(255) as { expr = "first_name + ' ' + last_name" type = VIRTUAL } } } ``` Alternatively, use the default type of generated column _(VIRTUAL in MySQL)_.
```hcl table "people" { schema = schema.example column "first_name" { type = varchar(255) } column "last_name" { type = varchar(255) } column "full_name" { type = varchar(255) as = "first_name + ' ' + last_name" } } ``` Approve the schema migration plan that Atlas creates for you _(if applicable)_. ``` -- Planned Changes: -- Create "people" table CREATE TABLE "example"."people" ("first_name" character varying(255) NOT NULL, "last_name" character varying(255) NOT NULL, "full_name" character varying(255) NOT NULL GENERATED ALWAYS AS (first_name + ' ' + last_name) VIRTUAL) Use the arrow keys to navigate: ↓ ↑ → ← ? Are you sure?: > Apply Abort ``` Insert people into the table. ```sql INSERT INTO people (first_name, last_name) VALUES ("Bob", "Bark"); INSERT INTO people (first_name, last_name) VALUES ("Kat", "Meow"); INSERT INTO people (first_name, last_name) VALUES ("Ty", "Garoar"); ``` Select all the people in the table using `SELECT * FROM people` to receive a table with the following output. | first_name | last_name | full_name | | :--------- | :-------- | :-------- | | Bob | Bark | Bob Bark | | Kat | Meow | Kat Meow | | Ty | Garoar | Ty Garoar | ## Need More Help? [Join the Ariga Discord Server](https://discord.gg/zZ6sWVg6NT) for early access to features and the ability to provide exclusive feedback that improves your Database Management Tooling. atlas-0.7.2/doc/md/guides/postgres/000077500000000000000000000000001431455511600171245ustar00rootroot00000000000000atlas-0.7.2/doc/md/guides/postgres/partial-indexes.md000066400000000000000000000273051431455511600225460ustar00rootroot00000000000000--- id: partial-indexes title: Partial Indexes in PostgreSQL slug: /guides/postgres/partial-indexes --- ### Overview of Partial Indexes #### What are Partial Indexes? With PostgreSQL, users may create _partial indexes_, which are types of indexes that exist on a subset of a table, rather than the entire table itself. 
If used correctly, partial indexes improve performance and reduce costs, all while minimizing the amount of storage space they take up on the disk. #### Why do we need them? Let's demonstrate a case where partial indexes may be useful by contrasting them with a non-partial index. If you have many records in an indexed table, the number of records the index needs to track also grows. If the index grows in size, the disk space needed to store the index itself increases as well. In many tables, different records are not accessed with uniform frequency. A subset of a table's records might not be searched very frequently or not searched at all. Records take up precious space in your index whether they are queried or not, and are updated when a new entry is added to the field. Partial indexes come into the picture to filter unsearched values and give you, as an engineer, a tool to index only what's important. :::info You can learn more about partial indexes in PostgreSQL [here](https://www.postgresql.org/docs/current/indexes-partial.html) ::: #### Advantages of using Partial Indexes In cases where we know ahead of time the access pattern to a table and can reduce the size of an index by making it partial: 1. Response time for SELECT operations is improved because the database searches through a smaller index. 2. On average, response time for UPDATE operations is also improved as the index is not going to get updated in all cases. 3. Index is smaller in size and can fit into RAM more easily. 4. Less space is required to store the index on disk.
#### Basic PostgreSQL syntax for using Partial Index ```sql CREATE INDEX index_name ON table_name(column_list) WHERE condition; ``` #### Example of Non-partial Index vs Partial Index in PostgreSQL Let's see this in action by creating a table with the following command: ```sql CREATE TABLE "vaccination_data" ( id SERIAL PRIMARY KEY, country varchar(20), title varchar(10), names varchar(20), vaccinated varchar(3) ); ``` Here is how a portion of the table might look like after inserting values: ```sql SELECT * FROM vaccination_data; ``` ```console title="Output" id | country | title | names | vaccinated -----+--------------------+-------+-------------+------------ 1 | Poland | Mr. | Teagan | No 2 | Ukraine | Ms. | Alden | No 3 | Ukraine | Mr. | Ima | No 4 | Colombia | Mr. | Lawrence | Yes 5 | Turkey | Mrs. | Keegan | No 6 | China | Mrs. | Kylan | No 7 | Netherlands | Dr. | Howard | No ... 289690 | Russian Federation | Mrs. | Ray | Yes 289689 | Austria | Dr. | Lenore | Yes 289688 | Sweden | Dr. | Walker | Yes 289687 | Turkey | Dr. | Emerson | No 289686 | Vietnam | Dr. | Addison | Yes (289686 rows) ``` In the following example, suppose we want a list of doctors from India that have taken the vaccine. 
If we want to use normal index, we can create it on the “vaccinated” column with the following command: ```sql CREATE INDEX vaccinated_idx ON vaccination_data(vaccinated); ``` ```console title="Output" CREATE INDEX Time: 333.891 ms ``` Now, let's check the performance of querying data of doctors from India that have taken the vaccine with the following command: ```sql EXPLAIN ANALYZE SELECT * FROM vaccination_data WHERE vaccinated = 'Yes' AND country = 'India' AND title = 'Dr.'; ``` ```console title="Output" QUERY PLAN --------------------------------------------------------------------------- Bitmap Heap Scan on vaccination_data (cost=758.64..4053.40 rows=699 width=25) (actual time=4.142..16.212 rows=582 loops=1) Recheck Cond: ((vaccinated)::text = 'Yes'::text) Filter: (((country)::text = 'India'::text) AND ((title)::text = 'Dr.'::text)) Rows Removed by Filter: 69334 Heap Blocks: exact=1337 -> Bitmap Index Scan on vaccinated_idx (cost=0.00..758.46 rows=69072 width=0) (actual time=3.940 ..3.941 rows=69916 loops=1) Index Cond: ((vaccinated)::text = 'Yes'::text) Planning Time: 0.188 ms Execution Time: 16.292 ms (9 rows) ``` :::info The EXPLAIN command is used for understanding the performance of a query. You can learn more about usage of EXPLAIN command with ANALYZE option [here](https://www.postgresql.org/docs/14/using-explain.html#USING-EXPLAIN-ANALYZE) ::: Notice that total Execution Time is 16.292ms. Also, let's check the index size with the following command: ```sql SELECT pg_size_pretty(pg_relation_size('vaccinated_idx')); ``` ```console title="Output" pg_size_pretty ---------------- 1984 kB (1 row) ``` Now, suppose we want to accelerate the same query using the partial index. 
Let's begin by dropping the existing index that we created earlier: ```sql DROP INDEX vaccinated_idx; ``` ```console title="Output" DROP INDEX Time: 7.183 ms ``` In the following command, we have created an index with a WHERE clause that precisely describes list of doctors from India that have taken the vaccine. ```sql CREATE INDEX vaccinated_idx ON vaccination_data(vaccinated) WHERE vaccinated = 'Yes' AND country = 'India' AND title = 'Dr.'; ``` ```console title="Output" CREATE INDEX Time: 94.567 ms ``` Notice that the partial index with the WHERE clause is created in 94.567ms, compared to the 333.891ms taken for the non-partial index on the 'vaccinated' column. Let's check the performance of querying list of doctors from India that have taken the vaccine again, using the following command: ```sql EXPLAIN ANALYZE SELECT * FROM vaccination_data WHERE vaccinated = 'Yes' AND country = 'India' AND title = 'Dr.'; ``` ```console title="Output" QUERY PLAN --------------------------------------------------------------------------- Index Scan using vaccinated_idx on vaccination_data (cost=0.15..1455.12 rows=699 width=25) (actual time=0.015..0.704 rows=582 loops=1) Planning Time: 0.442 ms Execution Time: 0.880 ms (3 rows) ``` Observe that total execution time has dropped significantly and is now only 0.880ms, compared to 16.292ms achieved by using a non-partial index on the 'vaccinated' column. Once again, let's check the index size with the following command: ```sql SELECT pg_size_pretty(pg_relation_size('vaccinated_idx')); ``` ```console title="Output" pg_size_pretty ---------------- 16 kB (1 row) ``` As we can observe, the index size for the partial index takes significantly less space (16kb) compared to the non-partial index that we created earlier on the 'vaccinated' column (1984kb). 
Here is a summary from our tests: | Parameter | Non-partial Index | Partial Index | Ratio of change(%) | |:-------------------------------------------- |:----------------------- |:----------------------- |:------------------ | | Estimated start-up cost | 758.64 arbitrary units | 0.15 arbitrary units | 99.9% reduced cost | | Estimated total cost | 4053.40 arbitrary units | 1455.12 arbitrary units | 64.1% reduced cost | | Time to create index | 333.891ms | 94.567ms | 71.6% less time | | Execution time for query with “WHERE” clause | 16.292ms | 0.880ms | 94.5% less time | | Size of index | 1984kb | 16kb | 99.1% less space | (Note: The results will vary, ​​depending on the data that is stored in the database) We have seen that creating a partial index is a better choice where only a small subset of the values stored in the database are accessed frequently. Now, let's see how we can easily manage partial indexes using Atlas. ### Managing Partial Indexes is easy with Atlas Managing partial indexes and database schemas in PostgreSQL can be confusing and error-prone. Atlas is an open-source project which allows us to manage our database using a simple and easy-to-understand declarative syntax (similar to Terraform). We will now learn how to manage partial indexes using Atlas. :::info If you are just getting started, install the latest version of Atlas using the guide to [setting up Atlas](https://atlasgo.io/cli/getting-started/setting-up). 
::: #### Managing Partial Index in Atlas We will first use the `atlas schema inspect` command to get an HCL representation of the table which we created earlier by using the Atlas CLI: ```console atlas schema inspect -u "postgres://postgres:mysecretpassword@localhost:5432/vaccination_data?sslmode=disable" > schema.hcl ``` ```hcl title="schema.hcl" table "vaccination_data" { schema = schema.public column "id" { null = false type = serial } column "country" { null = true type = character_varying(20) } column "title" { null = true type = character_varying(10) } column "names" { null = true type = character_varying(20) } column "vaccinated" { null = true type = character_varying(3) } primary_key { columns = [column.id] } } schema "public" { } ``` Now, let's add the following index definition to the file: ```hcl index "vaccinated_idx" { columns = [column.vaccinated] where = "(vaccinated::text = 'Yes'::text AND country::text = 'India'::text AND title::text = 'Dr.'::text)" } ``` Save and apply the schema changes on the database by using the following command: ```console atlas schema apply -u "postgres://postgres:mysecretpassword@localhost:5432/vaccination_data?sslmode=disable" -f schema.hcl ``` Atlas generates the necessary SQL statements to add the new partial index to the database schema. 
Press Enter while the `Apply` option is highlighted to apply the changes: ```console -- Planned Changes: -- Create index "vaccinated_idx" to table: "vaccination_data" CREATE INDEX "vaccinated_idx" ON "public"."vaccination_data" ("vaccinated") WHERE (vaccinated::text = 'Yes'::text AND country::text = 'India'::text AND title::text = 'Dr.'::text) ✔ Apply Abort ``` To verify that our new index was created, open the database command line tool from previous step and run: ```sql SELECT indexname, indexdef FROM pg_indexes WHERE tablename = 'vaccination_data'; ``` ```console title="Output" [ RECORD 1 ] indexname | vaccinated_idx indexdef | CREATE INDEX vaccinated_idx ON public.vaccination_data USING btree (vaccinated) WHERE (((vaccinated)::text = 'Yes'::text) AND ((country)::text = 'India'::text) AND ((title)::text = 'Dr.'::text)) ``` Amazing! Our new partial index is now created! ### Limitation of using Partial Index Partial indexes are useful in cases where we know ahead of time that a table is most frequently queried with a certain `WHERE` clause. As applications evolve, access patterns to the database also change. Consequently, we may find ourselves in a situation where our index no longer covers many queries, causing them to become resource consuming and slow. ### Conclusion In this section, we learned about PostgreSQL partial indexes and how we can easily create partial indexes in our database by using Atlas. ## Need More Help?​ [Join the Ariga Discord Server](https://discord.gg/zZ6sWVg6NT) for early access to features and the ability to provide exclusive feedback that improves your Database Management Tooling.atlas-0.7.2/doc/md/guides/postgres/serial-columns.md000066400000000000000000000157221431455511600224120ustar00rootroot00000000000000--- id: serial-columns title: Serial Type Columns in PostgreSQL slug: /guides/postgres/serial-columns --- PostgreSQL allows creating columns of types `smallserial`, `serial`, and `bigserial`. 
These types are not _actual_ types, but more like "macros" for creating non-nullable integer columns with sequences attached. We can see this in action by creating a table with 3 "serial columns": ```sql CREATE TABLE serials( c1 smallserial, c2 serial, c3 bigserial ); ``` ```sql title="Serials Description" Column | Type | Nullable | Default --------+----------+----------+------------------------------- c1 | smallint | not null | nextval('t_c1_seq'::regclass) c2 | integer | not null | nextval('t_c2_seq'::regclass) c3 | bigint | not null | nextval('t_c3_seq'::regclass) ``` As you can see, each serial column was created as non-nullable integer with a default value set to the next sequence value. :::info Note that `nextval` increments the sequence by 1 and returns its value. Thus, the first call to `nextval('serials_c1_seq')` returns 1, the second returns 2, etc. ::: ### `ALTER COLUMN` type to serial Sometimes it is necessary to change the column type from `integer` type to `serial`. However, as mentioned above, the `serial` type is not a true type, and therefore, the following commands will fail: ```sql CREATE TABLE t( c integer not null primary key ); ALTER TABLE t ALTER COLUMN c TYPE serial; // highlight-next-line-error-message ERROR: type "serial" does not exist ``` We can achieve this by manually creating a [sequence](https://www.postgresql.org/docs/current/sql-createsequence.html) owned by the column `c`, and setting the column `DEFAULT` value to the incremental counter of the sequence using the [`nextval`](https://www.postgresql.org/docs/current/functions-sequence.html) function. :::note Note that it is recommended to follow the PostgreSQL naming format (i.e. `
<table>_<column>_seq`) when creating the sequence as some database tools know to detect such columns as "serial columns". ::: ```sql -- Create the sequence. CREATE SEQUENCE "public"."t_c_seq" OWNED BY "public"."t"."c"; -- Assign it to the table default value. ALTER TABLE "public"."t" ALTER COLUMN "c" SET DEFAULT nextval('"public"."t_c_seq"'); ``` ### Update the sequence value When a sequence is created, its value starts from 0 and the first call to `nextval` returns 1. Thus, in case the column `c` from the example above already contains values, we may face a constraint error on insert when the sequence number reaches the minimum value of `c`. Let's see an example: ```sql SELECT "c" FROM "t"; // highlight-start c --- 2 3 // highlight-end -- Works! INSERT INTO "t" DEFAULT VALUES; -- Fails! INSERT INTO "t" DEFAULT VALUES; // highlight-next-line-error-message ERROR: duplicate key value violates unique constraint "t_pkey" // highlight-next-line-error-message DETAIL: Key (c)=(2) already exists. ``` We can work around this by setting the sequence current value to the maximum value of `c`, so the following call to `nextval` will return `MAX(c)+1`, the one after `MAX(c)+2`, and so on. ```sql SELECT setval('"public"."t_c_seq"', (SELECT MAX("c") FROM "t")); // highlight-start setval -------- 3 // highlight-end -- Works! INSERT INTO "t" DEFAULT VALUES; SELECT "c" FROM "t"; // highlight-start c --- 2 3 4 // highlight-end ``` ### Managing Serial Columns with Atlas Atlas makes it easier to define and manipulate columns of `serial` types. 
Let's use the [`atlas schema inspect`](../../reference.md#atlas-schema-inspect) command to get a representation of the table we created above in the Atlas HCL format : ```console atlas schema inspect -u "postgres://postgres:pass@:5432/test?sslmode=disable" > schema.hcl ``` ```hcl title="schema.hcl" table "t" { schema = schema.public column "c" { null = false type = serial } primary_key { columns = [column.c] } } schema "public" { } ``` After inspecting the schema, we can modify it to demonstrate Atlas's capabilities in migration planning: #### Change a column type from `serial` to `bigserial` ```hcl title="schema.hcl" table "t" { schema = schema.public column "c" { null = false // highlight-start type = bigserial // highlight-end } primary_key { columns = [column.c] } } schema "public" { } ``` Next, running `schema apply` will plan and execute the following changes: ```console atlas schema apply -u "postgres://postgres:pass@:5432/test?sslmode=disable" -f schema.hcl -- Planned Changes: -- Modify "t" table // highlight-next-line-info ALTER TABLE "public"."t" ALTER COLUMN "c" TYPE bigint ✔ Apply ``` As you can see, Atlas detected that only the underlying integer type was changed as `serial` maps to `integer` and `bigserial` maps to `bigint`. 
#### Change a column type from `bigserial` to `bigint` ```hcl title="schema.hcl" table "t" { schema = schema.public column "c" { null = false // highlight-start type = bigint // highlight-end } primary_key { columns = [column.c] } } schema "public" { } ``` After changing column `c` to `bigint`, we can run `schema apply` and let Atlas plan and execute the new changes: ```console atlas schema apply -u "postgres://postgres:pass@:5432/test?sslmode=disable" -f schema.hcl -- Planned Changes: -- Modify "t" table // highlight-next-line-info ALTER TABLE "public"."t" ALTER COLUMN "c" DROP DEFAULT -- Drop sequence used by serial column "c" // highlight-next-line-info DROP SEQUENCE IF EXISTS "public"."t_c_seq" ✔ Apply ``` As you can see, Atlas dropped the `DEFAULT` value that was created by the `serial` type, and in addition removed the sequence that was attached to it, as it is no longer used by the column. #### Change a column type from `bigint` to `serial` ```hcl title="schema.hcl" table "t" { schema = schema.public column "c" { null = false // highlight-start type = serial // highlight-end } primary_key { columns = [column.c] } } schema "public" { } ``` Changing a column type from `bigint` to `serial` requires 3 changes: 1. Create a sequence named `t_c_seq` owned by `c`. 2. Set the `DEFAULT` value of `c` to `nextval('"public"."t_c_seq"')`. 3. Alter the column type, as `serial` maps to `integer` (!= `bigint`). 
We call [`atlas schema apply`](../../reference.md#atlas-schema-apply) to plan and execute this three step process with Atlas: ```console atlas schema apply -u "postgres://postgres:pass@:5432/test?sslmode=disable" -f schema.hcl -- Planned Changes: -- Create sequence for serial column "c" // highlight-next-line-info CREATE SEQUENCE IF NOT EXISTS "public"."t_c_seq" OWNED BY "public"."t"."c" -- Modify "t" table // highlight-next-line-info ALTER TABLE "public"."t" ALTER COLUMN "c" SET DEFAULT nextval('"public"."t_c_seq"'), ALTER COLUMN "c" TYPE integer ✔ Apply ``` ## Need More Help? [Join the Ariga Discord Server](https://discord.gg/zZ6sWVg6NT) for early access to features and the ability to provide exclusive feedback that improves your Database Management Tooling. atlas-0.7.2/doc/md/integrations/000077500000000000000000000000001431455511600165045ustar00rootroot00000000000000atlas-0.7.2/doc/md/integrations/github-actions.mdx000066400000000000000000000150731431455511600221440ustar00rootroot00000000000000--- title: GitHub Actions id: github-actions slug: /integrations/github-actions --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; [GitHub Actions](https://github.com/features/actions) is a popular CI/CD product from GitHub. With GitHub Actions, users can easily define workflows that are triggered in various lifecycle events related to a Git repository. For example, many teams configure GitHub actions to run all unit tests in a repository on each change that is committed to a repository. One of the powerful features of GitHub Actions is its extensibility: it is very easy to package a piece of functionality as a module (called an "action") that can later be re-used by many projects. ## CI for database schema changes Teams using GitHub that wish to ensure all changes to their database schema are safe can use the [`atlas-action`](https://github.com/ariga/atlas-action) GitHub Action. 
This action is used for [linting migration directories](/versioned/lint) using the `atlas migrate lint` command. This command validates and analyzes the contents of migration directories and generates insights and diagnostics on the selected changes: * Ensure the migration history can be replayed from any point at time. * Protect from unexpected history changes when concurrent migrations are written to the migration directory by multiple team members. Read more about the consistency checks in the section below. * Detect whether destructive or irreversible changes have been made or whether they are dependent on tables' contents and can cause a migration failure. ### Supported directory formats This action supports analyzing migration directories in formats accepted by different schema migration tools: * [Atlas](https://atlasgo.io) * [golang-migrate](https://github.com/golang-migrate/migrate) * [goose](https://github.com/pressly/goose) * [dbmate](https://github.com/amacneil/dbmate) ### Usage Add `.github/workflows/atlas-ci.yaml` to your repo with the following contents: ```yaml name: Atlas CI on: # Run whenever code is changed in the master branch, # change this to your root branch. push: branches: - master # Run on PRs where something changed under the `path/to/migration/dir/` directory. pull_request: paths: - 'path/to/migration/dir/*' jobs: lint: services: # Spin up a mysql:8.0.29 container to be used as the dev-database for analysis. mysql: image: mysql:8.0.29 env: MYSQL_ROOT_PASSWORD: pass MYSQL_DATABASE: test ports: - "3307:3306" options: >- --health-cmd "mysqladmin ping -ppass" --health-interval 10s --health-start-period 10s --health-timeout 5s --health-retries 10 runs-on: ubuntu-latest steps: - uses: actions/checkout@v3.0.1 with: fetch-depth: 0 # Mandatory unless "latest" is set below. 
- uses: ariga/atlas-action@v0 with: dir: path/to/migrations dir-format: golang-migrate # Or: atlas, goose, dbmate dev-url: mysql://root:pass@localhost:3307/test ``` ```yaml name: Atlas CI on: # Run whenever code is changed in the master branch, # change this to your root branch. push: branches: - master # Run on PRs where something changed under the `path/to/migration/dir/` directory. pull_request: paths: - 'path/to/migration/dir/*' jobs: lint: services: # Spin up a maria:10.7 container to be used as the dev-database for analysis. maria107: image: mariadb:10.7 env: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass ports: - 4306:3306 options: >- --health-cmd "mysqladmin ping -ppass" --health-interval 10s --health-start-period 10s --health-timeout 5s --health-retries 10 runs-on: ubuntu-latest steps: - uses: actions/checkout@v3.0.1 with: fetch-depth: 0 # Mandatory unless "latest" is set below. - uses: ariga/atlas-action@v0 with: dir: path/to/migrations dir-format: golang-migrate # Or: atlas, goose, dbmate dev-url: maria://root:pass@localhost:4306/test ``` ```yaml name: Atlas CI on: # Run whenever code is changed in the master branch, # change this to your root branch. push: branches: - master # Run on PRs where something changed under the `path/to/migration/dir/` directory. pull_request: paths: - 'path/to/migration/dir/*' jobs: lint: services: # Spin up a postgres:10 container to be used as the dev-database for analysis. postgres10: image: postgres:10 env: POSTGRES_DB: test POSTGRES_PASSWORD: pass ports: - 5430:5432 options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 runs-on: ubuntu-latest steps: - uses: actions/checkout@v3.0.1 with: fetch-depth: 0 # Mandatory unless "latest" is set below. 
- uses: ariga/atlas-action@v0 with: dir: path/to/migrations dir-format: golang-migrate # Or: atlas, goose, dbmate dev-url: postgres://postgres:pass@localhost:5430/test?sslmode=disable ``` ### Configuration Configure the action by passing input parameters in the `with:` block. #### `dir` Sets the directory that contains the migration scripts to analyze. #### `dir-format` Sets the format of the migration directory. Options: `atlas` (default), `golang-migrate`, `goose` or `dbmate`. Coming soon: `flyway`, `liquibase`. #### `dev-url` The URL of the dev-database to use for analysis. * Read about [Atlas URL formats](/concepts/url) * Read about [dev-databases](/concepts/dev-database) #### `latest` Use the `latest` mode to decide which files to analyze. By default, Atlas will use `git-base` to analyze any files that are present in the diff between the base branch and the current. Unless this option is set, the base branch (`master`/`main`/etc) must be checked out locally or you will see an error such as: ``` Atlas failed with code 1: Error: git diff: exit status 128 ``` The full list of input options can be found in [action.yml](https://github.com/ariga/atlas-action/blob/master/action.yml).atlas-0.7.2/doc/md/integrations/go-api.md000066400000000000000000000125171431455511600202100ustar00rootroot00000000000000--- title: Go API id: go-api slug: /integrations/go-api --- In addition to using Atlas as a CLI tool, all of Atlas's core-engine capabilities are available as a [Go module](https://pkg.go.dev/ariga.io/atlas) that you can use programmatically. This guide provides high-level documentation on how to use Atlas from within Go programs. ## Installation To install Atlas, use: ```shell go get ariga.io/atlas@latest ``` This installs the latest release of Atlas. If you would like to get the most recent version from the `master` branch, use: ```shell go get ariga.io/atlas@master ``` ## Drivers Atlas currently supports three core capabilities for working with SQL schemas. 
* "Inspection" - Connecting to a database and understanding its schema. * "Diff" - Comparing two schemas and producing a set of changes needed to reconcile the target schema to the source schema. * "Apply" - Creating a concrete set of SQL queries to migrate the target database. The implementation details for these capabilities vary greatly between the different SQL databases. Atlas currently has three supported drivers: * MySQL (+MariaDB, TiDB) * PostgreSQL * SQLite Atlas drivers build on top of the standard library [`database/sql`](https://pkg.go.dev/database/sql) package. To initialize the different drivers, we need to initialize a `sql.DB` and pass it to the Atlas driver constructor. For example: ```go package main import ( "database/sql" "log" "testing" _ "github.com/mattn/go-sqlite3" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlite" ) func Test(t *testing.T) { // Open a "connection" to sqlite. db, err := sql.Open("sqlite3", "file:example.db?cache=shared&_fk=1&mode=memory") if err != nil { log.Fatalf("failed opening db: %s", err) } // Open an atlas driver. driver, err := sqlite.Open(db) if err != nil { log.Fatalf("failed opening atlas driver: %s", err) } // ... do stuff with the driver } ``` ## Inspection Inspection is one of Atlas's core capabilities. When we say "inspection" in the context of this project we mean the process of connecting to an existing database, querying its metadata tables to understand the structure of the different tables, types, extensions, etc. Databases vary greatly in the API they provide users to understand a specific database's schema, but Atlas goes to great lengths to abstract these differences and provide a unified API for inspecting databases. Consider the `Inspector` interface in the [sql/schema](https://pkg.go.dev/ariga.io/atlas@v0.3.2/sql/schema#Inspector) package: ```go // Inspector is the interface implemented by the different database // drivers for inspecting multiple tables. 
type Inspector interface { // InspectSchema returns the schema description by its name. An empty name means the // "attached schema" (e.g. SCHEMA() in MySQL or CURRENT_SCHEMA() in PostgreSQL). // A NotExistError error is returned if the schema does not exists in the database. InspectSchema(ctx context.Context, name string, opts *InspectOptions) (*Schema, error) // InspectRealm returns the description of the connected database. InspectRealm(ctx context.Context, opts *InspectRealmOption) (*Realm, error) } ``` As you can see, the `Inspector` interface provides methods for inspecting on different levels: * `InspectSchema` - provides inspection capabilities for a single schema within a database server. * `InspectRealm` - inspects the entire connected database server. Each database driver (for example [MySQL](https://pkg.go.dev/ariga.io/atlas@master/sql/mysql#Driver), [Postgres](https://pkg.go.dev/ariga.io/atlas@master/sql/postgres#Driver) or [SQLite](https://pkg.go.dev/ariga.io/atlas@master/sql/sqlite#Driver)) implements this interface. Let's see how we can use this interface by inspecting a "dummy" SQLite database. ```go func TestInspect(t *testing.T) { // ... skipping driver creation ctx := context.Background() // Create an "example" table for Atlas to inspect. _, err = db.ExecContext(ctx, "create table example ( id int not null );") if err != nil { log.Fatalf("failed creating example table: %s", err) } // Open an atlas driver. driver, err := sqlite.Open(db) if err != nil { log.Fatalf("failed opening atlas driver: %s", err) } // Inspect the created table. 
sch, err := driver.InspectSchema(ctx, "main", &schema.InspectOptions{ Tables: []string{"example"}, }) if err != nil { log.Fatalf("failed inspecting schema: %s", err) } tbl, ok := sch.Table("example") require.True(t, ok, "expected to find example table") require.EqualValues(t, "example", tbl.Name) id, ok := tbl.Column("id") require.True(t, ok, "expected to find id column") require.EqualValues(t, &schema.ColumnType{ Type: &schema.IntegerType{T: "int"}, // An integer type, specifically "int". Null: false, // The column has NOT NULL set. Raw: "INT", // The raw type inspected from the DB. }, id.Type) } ``` In this example, we first created a table named "example" by executing a query directly against the database. We next used the driver's `InspectSchema` method to inspect the schema of the table we created. Finally, we made some assertions on the returned `schema.Table` instance to verify that it was inspected correctly. atlas-0.7.2/doc/md/integrations/terraform.md000066400000000000000000000071111431455511600210270ustar00rootroot00000000000000--- title: Terraform Provider id: terraform-provider slug: /integrations/terraform-provider --- ## Introduction The official [Atlas Terraform provider](https://registry.terraform.io/providers/ariga/atlas/latest) allows you to use Atlas with Terraform to manage your database schemas as part of you Infrastructure-as-Code (Iac) workflow . Read about the release announcement [here](https://atlasgo.io/blog/2022/05/04/announcing-terraform-provider). 
* [Documentation](https://registry.terraform.io/providers/ariga/atlas/latest/docs) * [GitHub Repository](https://github.com/ariga/terraform-provider-atlas) ## Installation Add Atlas to your [required providers](https://www.terraform.io/language/providers/requirements#requiring-providers): ```hcl terraform { required_providers { atlas = { source = "ariga/atlas" version = "~> 0.1.0" } } } ``` ## Basic example Currently, the Atlas Terraform provider uses an [HCL file](/atlas-schema/sql.mdx) to describe the desired state of the database, and performs migrations according to the state difference between the HCL file and the target database. To use the Terraform provider, you will need such a file. If you are working against a fresh, empty database, start by creating a file named `schema.hcl` that only contains a single [`schema`](/atlas-schema/sql.mdx#schema) resource. If your database contains a schema (named database) that is named `example`, use something like: ```hcl schema "example" { // Basic charset and collation for MySQL. charset = "utf8mb4" collate = "utf8mb4_0900_ai_ci" } ``` For instructions on using a database with an existing schema, [see below](#working-with-an-existing-database) ### Configure Terraform Use the following configuration to apply the HCL file `schema.hcl` onto a target MySQL database (but you can specify any of the [supported databases](https://github.com/ariga/atlas#supported-databases)): ```hcl title="main.tf" provider "atlas" {} // Load (and normalize) the desired schema from an HCL file. data "atlas_schema" "market" { dev_db_url = "mysql://root:pass@localhost:3307/market" src = file("${path.module}/schema.hcl") } // Sync the state of the target database with the hcl file. 
resource "atlas_schema" "market" { hcl = data.atlas_schema.market.hcl url = "mysql://root:pass@localhost:3306/market" dev_db_url = "mysql://root:pass@localhost:3307/market" } ``` For the full documentation and examples of the provider visit the [registry page](https://registry.terraform.io/providers/ariga/atlas/latest/docs). ## Working with an existing database When you first run the Atlas Terraform Provider on a database, the database's state isn't yet present in Terraform's representation of the world (described in the [Terraform State](https://www.terraform.io/language/state)). To prevent a situation where Terraform accidentally creates a plan that includes the deletion of resources (such as tables or schemas) that exist in your database on this initial run, make sure that the HCL file that you pass to the `atlas_schema` resources is up-to-date. Luckily, you do not need to write this file by hand. The Atlas CLI's [`schema inspect` command](https://atlasgo.io/cli-reference#atlas-schema-inspect) can do this for you. To inspect an existing database and write its HCL representation to a file simply run: ``` atlas schema inspect -u > ``` Replacing `` with the [URL](/concepts/url) for your database, and `` with the name of the file you want to write the output to. For example: ``` atlas schema inspect -u mysql://user:pass@localhost:3306 > schema.hcl ``` atlas-0.7.2/doc/md/lint/000077500000000000000000000000001431455511600147445ustar00rootroot00000000000000atlas-0.7.2/doc/md/lint/analyzers.md000066400000000000000000000214061431455511600173010ustar00rootroot00000000000000--- title: Migration Analyzers slug: /lint/analyzers --- The database is often the most critical component in software architectures. Being a stateful component, it cannot be easily rebuilt, scaled-out or fixed by a restart. 
Outages that involve damage to data or simply unavailability of the database are notoriously hard to manage and recover from, often taking long hours of careful work by a team's most senior engineers. As most outages happen directly as a result of a change to a system, Atlas provides users with means to verify the safety of planned changes before they happen. The [`sqlcheck`](https://pkg.go.dev/ariga.io/atlas@master/sql/sqlcheck) package provides interfaces for analyzing the contents of SQL files to generate insights on the safety of many kinds of changes to database schemas. With this package developers may define an `Analyzer` that can be used to diagnose the impact of SQL statements on the target database. Using these interfaces, Atlas provides different `Analyzer` implementations that are useful for determining the safety of migration scripts. ## Analyzers Below are the `Analyzer` implementations currently supported by Atlas. ### Destructive Changes Destructive changes are changes to a database schema that result in loss of data. For instance, consider a statement such as: ```sql ALTER TABLE `users` DROP COLUMN `email_address`; ``` This statement is considered destructive because whatever data is stored in the `email_address` column will be deleted from disk, with no way to recover it. There are definitely situations where this type of change is desired, but they are relatively rare. Using the `destructive` ([GoDoc](https://pkg.go.dev/ariga.io/atlas@master/sql/sqlcheck/destructive)) Analyzer, teams can detect this type of change and design workflows that prevent it from happening accidentally. Running migration linting locally or in CI fails with exit code 1 in case destructive changes are detected. 
However, users can disable this by configuring the `destructive` analyzer in the [`atlas.hcl`](../atlas-schema/projects#configure-migration-linting) file: ```hcl title="atlas.hcl" {2-4} lint { destructive { error = false } } ``` ### Data-dependent Changes Data-dependent changes are changes to a database schema that _may_ succeed or fail, depending on the data that is stored in the database. For instance, consider a statement such as: ```sql ALTER TABLE `example`.`orders` ADD UNIQUE INDEX `idx_name` (`name`); ``` This statement is considered data-dependent because if the `orders` table contains duplicate values on the name column we will not be able to add a uniqueness constraint. Consider we added two records with the name `atlas` to the table: ``` mysql> create table orders ( name varchar(100) ); Query OK, 0 rows affected (0.11 sec) mysql> insert into orders (name) values ("atlas"); Query OK, 1 row affected (0.06 sec) mysql> insert into orders (name) values ("atlas"); Query OK, 1 row affected (0.01 sec) ``` Attempting to add a uniqueness constraint on the `name` column will fail: ```sql mysql> ALTER TABLE `example`.`orders` ADD UNIQUE INDEX `idx_name` (`name`); // highlight-next-line-error-message ERROR 1062 (23000): Duplicate entry 'atlas' for key 'orders.idx_name' ``` This type of change is tricky because a developer trying to simulate it locally might succeed in performing it only to be surprised that their migration script fails in production, breaking a deployment sequence or causing other unexpected behavior. Using the `data_depend` ([GoDoc](https://pkg.go.dev/ariga.io/atlas@master/sql/sqlcheck/datadepend)) Analyzer, teams can detect this risk early and account for it in pre-deployment checks to a database. By default, data-dependent changes are reported but do not cause migration linting to fail. 
Users can change this by configuring the `data_depend` analyzer in the [`atlas.hcl`](../atlas-schema/projects#configure-migration-linting) file: ```hcl title="atlas.hcl" {2-4} lint { data_depend { error = true } } ``` ## Checks The following schema change checks are provided by Atlas: | **Check** | **Short Description** | |------------------------------------|-----------------------------------------------------------------------------| | [**DS1**](#destructive-changes) | Destructive changes | | [DS101](#DS101) | Schema was dropped | | [DS102](#DS102) | Table was dropped | | [DS103](#DS103) | Non-virtual column was dropped | | [**MF1**](#data-dependent-changes) | Changes that might fail | | [MF101](#MF101) | Add unique index to existing column | | [MF102](#MF102) | Modifying non-unique index to unique | | [MF103](#MF103) | Adding a non-nullable column to an existing table | | [MF104](#MF104) | Modifying a nullable column to non-nullable | | **MY** | MySQL and MariaDB specific checks | | [MY101](#MY101) | Adding a non-nullable column without a `DEFAULT` value to an existing table | | **LT** | SQLite specific checks | | [LT101](#LT101) | Modifying a nullable column to non-nullable without a `DEFAULT` value | #### DS101 {#DS101} Destructive change that is reported when a database schema was dropped. For example: ```sql DROP SCHEMA test; ``` #### DS102 {#DS102} Destructive change that is reported when a table schema was dropped. For example: ```sql DROP TABLE test.t; ``` #### DS103 {#DS103} Destructive change that is reported when a non-virtual column was dropped. For example: ```sql ALTER TABLE t DROP COLUMN c; ``` #### MF101 {#MF101} Adding a unique index to a table might fail in case one of the indexed columns contain duplicate entries. For example: ```sql CREATE UNIQUE INDEX i ON t(c); ``` #### MF102 {#MF102} Modifying a non-unique index to be unique might fail in case one of the indexed columns contain duplicate entries. 
:::note Since index modification is done with `DROP` and `CREATE`, this check will be reported only when analyzing changes programmatically or when working with the [declarative workflow](../concepts/workflows.md#declarative-migrations). ::: #### MF103 {#MF103} Adding a non-nullable column to a table might fail in case the table is not empty. For example: ```sql ALTER TABLE t ADD COLUMN c int NOT NULL; ``` #### MF104 {#MF104} Modifying nullable column to non-nullable might fail in case it contains NULL values. For example: ```sql ALTER TABLE t MODIFY COLUMN c int NOT NULL; ``` The solution, in this case, is to backfill `NULL` values with a default value: ```sql {1} UPDATE t SET c = 0 WHERE c IS NULL; ALTER TABLE t MODIFY COLUMN c int NOT NULL; ``` #### MY101 {#MY101} Adding a non-nullable column to a table without a `DEFAULT` value implicitly sets existing rows with the column zero (default) value. For example: ```sql ALTER TABLE t ADD COLUMN c int NOT NULL; // highlight-next-line -- Append column `c` to all existing rows with the value 0. ``` #### LT101 {#LT101} Modifying a nullable column to non-nullable without setting a `DEFAULT` might fail in case it contains `NULL` values. The solution is one of the following: 1\. Set a `DEFAULT` value on the modified column: ```sql {2} -- create "new_users" table CREATE TABLE `new_users` (`a` int NOT NULL DEFAULT 1); -- copy rows from old table "users" to new temporary table "new_users" INSERT INTO `new_users` (`a`) SELECT IFNULL(`a`, 1) FROM `users`; -- drop "users" table after copying rows DROP TABLE `users`; -- rename temporary table "new_users" to "users" ALTER TABLE `new_users` RENAME TO `users`; ``` 2\. 
Backfill `NULL` values with a default value: ```sql {1-2} -- backfill previous rows UPDATE `users` SET `a` = 1 WHERE `a` IS NULL; -- disable the enforcement of foreign-keys constraints PRAGMA foreign_keys = off; -- create "new_users" table CREATE TABLE `new_users` (`a` int NOT NULL); -- copy rows from old table "users" to new temporary table "new_users" INSERT INTO `new_users` (`a`) SELECT `a` FROM `users`; -- drop "users" table after copying rows DROP TABLE `users`; -- rename temporary table "new_users" to "users" ALTER TABLE `new_users` RENAME TO `users`; -- enable back the enforcement of foreign-keys constraints PRAGMA foreign_keys = on; ```atlas-0.7.2/doc/md/reference.md000066400000000000000000000355651431455511600162740ustar00rootroot00000000000000--- title: CLI Reference id: cli-reference slug: cli-reference --- ## Introduction This document serves as reference documentation for all available commands in the Atlas CLI. Similar information can be obtained by running any atlas command with the `-h` or `--help` flags. For a more detailed introduction to the CLI capabilities, head over to the [Getting Started](/getting-started/) page. ## Distributed Binaries The binaries distributed in official releases are released under the [Ariga End User License](https://ariga.io/legal/atlas/eula). If you would like to build Atlas from source follow the instructions [here](https://atlasgo.io/cli-reference#building-from-source). ### Building from Source If you would like to build Atlas from source without the UI code run: ```shell go get ariga.io/atlas/cmd/atlas ``` ## atlas env Print atlas environment variables. #### Usage ``` atlas env ``` #### Details 'atlas env' prints atlas environment information. Every set environment param will be printed in the form of NAME=VALUE. List of supported environment parameters: * ATLAS_NO_UPDATE_NOTIFIER: On any command, the CLI will check for new releases using the GitHub API. This check will happen at most once every 24 hours. 
To cancel this behavior, set the environment variable "ATLAS_NO_UPDATE_NOTIFIER". ## atlas license Display license information #### Usage ``` atlas license ``` ## atlas migrate Manage versioned migration files #### Usage ``` atlas migrate ``` #### Details 'atlas migrate' wraps several sub-commands for migration management. #### Flags ``` --dir string select migration directory using URL format (default "file://migrations") --env string set which env from the project file to use --var stringToString input variables (default []) ``` ### atlas migrate apply Applies pending migration files on the connected database. #### Usage ``` atlas migrate apply [flags] [count] ``` #### Details 'atlas migrate apply' reads the migration state of the connected database and computes what migrations are pending. It then attempts to apply the pending migration files in the correct order onto the database. The first argument denotes the maximum number of migration files to apply. As a safety measure 'atlas migrate apply' will abort with an error, if: - the migration directory is not in sync with the 'atlas.sum' file - the migration and database history do not match each other If run with the "--dry-run" flag, atlas will not execute any SQL. 
#### Example ``` atlas migrate apply -u mysql://user:pass@localhost:3306/dbname atlas migrate apply --dir file:///path/to/migration/directory --url mysql://user:pass@localhost:3306/dbname 1 atlas migrate apply --env dev 1 atlas migrate apply --dry-run --env dev 1 ``` #### Flags ``` --log string log format to use (default "tty") --revisions-schema string schema name where the revisions table resides --dry-run do not actually execute any SQL but show it on screen --from string calculate pending files from the given version (including it) --baseline string start the first migration after the given baseline version --tx-mode string set transaction mode [none, file, all] (default "file") --allow-dirty allow start working on a non-clean database -u, --url string [driver://username:password@address/dbname?param=value] select a database using the URL format ``` ### atlas migrate diff Compute the diff between the migration directory and a desired state and create a new migration file. #### Usage ``` atlas migrate diff [flags] [name] ``` #### Details 'atlas migrate diff' uses the dev-database to re-run all migration files in the migration directory, compares it to a given desired state and create a new migration file containing SQL statements to migrate the migration directory state to the desired schema. The desired state can be another connected database or an HCL file. #### Example ``` atlas migrate diff --dev-url mysql://user:pass@localhost:3306/dev --to file://atlas.hcl atlas migrate diff --dev-url mysql://user:pass@localhost:3306/dev --to file://atlas.hcl add_users_table atlas migrate diff --dev-url mysql://user:pass@localhost:3306/dev --to mysql://user:pass@localhost:3306/dbname atlas migrate diff --env dev ``` #### Flags ``` --dev-url string [driver://username:password@address/dbname?param=value] select a database using the URL format --to strings [driver://username:password@address/dbname?param=value ...] 
select a desired state using the URL format --qualifier string qualify tables with custom qualifier when working on a single schema ``` ### atlas migrate hash Hash (re-)creates an integrity hash file for the migration directory. #### Usage ``` atlas migrate hash ``` #### Details 'atlas migrate hash' computes the integrity hash sum of the migration directory and stores it in the atlas.sum file. This command should be used whenever a manual change in the migration directory was made. #### Example ``` atlas migrate hash ``` ### atlas migrate import Import a migration directory from another migration management tool to the Atlas format. #### Usage ``` atlas migrate import [flags] ``` #### Example ``` atlas migrate import --dir-format liquibase --from file:///path/to/source/directory --to file:///path/to/migration/directory ``` #### Flags ``` --from string select migration directory using URL format (default "file://migrations") --to string select migration directory using URL format (default "file://migrations") ``` ### atlas migrate lint Run analysis on the migration directory #### Usage ``` atlas migrate lint [flags] ``` #### Example ``` atlas migrate lint --env dev atlas migrate lint --dir file:///path/to/migration/directory --dev-url mysql://root:pass@localhost:3306 --latest 1 atlas migrate lint --dir file:///path/to/migration/directory --dev-url mysql://root:pass@localhost:3306 --git-base master atlas migrate lint --dir file:///path/to/migration/directory --dev-url mysql://root:pass@localhost:3306 --log '{{ json .Files }}' ``` #### Flags ``` --dev-url string [driver://username:password@address/dbname?param=value] select a database using the URL format --git-base string run analysis against the base Git branch --git-dir string path to the repository working directory (default ".") --latest uint run analysis on the latest N migration files --log string custom logging using a Go template ``` ### atlas migrate new Creates a new empty migration file in the migration 
directory. #### Usage ``` atlas migrate new [name] ``` #### Details 'atlas migrate new' creates a new migration according to the configured formatter without any statements in it. #### Example ``` atlas migrate new my-new-migration ``` ### atlas migrate set Set the current version of the migration history table. #### Usage ``` atlas migrate set [flags] ``` #### Details 'atlas migrate set' edits the revision table to consider all migrations up to and including the given version to be applied. This command is usually used after manually making changes to the managed database. #### Example ``` atlas migrate set-revision 3 --url mysql://user:pass@localhost:3306/ atlas migrate set-revision 4 --env local atlas migrate set-revision 1.2.4 --url mysql://user:pass@localhost:3306/my_db --revision-schema my_revisions ``` #### Flags ``` -u, --url string [driver://username:password@address/dbname?param=value] select a database using the URL format ``` ### atlas migrate status Get information about the current migration status. #### Usage ``` atlas migrate status [flags] ``` #### Details 'atlas migrate status' reports information about the current status of a connected database compared to the migration directory. #### Example ``` atlas migrate status --url mysql://user:pass@localhost:3306/ atlas migrate status --url mysql://user:pass@localhost:3306/ --dir file:///path/to/migration/directory ``` #### Flags ``` --revisions-schema string schema name where the revisions table resides -u, --url string [driver://username:password@address/dbname?param=value] select a database using the URL format ``` ### atlas migrate validate Validates the migration directories checksum and SQL statements. #### Usage ``` atlas migrate validate [flags] ``` #### Details 'atlas migrate validate' computes the integrity hash sum of the migration directory and compares it to the atlas.sum file. If there is a mismatch it will be reported. 
If the --dev-url flag is given, the migration files are executed on the connected database in order to validate SQL semantics. #### Example ``` atlas migrate validate atlas migrate validate --dir file:///path/to/migration/directory atlas migrate validate --dir file:///path/to/migration/directory --dev-url mysql://user:pass@localhost:3306/dev atlas migrate validate --env dev ``` #### Flags ``` --dev-url string [driver://username:password@address/dbname?param=value] select a database using the URL format ``` ## atlas schema Work with atlas schemas. #### Usage ``` atlas schema ``` #### Details The `atlas schema` command groups subcommands for working with Atlas schemas. #### Flags ``` --env string set which env from the project file to use --var stringToString input variables (default []) ``` ### atlas schema apply Apply an atlas schema to a target database. #### Usage ``` atlas schema apply [flags] ``` #### Details 'atlas schema apply' plans and executes a database migration to bring a given database to the state described in the provided Atlas schema. Before running the migration, Atlas will print the migration plan and prompt the user for approval. The schema is provided by one or more paths (to a file or directory) using the "-f" flag: atlas schema apply -u URL -f file1.hcl -f file2.hcl atlas schema apply -u URL -f schema/ -f override.hcl As a convenience, schemas may also be provided via an environment definition in the project file (see: https://atlasgo.io/cli/projects). If run with the "--dry-run" flag, atlas will exit after printing out the planned migration. 
#### Example ``` atlas schema apply -u "mysql://user:pass@localhost/dbname" -f atlas.hcl atlas schema apply -u "mysql://localhost" -f schema.hcl --schema prod --schema staging atlas schema apply -u "mysql://user:pass@localhost:3306/dbname" -f schema.hcl --dry-run atlas schema apply -u "mariadb://user:pass@localhost:3306/dbname" -f schema.hcl atlas schema apply --url "postgres://user:pass@host:port/dbname?sslmode=disable" -f schema.hcl atlas schema apply -u "sqlite://file:ex1.db?_fk=1" -f schema.hcl ``` #### Flags ``` -f, --file strings [paths...] file or directory containing the HCL files -u, --url string URL to the database using the format: [driver://username:password@address/dbname?param=value] --exclude strings List of glob patterns used to filter resources from applying. -s, --schema strings Set schema names. --dev-url string URL for the dev database. Used to validate schemas and calculate diffs before running migration. --dry-run Dry-run. Print SQL plan without prompting for execution. --auto-approve Auto approve. Apply the schema changes without prompting for approval. ``` ### atlas schema clean Removes all objects from the connected database. #### Usage ``` atlas schema clean [flags] ``` #### Details 'atlas schema clean' drops all objects in the connected database and leaves it in an empty state. As a safety feature, 'atlas schema clean' will ask for confirmation before attempting to execute any SQL. #### Example ``` atlas schema clean -u mysql://user:pass@localhost:3306/dbname atlas schema clean -u mysql://user:pass@localhost:3306/ ``` #### Flags ``` --auto-approve Auto approve. Apply the schema changes without prompting for approval. -u, --url string URL to the database using the format: [driver://username:password@address/dbname?param=value] ``` ### atlas schema diff Calculate and print the diff between two schemas. 
#### Usage ``` atlas schema diff [flags] ``` #### Details 'atlas schema diff' connects to two given databases, inspects them, calculates the difference in their schemas, and prints a plan of SQL statements to migrate the "from" database to the schema of the "to" database. #### Flags ``` --from string [driver://username:password@protocol(address)/dbname?param=value] select a database using the URL format --to string [driver://username:password@protocol(address)/dbname?param=value] select a database using the URL format ``` ### atlas schema fmt Formats Atlas HCL files #### Usage ``` atlas schema fmt [path ...] ``` #### Details 'atlas schema fmt' formats all ".hcl" files under the given path using canonical HCL layout style as defined by the github.com/hashicorp/hcl/v2/hclwrite package. Unless stated otherwise, the fmt command will use the current directory. After running, the command will print the names of the files it has formatted. If all files in the directory are formatted, no input will be printed out. ### atlas schema inspect Inspect a database and print its schema in Atlas DDL syntax. #### Usage ``` atlas schema inspect [flags] ``` #### Details 'atlas schema inspect' connects to the given database and inspects its schema. It then prints to the screen the schema of that database in Atlas DDL syntax. This output can be saved to a file, commonly by redirecting the output to a file named with a ".hcl" suffix: atlas schema inspect -u "mysql://user:pass@localhost:3306/dbname" > schema.hcl This file can then be edited and used with the `atlas schema apply` command to plan and execute schema migrations against the given database. In cases where users wish to inspect all multiple schemas in a given database (for instance a MySQL server may contain multiple named databases), omit the relevant part from the url, e.g. "mysql://user:pass@localhost:3306/". To select specific schemas from the databases, users may use the "--schema" (or "-s" shorthand) flag. 
#### Example ``` atlas schema inspect -u "mysql://user:pass@localhost:3306/dbname" atlas schema inspect -u "mariadb://user:pass@localhost:3306/" --schema=schemaA,schemaB -s schemaC atlas schema inspect --url "postgres://user:pass@host:port/dbname?sslmode=disable" atlas schema inspect -u "sqlite://file:ex1.db?_fk=1" ``` #### Flags ``` --exclude strings List of glob patterns used to filter resources from inspection -s, --schema strings Set schema name -u, --url string [driver://username:password@protocol(address)/dbname?param=value] select a database using the URL format ``` ## atlas version Prints this Atlas CLI version information. #### Usage ``` atlas version ``` atlas-0.7.2/doc/md/ui/000077500000000000000000000000001431455511600144135ustar00rootroot00000000000000atlas-0.7.2/doc/md/ui/intro.md000066400000000000000000000130401431455511600160660ustar00rootroot00000000000000--- id: atlas-ui-intro slug: /ui/intro title: Management UI --- :::info The Management UI is temporarily unavailable in the latest version of Atlas. The UI will be brought back soon, however if you wish to use the UI in the meantime, download [Atlas v0.3.7](https://github.com/ariga/atlas/releases/tag/v0.3.7). ::: ## Introduction As many other CLI tools, you can use Atlas to manage your schemas via your terminal. While this is the common interface for many infrastructure management workflows we believe that a visual, integrated environment can be beneficial in many use-cases. ### Opening the UI Get started with the UI by running the command `atlas serve`. 
To ensure that your team's schemas, database credentials, migration history and more are not lost, save the data in the UI by running the command with [persistent storage](https://atlasgo.io/deployment/intro#persistent-storage): ``` atlas serve --storage mysql://root:pass@localhost:3306/storage_db ``` Alternatively, you can run the commands `schema inspect` or `schema apply` with the `-w` command-line flag: #### Inspect ``` atlas schema inspect -u "mysql://root:pass@localhost:3306/example" -w ``` #### Apply ``` atlas schema apply -u "mysql://root:pass@localhost:3306/example" -f atlas.hcl -w ``` ### Usage :::note The following example is based on the schema created in the Getting Started tutorial. Click [here](/cli/getting-started/setting-up) to start the tutorial from the beginning. ::: When the UI opens, we are taken to the schema page. This screen displays a high-level view of our Atlas schema. ![Atlas Schema Synced](https://atlasgo.io/uploads/images/docs/synced-atlas-schema.png) On the top-left (1) we can see a box with information about our schema. The "Activity & History" panel (2) holds an audit history for all changes made to the schema. Each log is clickable, allowing us to view the diff between the schema and its previous state. On the bottom-right (3), the schema is described using the Atlas DDL. On the bottom-left part of the screen (4), the UI displays an ERD (Entity-relation diagram) showing the different tables and the connections between them (via foreign-keys). By clicking the "expand" icon on the top-right corner of the ERD panel, we can open a more detailed view of our schema. ![Schema ERD open](https://atlasgo.io/uploads/images/docs/schema-erd-open.png) #### Running a migration Now that we have seen how Atlas can visualize our database schema in the UI, let’s see how to initiate a migration. First, we will make changes to our schema by clicking on the "Edit Schema" button on the top right-hand corner. 
![Edit Schema Button](https://atlasgo.io/uploads/images/docs/edit-schema-button.png) Next, add the table `likes` to the schema: ```hcl table "likes" { schema = schema.example column "id" { null = false type = int } column "user_id" { null = false type = int } column "blog_post_id" { null = false type = int } primary_key { columns = [table.likes.column.id] } foreign_key "like_blog_post" { columns = [table.likes.column.blog_post_id] ref_columns = [table.blog_posts.column.id] on_update = RESTRICT on_delete = RESTRICT } foreign_key "like_user" { columns = [table.likes.column.user_id] ref_columns = [table.users.column.id] on_update = RESTRICT on_delete = RESTRICT } index "like_blog_post" { columns = [table.likes.column.blog_post_id] } index "like_user" { columns = [table.likes.column.user_id] } } ``` Click "save" on the top right-hand corner. ![Save Schema Button](https://atlasgo.io/uploads/images/docs/save-schema.png) Going back to our schema page, we can notice a few changes. First, our schema is now out of sync. This means we have created a change in the desired state that isn't yet reflected in the database. When a schema is out of sync, two separate schemas appear: desired and inspected. The _desired_ schema is the one being edited in the UI. The _inspected_ schema is the true, current state of the database. By clicking the toggle and changing the view to "unified", the schema is shown as one with a highlighted diff. Looking at the ERD, we can see that our table `likes` is green and has a plus-sign (+) next to it, showing that it needs to be added. This means the change has been recorded, but not yet applied. ![Atlas Schema Not Synced Split](https://atlasgo.io/uploads/images/docs/schema-out-of-sync-unified.png) Now, let’s run a migration! Click "Migrate Schema" in the Schema Status box to apply the changes we want to make to our schema. The migration screen will open up, running the setup automatically. After the setup succeeds, we will see the migration plan. 
The diff in the schema in HCL on the left pane, and the planned SQL statements on the right. ![Atlas Schema Not Synced Split](https://atlasgo.io/uploads/images/docs/migrate-plan-step.png) To start the migration, click "Apply". If you ever wish to not go through with a migration, you can click "Abort". ![Atlas Schema Not Synced Split](https://atlasgo.io/uploads/images/docs/migrate-apply-step.png) Congrats, we have successfully completed a migration! As expected, after executing our migration plan, our database and desired schema are now synced. ![Atlas Schema Not Synced Split](https://atlasgo.io/uploads/images/docs/synced-schema-after-migration.png) ### Conclusion In this section we learned how to use the Management UI to inspect our database and run migrations. atlas-0.7.2/doc/md/versioned/000077500000000000000000000000001431455511600157745ustar00rootroot00000000000000atlas-0.7.2/doc/md/versioned/apply.mdx000066400000000000000000000131051431455511600176330ustar00rootroot00000000000000--- id: apply slug: /versioned/apply title: Migration Applying --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; Atlas has its own migration execution engine that works with the Atlas migration file format, e.g. migration files generated by [`atlas migrate diff`](/versioned/diff). ### Arguments `atlas migrate apply` accepts one positional integer argument to specify how many pending migration files to run. * `atlas migrate apply` runs all pending migrations * `atlas migrate apply 2` runs at most 2 pending migrations ### Flags When using `migrate apply` to apply migrations, users must supply multiple parameters: * `--url` the [URL](/concepts/url) to the database to apply migrations on. * `--dir` the URL of the migration directory, by default it is `file://migrations`, e.g. a directory named `migrations` in the current working directory. 
### Schema Revision Information Atlas saves information about the applied migrations on a table called `atlas_schema_revisions` in the connected database schema (e.g. `mysql://user@host/my_schema` or `postgres://user@host/db?search_path=my_schema`). If the database connection is not bound to a specific schema (e.g. `mysql://user@host/` or `postgres://user@host/db`), the table is stored in its own schema called `atlas_schema_revisions`. This behavior can be changed by setting the schema manually: + `--revisions-schema my_schema` to store the data in `my_schema.atlas_schema_revisions`. ### Transaction Configuration By default, Atlas creates one transaction per migration file and will roll back that transaction if a statement in the wrapped migration fails to execute. Atlas supports three different transaction modes: * `--tx-mode file` (default) will wrap each pending migration into its own transaction. * `--tx-mode all` will wrap all pending migration files into one transaction. * `--tx-mode none` will not create any transaction. If a statement fails, the execution will stop. However, Atlas is smart enough to detect which statement fails and on another migration attempt will continue with the failed statement. This means altering the migration file from the failed statements onwards is safe and recommended. :::caution Please be aware, that non DDL transactional databases like MySQL (due to [implicit commits](https://dev.mysql.com/doc/refman/8.0/en/implicit-commit.html)) can not be safely rolled back completely, and you might end up with a mismatched schema and revision table state. Atlas will handle those cases in future releases. A good source of information can be found in the [PostgreSQL wiki](https://wiki.postgresql.org/wiki/Transactional_DDL_in_PostgreSQL:_A_Competitive_Analysis). ::: ### Existing Databases If you have an existing database project and want to switch over to Atlas Versioned Migrations, you need to provide Atlas with a starting point. 
The first step is to create a migration file reflecting the current schema state. This can be easily done: ```shell atlas migrate diff my_baseline \ --dir "file://migrations" \ --to "mysql://root:pass@remote:3306/my_schema" ``` ```sql CREATE TABLE `users` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `age` bigint(20) NOT NULL, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `age` (`age`) ) ``` ```sql CREATE TABLE `pets` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`) ) ``` On your first migration execution attempt, you can then specify a baseline version. Atlas will mark this version as already applied and proceed with the next version: * `--baseline 1` will skip all files including version 1 (`1_baseline.sql`) and proceed with the next version (`2_add_pets_table`). If your database does contain resources but no revision information yet, Atlas will refuse to execute migration files. One way to override that behavior is by using the `--baseline` flag above. However, e.g. in cases where the existing tables are not managed by Atlas at all and should not be part of a baseline file, you can run the initial migration attempt by providing the following flag: * `--allow-dirty` to allow starting migration execution on a non-clean database. `--allow-dirty` and `--baseline` are mutually exclusive. ### Dry Run If you want to check what exactly Atlas would do when attempting a migration execution, you can provide the `--dry-run` flag: * `--dry-run` to not execute any SQL but print it on the screen. 
### Examples First time apply with baseline on production environment: ```shell atlas migrate apply \ --env "production" \ --baseline "20220811074144" ``` Execute 1 pending migration file, but don't run, but print SQL statements on screen: ```shell atlas migrate apply 1 \ --env "production" \ --baseline "20220811074144" \ --dry-run ``` Specify revision table schema and custom migration directory path: ```shell atlas migrate apply \ --url "mysql://root:pass@remote:3306/my_database" \ --revisions-schema "atlas_migration_history" \ --dir "file://custom/path/to/dir" ``` Ignore unclean database and run the first 3 migrations: ```shell atlas migrate apply 3 \ --url "mysql://root:pass@remote:3306/my_database" \ --dir "file://custom/path/to/dir" ``` Run all pending migrations, but do not use a transaction: ```shell atlas migrate apply \ --url "mysql://root:pass@remote:3306/my_database" \ --tx-mode "none" ``` atlas-0.7.2/doc/md/versioned/diff.mdx000066400000000000000000000255631431455511600174310ustar00rootroot00000000000000--- id: diff slug: /versioned/diff title: Automatic Migration Authoring --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; With the `atlas migrate diff` command users can implement a kind of workflow that we call _versioned migration authoring_. This workflow is a synthesis between _declarative_ workflows, where developers specify the desired state of their database, and _versioned migrations_ where each change is explicitly defined as a migration script with a specific version. Practically speaking, this means developers define the desired state with either an HCL [schema definition](/atlas-schema/sql-resources) or a database connection, and Atlas maintains the `migrations` directory, which contains the explicit SQL scripts to move from one version to the next. 
### Generate migrations from Atlas schemas Suppose we have an Atlas schema with one table and an empty migration directory: ```hcl title="schema.hcl" schema "test" {} table "users" { schema = schema.test column "id" { type = int } } ``` Let's run `atlas migrate diff` with the necessary parameters to generate a migration script for creating our `users` table: * `--dir` the URL to the migration directory, by default it is `file://migrations`. * `--to` the URL of the desired state, an HCL file or a database connection. * `--dev-url` a [URL](/concepts/url) to a [Dev Database](/concepts/dev-database) that will be used to compute the diff. ```shell atlas migrate diff create_users \ --dir "file://migrations" \ --to "file://schema.hcl" \ --dev-url "mysql://root:pass@:3306/test" ``` Run `ls migrations`, and you will notice Atlas created 2 files: By default, migration files are named with the following format `{{ now }}_{{ name }}.sql`. If you wish to use a different file format, use the `--dir-format` option. ```sql -- create "users" table CREATE TABLE `users` (`id` int NOT NULL) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; ``` In addition to the migration directory, Atlas maintains a file name `atlas.sum` which is used to ensure the integrity of the migration directory and force developers to deal with situations where migration order or contents was modified after the fact. ```text h1:t1fEP1rSsGf1gYrYCjsGyEyuM0cnhATlq93B7h8uXxY= 20220811074144_create_users.sql h1:liZcCBbAn/HyBTqBAEVar9fJNKPTb2Eq+rEKZeCFC9M= ``` Let's repeat the process above by changing our HCL schema file and running Atlas migration authoring again. 
We add a new column `name` to our HCL schema: ```hcl title="schema.hcl" {8-10} schema "test" {} table "users" { schema = schema.test column "id" { type = int } column "name" { type = varchar(255) } } ``` Then, run `atlas migrate diff`: ```shell atlas migrate diff add_users_name \ --dir "file://migrations" \ --to "file://schema.hcl" \ --dev-url "mysql://root:pass@:3306/test" ``` You will notice Atlas added a new file to the migration directory: ```sql -- create "users" table CREATE TABLE `users` (`id` int NOT NULL) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; ``` ```sql -- modify "users" table ALTER TABLE `users` ADD COLUMN `name` varchar(255) NOT NULL; ``` ```text h1:w2ODzVxhTKdBVBdzqntHw7rHV8lKQF98TmNevOEZfIo= 20220811074144_create_users_table.sql h1:KnMSZM/E4TBGidYCZ+UHxkHEWaRWeyuPIUjSHRybQqA= 20220811074314_add_users_name.sql h1:jUpaANgD0SjI5DjaHuJxtHZ6Wq98act0MmE5oZ+NRU0= ``` The following diagram explains how it works. Atlas loads the **current state** by replaying the migration directory onto the provided [dev database](/concepts/dev-database), compares it against the **desired state** and writes a new migration script for moving from the current to the desired state. ![Diff From Schema](https://atlasgo.io/uploads/images/versioned-migration/diff-from-schema.png) ### Generate migrations from database schemas Suppose we have a database with a `users` table that was created manually or by an ORM like [Ent](https://entgo.io), we can tell Atlas that this is our **desired** state, and we want to generate a migration script to create this table. 
```text mysql> describe users; +-------+------+------+-----+---------+-------+ | Field | Type | Null | Key | Default | Extra | +-------+------+------+-----+---------+-------+ | id | int | YES | | NULL | | +-------+------+------+-----+---------+-------+ ``` Let's run `atlas migrate diff` with the necessary parameters to generate a migration script for creating our `users` table: ```shell atlas migrate diff create_users \ --dir "file://migrations" \ --to "mysql://root:pass@:3306/public" \ --dev-url "mysql://root:pass@:3306/test" ``` Run `ls migrations`, and you will notice Atlas created 2 files: By default, migration files are named with the following format `{{ now }}_{{ name }}.sql`. If you wish to use a different file format, use the `--dir-format` option. ```sql -- create "users" table CREATE TABLE `users` (`id` int NOT NULL) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; ``` In addition to the migration directory, Atlas maintains a file name `atlas.sum` which is used to ensure the integrity of the migration directory and force developers to deal with situations where migration order or contents was modified after the fact. ```text h1:/6UW2WaPV1laJpEB7oVesKB9RuL59dgDhlTp5atDbNg= 20220811102532_create_users.sql h1:IJiVpbh3FLMeDJSzxxKPuFU3m4AHBgThBfs3VFXAXVo= ``` ### Generate migrations with custom qualifiers When working on a specific database schema, Atlas generates migration scripts without schema qualifiers to allow executing them multiple times on different schemas. However, in some cases, it is necessary to have those qualifiers. To address this, Atlas allows passing another flag to `migrate diff` named `--qualifier`. 
Let's run the [example above](#generate-migration-from-database-schema), with the `--qualifier` flag and compare the output: ```shell atlas migrate diff create_users \ --dir "file://migrations" \ --to "file://schema.hcl" \ --dev-url "mysql://root:pass@:3306/test" \ --qualifier "market" ``` Running `cat migrations/*.sql` will print the same migration script but the `users` table will be qualified with the `market` schema: ```sql -- create "users" table CREATE TABLE `market`.`users` (`id` int NOT NULL, `name` varchar(255) NOT NULL) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; ``` ### Generate migrations with custom formats Some migration tools use a different file format than the one used by Atlas. Therefore, Atlas provides an option named `--dir-format` that allows controlling the format of the migration directory. ```shell atlas migrate diff create_users \ --dir "file://migrations" \ --to "file://schema.hcl" \ --dev-url "mysql://root:pass@:3306/test" \ --dir-format "golang-migrate" ``` Run `ls migrations`, and you will notice Atlas created 3 files: ```sql -- create "users" table CREATE TABLE `users` (`name` int NOT NULL) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; ``` ```sql -- reverse: create "users" table DROP TABLE `users`; ``` In addition to the migration directory, Atlas maintains a file name `atlas.sum` which is used to ensure the integrity of the migration directory and force developers to deal with situations where migration order or contents was modified after the fact. ```text h1:NRHsNIjvSSzprr/EzMdtszQg3t3pVLk4G4N1tX4rMfk= 20220811114629_create_users.up.sql h1:Ng3GHrdk2davokjOctgVdxC+6QsK4JzaLX6RT3QstJc= ``` ### Generate migrations for the entire database Atlas supports generating migrations for databases or multiple schemas. In PostgreSQL, a database can be created with the `CREATE DATABASE` command and can hold multiple schemas. In MySQL however, a database is an instance with one or more schemas. 
Suppose we have an Atlas schema that defines two database schemas where each one contains a single table. ```hcl title="schema.hcl" schema "auth" {} schema "market" {} table "users" { schema = schema.market column "name" { type = int } } table "tokens" { schema = schema.auth column "value" { type = int } } ``` Let's run `atlas migrate diff` to generate migration scripts for creating the entire schema. However, unlike the previous examples where the `--dev-url` flag was set to a [URL](concepts/url.mdx) of a specific schema, in this case we omit the schema name from the connection string. ```shell atlas migrate diff create_all \ --dir "file://migrations" \ --to "file://schema.hcl" \ --dev-url "mysql://root:pass@:3306/" ``` Running `cat migrations/*.sql` will print the followings: ```sql -- add new schema named "auth" CREATE DATABASE `auth`; -- add new schema named "market" CREATE DATABASE `market`; -- create "tokens" table CREATE TABLE `auth`.`tokens` (`value` int NOT NULL) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; -- create "users" table CREATE TABLE `market`.`users` (`name` int NOT NULL) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; ``` ```shell atlas migrate diff create_all \ --dir "file://migrations" \ --to "file://schema.hcl" \ --dev-url "postgres://root:pass@:5434/database?sslmode=disable" ``` Running `cat migrations/*.sql` will print the followings: ```sql -- Add new schema named "auth" CREATE SCHEMA "auth"; -- Add new schema named "market" CREATE SCHEMA "market"; -- create "tokens" table CREATE TABLE "auth"."tokens" ("value" integer NOT NULL); -- create "users" table CREATE TABLE "market"."users" ("name" integer NOT NULL); ``` As you can see, Atlas generates statements for creating the `auth` and `market` schemas, and added them as qualifiers in the created tables. 
### Reference [CLI Command Reference](/cli-reference#atlas-migrate-diff)atlas-0.7.2/doc/md/versioned/import.mdx000066400000000000000000000114351431455511600200240ustar00rootroot00000000000000--- id: import slug: /versioned/import title: Migration Directory Import --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; Atlas supports the generation of [custom migration file formats](/versioned/diff#generate-migrations-with-custom-formats) for a variety of existing migration management tools, e.g. [Flyway](https://flywaydb.org/) or [golang-migrate/migrate](https://github.com/golang-migrate/migrate). But Atlas has its own format as well and provides a convenient command to import existing migration directories of supported tools into the Atlas format. ### Flags When using `atlas migrate import` to import a migration directory, users must supply multiple parameters: * `--from` the [URL](/concepts/url) to the migration directory to import. * `--to` the URL of the migration directory to save imported migration files into, by default it is `file://migrations`. * `--dir-format` one of `[golang-migrate, goose, flyway, liquibase, dbmate]` to specifiy the migration file format of the migration directory to import. ### Limitations Importing an existing migration directory has some limitations: #### Comments not directly preceding a SQL statement will get lost. ```sql -- This comment will get lost -- This will be preserved /* This will be preserved as well /* CREATE TABLE `users` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `age` bigint(20) NOT NULL, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `age` (`age`) ); -- This will get lost. 
``` ```sql -- This will be preserved /* This will be preserved as well /* CREATE TABLE `users` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `age` bigint(20) NOT NULL, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `age` (`age`) ); ``` #### Rollback migrations will not get imported. Atlas does not have the concept of rollback migrations. Therefore migrations to undo an applied migration, often called "down" or "undo" migrations, will not be imported into the new migration directory. For migration formats having the rollback migration part of one file separated by some directive, the rollback parts are stripped away. ```sql -- +goose Up CREATE TABLE `users` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `age` bigint(20) NOT NULL, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `age` (`age`) ); -- +goose Down DROP TABLE `users`; ``` ```sql CREATE TABLE `users` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `age` bigint(20) NOT NULL, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `age` (`age`) ); ``` #### Repeatable Migrations Flyway has the concept of repeatable migrations, however, Atlas does not. In Flyway repeatable migrations are run last, if their contents did change. Atlas tries to reproduce this behavior by creating versioned migrations out of each repeatable migration file found and giving them the character `R` as version suffix. 
```sql CREATE TABLE `users` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `age` bigint(20) NOT NULL, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `age` (`age`) ); ``` ```sql CREATE VIEW `users_over_30` AS SELECT * FROM `users` where `age` > 30; ``` ```sql CREATE TABLE `users` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `age` bigint(20) NOT NULL, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `age` (`age`) ); ``` ```sql CREATE VIEW `users_over_30` AS SELECT * FROM `users` where `age` > 30; ``` ### Examples Import existing `golang-migrate/migrate` migration directory: ```shell atlas migrate import \ --from "file://migrations" \ --to "file://atlas-migrations" \ -- dir-format "golang-migrate" ``` Import existing Flyway migration directory: ```shell atlas migrate import \ --from "file://migrations" \ --to "file://atlas-migrations" \ -- dir-format "flyway" ``` atlas-0.7.2/doc/md/versioned/lint.mdx000066400000000000000000000102611431455511600174540ustar00rootroot00000000000000--- id: lint slug: /versioned/lint title: Verifying migration safety --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; With the `atlas migrate lint` command, users can analyze the migration directory to detect potentially dangerous changes to the database schema. This command may be incorporated in _continuous integration_ pipelines to enable teams to enforce desired policies with regard to schema changes. [Learn more about Atlas's analyzers](/lint/analyzers) ### Flags When using `migrate lint` to analyze migrations, users must supply multiple parameters: * `--dev-url` a [URL](/concepts/url) to a [Dev-database](/concepts/dev-database) that will be used to simulate the changes and verify their correctness. * `--dir` the URL of the migration directory, by default it is `file://migrations`, e.g a directory named `migrations` in the current working directory. 
### Changeset detection When we run the `lint` command, we need to instruct Atlas on how to decide what set of migration files to analyze. Currently, two modes are supported. * `--git-base `: which selects the diff between the provided branch and the current one as the changeset. * `--latest ` which selects the latest n migration files as the changeset. ### `nolint` directive Annotating a statement with the `--atlas:nolint` directive allows excluding it from the analysis reporting. For example: Using `--atlas:nolint` excludes the annotated statement from all linters. ```sql //highlight-next-line -- atlas:nolint ALTER TABLE `t1` DROP COLUMN `c1`, ADD COLUMN `d1` varchar(255) NOT NULL; //highlight-next-line --atlas:nolint ALTER TABLE `t2` DROP COLUMN `c2`, ADD COLUMN `d2` varchar(255) NOT NULL; //highlight-next-line /*atlas:nolint*/ ALTER TABLE `t3` DROP COLUMN `c3`, ADD COLUMN `d3` varchar(255) NOT NULL; //highlight-next-line #atlas:nolint ALTER TABLE `t4` DROP COLUMN `c4`, ADD COLUMN `d4` varchar(255) NOT NULL; ``` Using `--atlas:nolint [names...]` excludes reporting specific analyzers for the annotated statements. ```sql //highlight-next-line -- Ignore reporting destructive changes. //highlight-next-line -- atlas:nolint destructive ALTER TABLE `t1` DROP COLUMN `c1`, ADD COLUMN `d1` varchar(255) NOT NULL; //highlight-next-line -- Ignore reporting destructive and data-dependent changes. //highlight-next-line --atlas:nolint destructive data_depend ALTER TABLE `t2` DROP COLUMN `c2`, ADD COLUMN `d2` varchar(255) NOT NULL; //highlight-next-line /*atlas:nolint data_depend*/ ALTER TABLE `t3` DROP COLUMN `c3`, ADD COLUMN `d3` varchar(255) NOT NULL; //highlight-next-line #atlas:nolint destructive data_depend ALTER TABLE `t4` DROP COLUMN `c4`, ADD COLUMN `d4` varchar(255) NOT NULL; ``` Using `--atlas:nolint [checks...]` excludes reporting specific [checks](../lint/analyzers.md#checks) for the annotated statement. 
```sql //highlight-next-line -- atlas:nolint DS103 MY101 ALTER TABLE `t1` DROP COLUMN `c1`, ADD COLUMN `d1` varchar(255) NOT NULL; ``` ### Output Users may supply a [Go template](https://pkg.go.dev/text/template) string as the `--log` parameter to format the output of the `lint` command. ### Examples Analyze all changes relative to the `master` Git branch: ```shell atlas migrate lint \ --dir "file://my/project/migrations" \ --dev-url "mysql://root:pass@localhost:3306/dev" \ --git-base "master" ``` Analyze the latest 2 migration files: ```shell atlas migrate lint \ --dir "file://my/project/migrations" \ --dev-url "mysql://root:pass@localhost:3306/dev" \ --latest 2 ``` Format output as JSON: ```shell atlas migrate lint \ --dir "file://my/project/migrations" \ --dev-url "mysql://root:pass@localhost:3306/dev" \ --git-base "master" \ --log "{{ json .Files }}" ``` Use the configuration defined in [`atlas.hcl`](../atlas-schema/projects#configure-migration-linting): ```shell atlas migrate lint --env "local" ``` ### Reference [CLI Command Reference](/cli-reference#atlas-migrate-lint)atlas-0.7.2/doc/md/versioned/new.mdx000066400000000000000000000101141431455511600172740ustar00rootroot00000000000000--- id: new slug: /versioned/new title: Manual Migrations --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; In some cases it is desirable to add a migration file manually. This could be done to provision resources that Atlas does not yet capture in its DDL (such as triggers and views) or to seed data with `INSERT` statements. To manually add a new migration file to the directory use the `migrate new` command. ### Flags When using `migrate new` to create a new migration file users may supply the following flags: * `--dir` the URL of the migration directory, by default it is `file://migrations`, e.g a directory named `migrations` in the current working directory. 
### Migration name Users may optionally add a final positional argument to set the name of the migration file. This name will be appended to the migration version number in the filename as such: `_.sql`. ### Custom statements delimiter The semicolon character (`;`) is recognized by Atlas as a statement delimiter. In some cases, however, the delimiter may need to be redefined because the semicolon itself is used in one of the DDL statements. For example, a stored program containing semicolon characters. #### Using the `DELIMITER` command used by [MySQL client](https://dev.mysql.com/doc/refman/8.0/en/mysql-commands.html) ```sql {1,7-8} DELIMITER // CREATE PROCEDURE dorepeat(p1 INT) BEGIN SET @x = 0; REPEAT SET @x = @x + 1; UNTIL @x > p1 END REPEAT; END // DELIMITER ; CALL dorepeat(100) ``` Statement 1: ```sql CREATE PROCEDURE dorepeat(p1 INT) BEGIN SET @x = 0; REPEAT SET @x = @x + 1; UNTIL @x > p1 END REPEAT; END ``` Statement 2: ```sql CALL dorepeat(100); ``` #### Using the `atlas:delimiter` directive to set `\n\n\n` as a separator: ```sql //highlight-next-line-info -- atlas:delimiter \n\n\n CREATE PROCEDURE dorepeat(p1 INT) BEGIN SET @x = 0; REPEAT SET @x = @x + 1; UNTIL @x > p1 END REPEAT; END; CALL dorepeat(1000); ``` Statement 1: ```sql CREATE PROCEDURE dorepeat(p1 INT) BEGIN SET @x = 0; REPEAT SET @x = @x + 1; UNTIL @x > p1 END REPEAT; END; ``` Statement 2: ```sql CALL dorepeat(1000); ``` #### Using the `atlas:delimiter` directive to set `-- end` as a separator: ```sql //highlight-next-line-info -- atlas:delimiter -- end CREATE PROCEDURE dorepeat(p1 INT) BEGIN SET @x = 0; REPEAT SET @x = @x + 1; UNTIL @x > p1 END REPEAT; END; -- end CALL dorepeat(1000); ``` Statement 1: ```sql CREATE PROCEDURE dorepeat(p1 INT) BEGIN SET @x = 0; REPEAT SET @x = @x + 1; UNTIL @x > p1 END REPEAT; END; ``` Statement 2: ```sql CALL dorepeat(1000); ``` ### Recalculating the directory hash Atlas maintains a file named `atlas.sum` in the migration directory. 
This file is used to ensure the integrity of the migration directory and force developers to deal with situations where migration order or contents was modified after the fact. After manually editing the contents of a newly created migration file, the checksums for the directory must be recalculated. This can be done by running `atlas migrate hash` command. ### Examples Creating a new migration file: ```shell atlas migrate new ``` Creating a new migration file named "add_user": ```shell atlas migrate new add_user ``` Creating a new migration file in a specific directory: ```shell atlas migrate new --dir "file://custom/path/to/dir" ``` atlas-0.7.2/doc/website/000077500000000000000000000000001431455511600150405ustar00rootroot00000000000000atlas-0.7.2/doc/website/.gitignore000066400000000000000000000003511431455511600170270ustar00rootroot00000000000000# Dependencies /node_modules # Production /build # Generated files .docusaurus .cache-loader # Misc .DS_Store .env.local .env.development.local .env.test.local .env.production.local npm-debug.log* yarn-debug.log* yarn-error.log* atlas-0.7.2/doc/website/README.md000066400000000000000000000013471431455511600163240ustar00rootroot00000000000000# Website This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator. ## Installation ```console yarn install ``` ## Local Development ```console yarn start ``` This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server. ## Build ```console yarn build ``` This command generates static content into the `build` directory and can be served using any static contents hosting service. ## Deployment ```console GIT_USER= USE_SSH=true yarn deploy ``` If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch. 
atlas-0.7.2/doc/website/babel.config.js000066400000000000000000000001311431455511600177020ustar00rootroot00000000000000module.exports = { presets: [require.resolve('@docusaurus/core/lib/babel/preset')], }; atlas-0.7.2/doc/website/blog/000077500000000000000000000000001431455511600157635ustar00rootroot00000000000000atlas-0.7.2/doc/website/blog/2021-11-25-meet-atlas.md000066400000000000000000000243021431455511600213670ustar00rootroot00000000000000--- title: "Meet Atlas CLI: Inspect and Apply changes to your database schema" date: "2021-11-25" author: Yoni Davidson authorURL: "https://github.com/yonidavidson" authorImageURL: "https://avatars0.githubusercontent.com/u/5472778" authorTwitter: yonidavidson url: /meet-atlas-cli/ image: https://blog.ariga.io/images/logo.png --- At [Ariga](https://ariga.io), we are building a new kind of platform that we call an Operational Data Graph. This platform enables software engineers to manage, maintain and access complex data architectures as if they were one database. Today, we are open-sourcing a CLI for [Atlas](https://atlasgo.io), one of the fundamental building blocks of our platform. During my career, the scope of what is expected of me as a software engineer has increased significantly. Developers are no longer expected just to write code, we are expected to provision infrastructure, manage databases, define deployments and monitor systems in production. Nowadays, one of the responsibilities we have as software engineers is to manage the database schema of our applications. Once seen as falling strictly under the domain of DBAs, today developers everywhere are responsible for defining database schemas and changing them over time. Because an application's database carries its state, all clients and servers are severely impacted if it stops functioning properly. Therefore, over the years many techniques and tools were developed to deal with this process, which is called _migrating_ the database. 
In the last few years we have seen a lot of progress in the field of tools for provisioning infrastructure. From early projects such as [Chef](https://www.chef.io/) and [Puppet](https://puppet.com/), to more recent work such as [Terraform](https://www.terraform.io/), a lot of thought and effort has been put across the industry to build tools that simplify and standardize the process. Instead of manually installing and configuring software and services, the common thread between all of these projects is that they are based on machine-readable definition files, a concept also known as [infrastructure-as-code](https://en.wikipedia.org/wiki/Infrastructure_as_code) (IaC). ### Enter: Atlas Atlas is at the core of Ariga's platform. In this post, I would like to share with you the work we've done so far to provide a solid foundation for managing databases in a way that's akin to infrastructure-as-code practices. * The [Atlas DDL](https://atlasgo.io/ddl) (Data-definition Language): we have created the Atlas DDL, a new configuration language designed to capture an organization's data topology - including relational database schemas. This language is currently described in an HCL syntax (similar to TerraForm), but will support more syntaxes such as JSON and TypeScript in the future. The Atlas DDL currently supports defining schemas for [SQL](https://atlasgo.io/sql-hcl) databases such as MySQL, Postgres, SQLite and MariaDB, but in the future, we plan to add support for other types of databases. 
For example: ```hcl table "users" { schema = "default" column "id" { type = "int" } column "name" { type = "string" } column "manager_id" { type = "int" } primary_key { columns = [ table.users.column.id ] } index "idx_name" { columns = [ table.users.column.name ] unique = true } foreign_key "manager_fk" { columns = [table.users.column.manager_id] ref_columns = [table.users.column.id] on_delete = "CASCADE" on_update = "NO ACTION" } } ``` * __The Atlas CLI__ On top of the building blocks provided by the DDL, we started building our CLI tool to support the two most basic functions: * __"Schema Inspect"__ - Create a schema specification file from a database. * __"Schema Apply"__ - Migrate a database to a new desired state. Many infrastructure-as-code projects have taken the declarative approach, in which the developer articulates the desired state of the system and the tool is responsible for figuring out a plan to get there. As we discussed above, changing database schemas safely is a delicate practice, so we had to build the Atlas CLI to be smart enough to understand the nuance of changes for each type of database. ### Atlas in action Let's see how Atlas CLI works with real databases. 
Let's start a MySQL container: ```shell docker run --name atlas-db -p 3306:3306 -e MYSQL_ROOT_PASSWORD=pass -e MYSQL_DATABASE=example mysql:8.0.27 ``` Connect to our database using a native client to validate: ```shell docker exec -it atlas-db mysql --password='pass' example ``` ```shell mysql> show tables; Empty set (0.00 sec) mysql> ``` Let's see how Atlas inspects it: ```shell atlas schema inspect -u "mysql://root:pass@localhost:3306/example" > atlas.hcl ``` As expected, an empty schema: ```hcl # cat atlas.hcl schema "example" { } ``` Let's update our schema to: ```hcl # cat atlas.hcl table "users" { schema = "example" column "id" { null = false type = "int" } column "name" { null = false type = "string" size = 255 } column "manager_id" { null = false type = "int" } primary_key { columns = [table.users.column.id, ] } foreign_key "manager_fk" { columns = [table.users.column.manager_id, ] ref_columns = [table.users.column.id, ] on_update = "NO ACTION" on_delete = "CASCADE" } index "idx_name" { unique = true columns = [table.users.column.name, ] } index "manager_fk" { unique = false columns = [table.users.column.manager_id, ] } } schema "example" { } ``` And apply our changes! ```shell atlas schema apply -u "mysql://root:pass@localhost:3306/example" -f atlas.hcl -- Planned Changes: -- Add Table : users CREATE TABLE `example`.`users` (`id` int NOT NULL, `name` varchar(255) NOT NULL, `manager_id` int NOT NULL, PRIMARY KEY (`id`), UNIQUE INDEX `idx_name` (`name`), CONSTRAINT `manager_fk` FOREIGN KEY (`manager_id`) REFERENCES `example`.`users` (`id`) ON UPDATE NO ACTION ON DELETE CASCADE) ; Use the arrow keys to navigate: ↓ ↑ → ← ? Are you sure?: ▸ Apply Abort ``` Of course we are sure ! 
Using CLI to examine our database: ```sql mysql> describe users; +------------+--------------+------+-----+---------+-------+ | Field | Type | Null | Key | Default | Extra | +------------+--------------+------+-----+---------+-------+ | id | int | NO | PRI | NULL | | | name | varchar(255) | NO | UNI | NULL | | | manager_id | int | NO | MUL | NULL | | +------------+--------------+------+-----+---------+-------+ 3 rows in set (0.00 sec) mysql> ``` Let's make sure that it has the FK: ```sql mysql> show create table users; +-------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | Table | Create Table | +-------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | users | CREATE TABLE `users` ( `id` int NOT NULL, `name` varchar(255) NOT NULL, `manager_id` int NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `idx_name` (`name`), KEY `manager_fk` (`manager_id`), CONSTRAINT `manager_fk` FOREIGN KEY (`manager_id`) REFERENCES `users` (`id`) ON DELETE CASCADE ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci | 
+-------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ 1 row in set (0.00 sec) mysql> ``` Now let's see that Atlas inspects this correctly: ```shell atlas schema inspect -u "mysql://root:pass@localhost:3306/example" > atlas.hcl ``` ```hcl # cat atlas.hcl table "users" { schema = "example" column "id" { null = false type = "int" } column "name" { null = false type = "string" size = 255 } column "manager_id" { null = false type = "int" } primary_key { columns = [table.users.column.id, ] } foreign_key "manager_fk" { columns = [table.users.column.manager_id, ] ref_columns = [table.users.column.id, ] on_update = "NO ACTION" on_delete = "CASCADE" } index "idx_name" { unique = true columns = [table.users.column.name, ] } index "manager_fk" { unique = false columns = [table.users.column.manager_id, ] } } schema "example" { } ``` Let's see what happens when we try to reapply the same change: ```shell atlas schema apply -u "mysql://root:pass@localhost:3306/example" -f atlas.hcl Schema is synced, no changes to be made ``` In this example we have shown how we can inspect a MySQL database schema and apply a change. ### What's Next? The Atlas DDL opens up a world of tools and services, and with the help of our community, we are planning to push the development ecosystem forward. A list of tools that are on our road map includes: * Integrations with Terraform, GitHub actions and Kubernetes. * Extended migration logics such as renaming columns, adding or dropping nullability and altering enums. * Toolsets for examining the migration history and reproducing it. 
We hope that you find Atlas CLI as exciting as we do, and we invite you to [contribute](https://github.com/ariga/atlas) your ideas and code. atlas-0.7.2/doc/website/blog/2022-01-19-atlas-v030.md000066400000000000000000000154331431455511600211350ustar00rootroot00000000000000--- title: "Announcing Atlas v0.3.0: A UI-powered schema migration experience" date: "2022-01-19" author: Hila Kashai authorURL: "https://github.com/hilakashai" authorImageURL: "https://avatars.githubusercontent.com/u/73284641?v=4" authorTwitter: HilaKash url: /atlas-v030/ image: https://blog.ariga.io/uploads/images/posts/atlas-v0.3.0/atlas-post-save.png --- Earlier this week we released [v0.3.0](https://github.com/ariga/atlas/releases/tag/v0.3.0) of the Atlas CLI. This version features a ton of improvements to database inspection, diffing and migration planning. You can read about those in the release notes page, but we wanted to take the time and introduce the biggest feature in this release, the _Atlas Management UI_. To recap, [Atlas](https://atlasgo.io/) is an open source CLI tool that helps developers manage their database schemas. Contrary to existing tools, Atlas intelligently plans schema migrations for you, based on your desired state. Atlas currently has two main commands: `inspect` and `apply`. The `inspect` command inspects your database, generating an Atlas HCL document. The `apply` command allows you to migrate your schema from its current state in the database to your desired state by providing an HCL file with the relevant schema. In this post we will showcase the latest addition to the CLI's feature set, the Management UI. Until now, you could use Atlas to manage your schemas via your terminal. While this is the common interface for many infrastructure management workflows, we believe that a visual, integrated environment can be beneficial in many use-cases. ### Inspecting our database using the Atlas UI Let's see how we can use the Atlas UI to inspect our database. 
For the purpose of demonstration let's assume that you have a locally running MySQL database. If you want to follow along, check out the [Setting Up](https://atlasgo.io/cli/getting-started/setting-up) tutorial on the Atlas website for instructions on starting up a MySQL database locally using Docker. We will be working with a MySQL database that has the following tables: ```sql CREATE table users ( id int PRIMARY KEY, name varchar(100) ); CREATE TABLE blog_posts ( id int PRIMARY KEY, title varchar(100), body text, author_id int, FOREIGN KEY (author_id) REFERENCES users(id) ); ``` To inspect the database, we can use the [atlas schema inspect](https://atlasgo.io/cli-reference#atlas-schema-inspect) command. Starting with this version, we can add the `-w` flag to open the (local) web UI: ```text atlas schema inspect -u "mysql://root:pass@localhost:3306/example" -w ``` Our browser will open automatically, and we should see this output in the CLI: ```text Atlas UI available at: http://127.0.0.1:5800/projects/25769803777/schemas/1 Press Ctrl+C to stop ``` ![inspect_image](https://blog.ariga.io/uploads/images/posts/atlas-v0.3.0/atlas-post-ui.png) We can see that our schema has been inspected, and that it's currently synced. On the bottom-left part of the screen the UI displays an ERD (Entity-relation Diagram) showing the different tables and the connections between them (via foreign-keys). On the bottom-right, we can see the current schema, described using the Atlas DDL. In addition, on the top-right, we see the "Activity & History" panel that holds an audit history for all changes to our schema. ### Migrating our database schema with the Atlas Management UI Visualizing the current schema of the database is great, let's now see how we can use the UI to initiate a change (migration) to our schema. 
Click on the `Edit Schema` button in the top-right corner and add the following two tables to our schema: ```hcl table "categories" { schema = schema.example column "id" { null = false type = int } column "name" { null = true type = varchar(100) } primary_key { columns = [table.categories.column.id, ] } } table "post_categories" { schema = schema.example column "post_id" { type = int } column "category_id" { type = int } foreign_key "post_category_post" { columns = [table.post_categories.column.post_id, ] ref_columns = [table.blog_posts.column.id, ] } foreign_key "post_category_category" { columns = [table.post_categories.column.category_id, ] ref_columns = [table.categories.column.id, ] } } ``` Click the `Save` button and go back to the schema page. Observe that a few things changed on the screen: ![The UI after saving](https://blog.ariga.io/uploads/images/posts/atlas-v0.3.0/atlas-post-save.png) First, we can see that the UI states that our schema is "Out of Sync". This is because there is a difference between our *desired schema*, the one we are currently working on, and the *inspected schema*, which is the actual, current schema of our database. Second, we can see that our ERD has changed reflecting the addition of the `categories` and `post_categories` tables to our schema. These two tables that have been added are now shown in green. By clicking the "expand" icon on the top-right corner of the ERD panel, we can open a more detailed view of our schema. ![ERD displaying diff](https://blog.ariga.io/uploads/images/posts/atlas-v0.3.0/atlas-post-erd.png) Going back to our schema page, click the "Migrate Schema" to initiate a migration to apply the changes we want to make to our schema. Next, Atlas will setup the migration. 
* Follow the [Getting Started](https://atlasgo.io/cli/getting-started/setting-up) guide.
atlas-0.7.2/doc/website/blog/2022-02-01-atlas-v032-multi-schema.md000066400000000000000000000270151431455511600235140ustar00rootroot00000000000000--- title: "Announcing Atlas v0.3.2: multi-schema support" date: "2022-02-01" author: Ze'ev Manilovich authorURL: "https://github.com/zeevmoney" authorImageURL: "https://avatars.githubusercontent.com/u/7361100?v=4" authorTwitter: zeevmoney url: /announcing-atlas-v.0.3.2-multi-schema/ image: https://blog.ariga.io/uploads/images/posts/v0.3.2/multi-schema.png --- Last week we released [v0.3.2](https://github.com/ariga/atlas/releases/tag/v0.3.2) of the Atlas CLI. [Atlas](https://atlasgo.io) is an open source tool that helps developers manage their database schemas. Atlas plans database migrations for you based on your desired state. The two main commands are `inspect` and `apply`. The `inspect` command inspects your database and the `apply` command runs a migration by providing an HCL document with your desired state. The most notable change in this version is the ability to interact with multiple schemas in both database inspection and migration (the `apply` command). Some other interesting features include: * `schema apply --dry-run` - running `schema apply` in dry-run mode connects to the target database and prints the SQL migration to bring the target database to the desired state without prompting the user to approve it. * `schema fmt` - adds basic formatting capabilities to .hcl files. * `schema diff` - Connects to two given databases, inspects them, calculates the difference in their schemas, and prints a plan of SQL statements needed to migrate the "from" database to the state of the "to" database. In this post we will explore the topic of multi-schema support. We will start our discussion with a brief explanation of database schemas, next we'll present the difference between how MySQL and PostgreSQL treat "schemas". 
Why is this level of logical division necessary? Isn't it enough to be able to physically split data into different database instances?
This pattern is used to create a stronger boundary between the different tenants (customers) preventing the scenario where one tenant accidentally has access to another's data that is incidentally hosted on the same machine. Another useful feature of schemas is the ability to divide the same server into different environments for different development states. For example, you can have a "dev" and "staging" schema inside the same server. #### What are the differences between schemas in MySQL and PostgreSQL? A common source of confusion for developers (especially when switching teams or companies) is the difference between the meaning of schemas in MySQL and PostgreSQL. Both are currently supported by Atlas, and have some differences that should be clarified. Looking at the MySQL [glossary](https://dev.mysql.com/doc/refman/8.0/en/glossary.html#glos_schema), it states: > "In MySQL, physically, a schema is synonymous with a database. You can substitute the keyword SCHEMA instead of DATABASE in MySQL SQL syntax, for example using CREATE SCHEMA instead of CREATE DATABASE" As we can see, MySQL doesn't distinguish between schemas and databases in the terminology, but the underlying meaning is still the same - a logical boundary for resources and permissions. To demonstrate this, open your favorite MySQL shell and run: ```shell mysql> create schema atlas; Query OK, 1 row affected (0.00 sec) ``` To create a table in our new schema, assuming we have the required permissions, we can switch to the context of the schema that we just created, and create a table: ```sql USE atlas; CREATE table some_name ( id int not null ); ``` Alternatively, we can prefix the schema, by running: ```sql CREATE TABLE atlas.cli_versions ( id bigint auto_increment primary key, version varchar(255) not null ); ``` This prefix is important since, as we said, schemas are logical boundaries (unlike database servers). 
In Postgres, the following statement will create an entirely new database, where we can place different schemas and tables that may contain references between them:
To sum up, both MySQL and Postgres allow the creation of separate logical schemas within a physical database server; schemas can refer to one another via foreign-keys.
To verify that `schema inspect` works properly with multiple schemas, let's re-run:
* Follow the [Getting Started](https://atlasgo.io/cli/getting-started/setting-up) guide.
When we started working on the core engine for Atlas, we quickly discovered that there wasn't any established tool or package that could parse the information schema of popular databases and return a data structure representing it. Why is this the case? After all, most databases provide some command-line tool to perform inspection. For example, `psql`, the standard CLI for Postgres, supports the `\d` command to describe a table: ```text postgres=# \d users; Table "public.users" Column | Type | Collation | Nullable | Default --------+------------------------+-----------+----------+--------- id | integer | | not null | name | character varying(255) | | | Indexes: "users_pkey" PRIMARY KEY, btree (id) ``` So what makes inspection a non-trivial problem to solve? In this post, I will discuss two aspects that I think are interesting. The first is the variance in how databases expose schema metadata and the second is the complexity of the data model that is required to represent a database schema. #### How databases expose schema metadata Most of the SQL that we use in day-to-day applications is pretty standard. However, when it comes to exposing schema metadata, database engines vary greatly in the way they work. The way to retrieve information about things like available schemas and tables, column types and their default values and many other aspects of the database schema looks completely different in each database engine. 
This demonstrates just one way in which inspecting Postgres is different from inspecting MySQL.
* Foreign Keys contain references to columns in other tables, that may reside in other schemas.
Using Atlas, database schemas can be inspected to produce Go structs representing a graph of the database schema topology. Notice the many cyclic references that make it hard to print (but very ergonomic to traverse :-)):
* "Apply" - creates a concrete set of SQL queries to migrate the target database.
SCHEMA() in MySQL or CURRENT_SCHEMA() in PostgreSQL). // A NotExistError error is returned if the schema does not exists in the database. InspectSchema(ctx context.Context, name string, opts *InspectOptions) (*Schema, error) // InspectRealm returns the description of the connected database. InspectRealm(ctx context.Context, opts *InspectRealmOption) (*Realm, error) } ``` As you can see, the `Inspector` interface provides methods for inspecting on different levels: * `InspectSchema` - provides inspection capabilities for a single schema within a database server. * `InspectRealm` - inspects the entire connected database server. Each database driver (for example [MySQL](https://pkg.go.dev/ariga.io/atlas@master/sql/mysql#Driver), [Postgres](https://pkg.go.dev/ariga.io/atlas@master/sql/postgres#Driver) or [SQLite](https://pkg.go.dev/ariga.io/atlas@master/sql/sqlite#Driver)) implements this interface. Let's see how we can use this interface by inspecting a "dummy" SQLite database. Continuing on the example from above: ```go package tutorial func TestInspect(t *testing.T) { // ... skipping driver creation ctx := context.Background() // Create an "example" table for Atlas to inspect. _, err = db.ExecContext(ctx, "create table example ( id int not null );") if err != nil { log.Fatalf("failed creating example table: %s", err) } // Open an atlas driver. driver, err := sqlite.Open(db) if err != nil { log.Fatalf("failed opening atlas driver: %s", err) } // Inspect the created table. sch, err := driver.InspectSchema(ctx, "main", &schema.InspectOptions{ Tables: []string{"example"}, }) if err != nil { log.Fatalf("failed inspecting schema: %s", err) } tbl, ok := sch.Table("example") require.True(t, ok, "expected to find example table") require.EqualValues(t, "example", tbl.Name) id, ok := tbl.Column("id") require.True(t, ok, "expected to find id column") require.EqualValues(t, &schema.ColumnType{ Type: &schema.IntegerType{T: "int"}, // An integer type, specifically "int". 
Null: false, // The column has NOT NULL set. Raw: "INT", // The raw type inspected from the DB. }, id.Type) } ``` The full source-code for this example is available in the [atlas-examples repo](https://github.com/ariga/atlas-examples/blob/fb7fef80ca0ad635f056c40a0a1ea223ccf0a9c0/inspect_test.go#L15) . And voila! In this example, we first created a table named "example" by executing a query directly against the database. Next, we used the driver's `InspectSchema` method to inspect the schema of the table we created. Finally, we made some assertions on the returned `schema.Table` instance to verify that it was inspected correctly. #### Inspecting using the CLI If you don't want to write any code and just want to get a document representing your database schema, you can always use the Atlas CLI to do it for you. To get started, [head over to the docs](https://atlasgo.io/cli/getting-started/setting-up). #### Wrapping up In this post we presented the Go API of Atlas, which we initially built around our use case of building a new database migration tool, as part of the [Operational Data Graph Platform](https://blog.ariga.io/data-access-should-be-an-infrastructure-problem/) that we are creating here at Ariga. As we mentioned in the beginning of this post, there are a lot of cool things you can build if you have proper database inspection, which raises the question, what will **you** build with it? #### Getting involved with Atlas * Follow the [Getting Started](https://atlasgo.io/cli/getting-started/setting-up) guide. * Join our [Discord Server](https://discord.gg/zZ6sWVg6NT). * Follow us [on Twitter](https://twitter.com/ariga_io). 
This way, Terraform allows teams to truly deliver infrastructure-as-code (IaC), which completely changes how teams and organizations manage their cloud infrastructure.
Atlas aims to change that. The Atlas Terraform provider allows you to synchronize your database with your desired schema, in a safe and stateful manner. By using Atlas’s core migration engine and embedding it in a Terraform provider, we are enabling teams to manage their database schemas as part of their full IaC workflow. This way, teams can use existing providers (such as AWS or GCP) to provision the database instance and use the Atlas provider to keep the schema in sync. Integrating Atlas with Terraform is especially useful because it couples the state of the infrastructure with the state of the database. It is also extremely neat when using a [dev database](https://atlasgo.io/concepts/dev-database), which is a feature that combines infrastructure and DB management to provide safety and correctness. ### Demo #### Prerequisites Make sure you have installed: * [Docker](https://docs.docker.com/get-docker/) * [Terraform](https://www.terraform.io/downloads) Let’s see an example of the provider in action. First, spin a database using docker: ```bash docker run -p 3306:3306 --name iloveatlas -e MYSQL_ROOT_PASSWORD=pass -e MYSQL_DATABASE=market -d mysql:8 ``` Great! Now we have an instance of MySQL database running. As an extra measure of safety, we will run another identical database which will serve as a [Dev Database](https://atlasgo.io/concepts/dev-database). In short, the dev-db helps to catch errors that can only be detected when applying the schema. It is also useful to format the schema in a correct and predictable way. Read more about it [here](https://atlasgo.io/cli/dev-database). Run a second instance of MySQL on another port, to serve as a dev-db: ```shell docker run -p 3307:3306 --name devdb-greatness -e MYSQL_ROOT_PASSWORD=pass -e MYSQL_DATABASE=market -d mysql:8 ``` Next, we need an HCL file describing the desired state of our database. 
You can use [atlas cli](https://atlasgo.io/cli/getting-started/setting-up) to [inspect](https://atlasgo.io/cli/getting-started/inspection) the state of another database or you can use the following basic schema: ```hcl title="schema.hcl" table "orders" { schema = schema.market column "id" { null = false type = int auto_increment = true } column "name" { null = false type = varchar(20) } primary_key { columns = [column.id] } } schema "market" { charset = "utf8mb4" collate = "utf8mb4_0900_ai_ci" } ``` Save the schema file locally in a file named `schema.hcl`. Now that we have our database schema we can use terraform to apply that schema to our database. Create a file named `main.tf` and copy the following snippet: ```hcl title="main.tf" terraform { required_providers { atlas = { version = "~> 0.1.0" source = "ariga/atlas" } } } provider "atlas" {} data "atlas_schema" "market" { dev_db_url = "mysql://root:pass@localhost:3307/market" src = file("${path.module}/schema.hcl") } resource "atlas_schema" "market" { hcl = data.atlas_schema.market.hcl url = "mysql://root:pass@localhost:3306/market" dev_db_url = "mysql://root:pass@localhost:3307/market" } ``` Finally, init terraform: ```shell terraform init ``` And apply the schema to the database by executing: ```shell terraform apply --auto-approve ``` Awesome! Now your database should have a table named `orders`. 
To verify that we can connect to the database: ```shell $ docker exec -it iloveatlas mysql -ppass --database=market mysql> show tables; +------------------+ | Tables_in_market | +------------------+ | orders | +------------------+ 1 row in set (0.00 sec) mysql> show create table orders; +--------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | Table | Create Table | +--------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | orders | CREATE TABLE `orders` ( `id` int NOT NULL AUTO_INCREMENT, `name` varchar(20) NOT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci | +--------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ 1 row in set (0.00 sec) ``` For more examples and documentation visit the official [GitHub repository](https://github.com/ariga/terraform-provider-atlas) or the [provider page](https://registry.terraform.io/providers/ariga/atlas/latest) on Terraform registry. ### What's next In this post, we presented the [Atlas Terraform Provider](https://registry.terraform.io/providers/ariga/atlas/latest). The provider currently supports the basic, declarative migration workflow that is available in the Atlas engine. In upcoming versions, we will add support for an additional kind of workflow that is supported by the engine and is called [versioned migration authoring](https://entgo.io/blog/2022/03/14/announcing-versioned-migrations/). In addition, more advanced safety checks (such as simulation on database snapshots) and migration strategies are also being worked on. 
While the Terraform provider has just been released, the [core engine](https://github.com/ariga/atlas) that it is driving, is well tested and widely used (especially as the migration engine backing the popular [Ent](https://github.com/ent/ent) framework.) If you, like me, have always wanted to manage your database schema as part of your team's infrastructure-as-code workflow, give the Atlas Terraform provider a try! Have questions? Feedback? Find our team [on our Discord server](https://discord.gg/zZ6sWVg6NT). atlas-0.7.2/doc/website/blog/2022-05-23-announcing-atlas-projects.md000066400000000000000000000111031431455511600244200ustar00rootroot00000000000000--- title: "Announcing Atlas Project Files" authors: rotemtam tags: [cli, announcement] image: https://blog.ariga.io/uploads/images/posts/v0.4.1/project-file.png --- A few days ago we released [v0.4.1](https://github.com/ariga/atlas/releases/tag/v0.4.1) of Atlas. Along with [a multitude](https://github.com/ariga/atlas/compare/v0.4.0...v0.4.1) of improvements and fixes, I'm happy to announce the release of a feature that we've been planning for a while: [Project Files](https://atlasgo.io/atlas-schema/projects). Project files provide a way to describe and interact with multiple environments while working with Atlas. A project file is a file named `atlas.hcl` that contains one or more `env` blocks, each describing an environment. Each environment has a reference to where the schema definition file resides, a database URL and an array of the schemas in the database that are managed by Atlas: ```hcl // Define an environment named "local". env "local" { // Declare where the schema definition file resides. src = "./schema/project.hcl" // Define the URL of the database which is managed in // this environment. url = "mysql://localhost:3306" // Define the URL of the Dev Database for this environment. 
// See: https://atlasgo.io/dev-database dev = "mysql://localhost:3307" // The schemas in the database that are managed by Atlas. schemas = ["users", "admin"] } env "dev" { // ... a different env } ``` Project files arose from the need to provide a better experience for developers using the CLI. For example, consider you are using Atlas to plan migrations for your database schema. In this case, you will be running a command similar to this to plan a migration: ``` atlas migrate diff --dev-url mysql://root:password@localhost:3306 --to file://schema.hcl --dir file://migrations --format atlas ``` With project files, you can define an environment named `local`: ```hcl env "local" { url = "mysql://root:password@localhost:3306" dev = "mysql://root:password@localhost:3307" src = "./schema.hcl" migration { dir = "file://migrations" format = atlas } } ``` Then run the `migrate diff` command against this environment using the `--env` flag: ``` atlas migrate diff --env local ``` Alternatively, suppose you want to use Atlas to apply the schema on your staging environment. Without project files, you would use: ``` atlas schema apply -u mysql://root:password@db.ariga.dev:3306 --dev-url mysql://root:password@localhost:3307 -f schema.hcl ``` To do the same using a project file, define another env named `staging`: ```hcl env "staging" { url = "mysql://root:password@db.ariga.dev:3306" dev = "mysql://root:password@localhost:3307" src = "./schema.hcl" } ``` Then run: ``` atlas schema apply --env staging ``` ### Passing credentials as input values Similar to [schema definition files](/atlas-schema/sql-resources), project files also support [Input Variables](/ddl/input-variables). This means that we can define `variable` blocks on the project file to declare which values should be provided when the file is evaluated. This mechanism can (and should) be used to avoid committing to source control database credentials. 
To do this, first define a variable named `db_password`: ```hcl variable "db_password" { type = string } ``` Next, replace the database password in all connection strings with a reference to this variable, for example: ```hcl env "staging" { url = "mysql://root:${var.db_password}@db.ariga.dev:3306" dev = "mysql://root:${var.db_password}@localhost:3307" src = "./schema.hcl" } ``` If we run `schema apply` without providing the password input variable, we will receive an error message: ``` Error: missing value for required variable "db_password" ``` To provide the input variable run: ``` atlas schema apply --env staging --var db_password=pass ``` Input variables can be used for many other use cases by passing them as [input values to schema files](https://atlasgo.io/atlas-schema/projects#project-input-variables). ### What's next In this post, I presented [Project Files](https://atlasgo.io/atlas-schema/projects), a new feature recently added to Atlas to help developers create more fluent workflows for managing changes to their database schemas. In the coming weeks we will be adding a few more improvements to the dev flow, such as support for marking a specific environment as the default one (alleviating the need to specify `--env` in many cases) and [multi-file schema definitions](https://github.com/ariga/atlas/issues/510). Have questions? Feedback? Find our team [on our Discord server](https://discord.gg/zZ6sWVg6NT). atlas-0.7.2/doc/website/blog/2022-06-09-announcing-cockroachdb-support-preview.md000066400000000000000000000140541431455511600271350ustar00rootroot00000000000000--- title: Announcing v0.4.2 with preview support for CockroachDB authors: Hedwigz tags: [cockroachdb, integration, announcement] image: https://blog.ariga.io/uploads/images/posts/cockroachdb/atlas_n_crdb.png --- Today, I'm happy to announce the release of [v0.4.2](https://github.com/ariga/atlas/releases/tag/v0.4.2) of the Atlas CLI. 
This version includes many improvements and fixes, but I wanted to share with you exciting news about something I personally worked on. As of v0.4.2, Atlas includes preview support for CockroachDB 🎉 [Atlas](https://atlasgo.io) is an open-source project that helps developers to better manage their database schemas. It has a [CLI tool](https://atlasgo.io/cli-reference) and a [Terraform integration](https://atlasgo.io/blog/2022/05/04/announcing-terraform-provider). By using Atlas's Data Definition Language (with a syntax similar to Terraform), users can plan, verify and apply changes to their databases in a simple, declarative workflow. Earlier this year, Atlas became the [migration engine for Ent](https://entgo.io/blog/2022/01/20/announcing-new-migration-engine), a widely popular, Linux Foundation backed entity framework for Go. [CockroachDB](https://www.cockroachlabs.com/) is an [open-source](https://github.com/cockroachdb/cockroach) NewSQL database. From their README: > CockroachDB is a distributed SQL database built on a transactional and strongly-consistent > key-value store. It scales horizontally; survives disk, machine, rack, and even datacenter > failures with minimal latency disruption and no manual intervention; supports strongly-consistent > ACID transactions; and provides a familiar SQL API for structuring, manipulating, and querying data. CockroachDB has been gaining popularity and many of you [have](https://github.com/ent/ent/issues/2545) [been](https://github.com/ariga/atlas/issues/785#issue-1231951038) [asking](https://github.com/ariga/atlas/issues/785#issuecomment-1125853135) for Atlas to support it. While CockroachDB aims to be PostgreSQL compatible, it still has some incompatibilities (e.g. 
[1](https://github.com/cockroachdb/cockroach/issues/20296#issuecomment-1066140651), [2](https://github.com/cockroachdb/cockroach/issues/82064),[3](https://github.com/cockroachdb/cockroach/issues/81659)) which prevented Atlas users using the existing Postgres dialect from working with it. With the latest release of Atlas, the Postgres driver automatically detects if it is connected to a CockroachDB database and uses a custom driver which provides compatability with CockroachDB. ### Getting started with Atlas and CockroachDB Let's see how we can use Atlas CLI to manage the schema of a CockroachDB database. Start by downloading the latest version of Atlas, on macOS: ``` brew install ariga/tap/atlas ``` For installation instructions on other platforms, see [the docs](https://atlasgo.io/cli/getting-started/setting-up#install-the-cli). For the purpose of this example, let's spin up a local, [single-node CockroachDB cluster](https://www.cockroachlabs.com/docs/stable/cockroach-start-single-node.html) in a container by running: ``` docker run --rm -d -p 26257:26257 --name crdb cockroachdb/cockroach start-single-node --insecure ``` Next, let's seed the database with a simple table: ``` docker exec crdb cockroach sql --insecure -e 'CREATE TABLE users (id int primary key);' ``` After creating the `users` table, use Atlas's `schema inspect` command to read the schema of our local database and save the result to a file: ``` atlas schema inspect -u 'postgres://root:pass@localhost:26257/?sslmode=disable' --schema public > schema.hcl ``` Observe the current HCL representation of the `public` schema, which contains our newly created table, `users`: ```hcl table "users" { schema = schema.public column "id" { null = false type = bigint } primary_key { columns = [column.id] } } schema "public" { } ``` Next, edit `schema.hcl` to add a column to the `users` table: ```hcl title="schema.hcl" {7-9} table "users" { schema = schema.public column "id" { null = false type = bigint } column 
"name" { type = varchar(100) } primary_key { columns = [column.id] } } schema "public" { } ``` Now apply the schema using the `schema apply` command: ``` atlas schema apply -u 'postgres://root:pass@localhost:26257/?sslmode=disable' --schema public -f schema.hcl ``` Atlas prints out the planned changes and asks for your confirmation: ``` -- Planned Changes: -- Create "test" table ALTER TABLE "public"."users" ADD COLUMN "name" character varying(100) NOT NULL ? Are you sure?: ▸ Apply Abort ``` After hitting "Apply", Atlas applies the desired schema to the database: ``` ✔ Apply ``` We have successfully applied our schema to our database. To stop the container running CockroachDB run: ``` docker stop crdb ``` ### Learn more about Atlas In this short example, we demonstrated two of Atlas's basic features: database inspection and declarative schema migration (applying a desired schema on a database). Here are some topics you may want to explore when getting started with Atlas: * [Learn the DDL](/atlas-schema/sql-resources) - learn how to define any SQL resource in Atlas's data definition language. * [Try the Terraform Provider](https://atlasgo.io/blog/2022/05/04/announcing-terraform-provider) - see how you can use the Atlas Terraform Provider to integrate schema management in your general Infrastructure-as-Code workflows. * [Use the `migrate` command to author migrations](/cli-reference#atlas-migrate) - In addition to the Terraform-like declarative workflow, Atlas can manage a migration script directory for you based on your desired schema. ### Preview support The integration of Atlas with CockroachDB is well tested with version `v21.2.11` (at the time of writing, `latest`) and will be extended in the future. If you're using other versions of CockroachDB or looking for help, don't hesitate to [file an issue](https://github.com/ariga/atlas/issues) or join our [Discord channel](https://discord.gg/zZ6sWVg6NT). Have questions? Feedback? 
Find our team [on our Discord server](https://discord.gg/zZ6sWVg6NT). atlas-0.7.2/doc/website/blog/2022-07-14-announcing-atlas-lint.md000066400000000000000000000125351431455511600235510ustar00rootroot00000000000000--- title: Announcing v0.5.0 with Migration Directory Linting authors: rotemtam tags: [atlas, lint, ci] image: https://blog.ariga.io/uploads/images/posts/v0.5.0/atlas-lint.png --- With the release of [v0.5.0](https://github.com/ariga/atlas/releases/tag/v0.5.0), we are happy to announce a very significant milestone for the project. While this version includes some cool features (such as multi-file schemas) and a [swath](https://github.com/ariga/atlas/compare/v0.4.2...v0.5.0) of incremental improvements and bugfixes, there is one feature that we're particularly excited about and want to share with you in this post. As most outages happen directly as a result of a change to a system, Atlas provides users with the means to verify the safety of planned changes before they happen. The [`sqlcheck`](https://pkg.go.dev/ariga.io/atlas@master/sql/sqlcheck) package provides interfaces for analyzing the contents of SQL files to generate insights on the safety of many kinds of changes to database schemas. With this package, developers may define an `Analyzer` that can be used to diagnose the impact of SQL statements on the target database. This functionality is exposed to CLI users via the `migrate lint` subcommand. By utilizing the `sqlcheck` package, Atlas can now check your migration directory for common problems and issues. ### `atlas migrate lint` in action Recall that Atlas uses a [dev database](https://atlasgo.io/concepts/dev-database) to plan and simulate schema changes. 
Let's start by spinning up a container that will serve as our dev database: ```text docker run --name atlas-db-dev -d -p 3307:3306 -e MYSQL_ROOT_PASSWORD=pass mysql ``` Next let's create `schema.hcl`, the HCL file which will contain the desired state of our database: ```hcl title=schema.hcl schema "example" { } table "users" { schema = schema.example column "id" { type = int } column "name" { type = varchar(255) } primary_key { columns = [ column.id ] } } ``` To simplify the commands we need to type in this demo, let's create an Atlas [project file](https://atlasgo.io/atlas-schema/projects) to define a local environment. ```hcl title=atlas.hcl env "local" { src = "./schema.hcl" url = "mysql://root:pass@localhost:3306" dev = "mysql://root:pass@localhost:3307" } ``` Next, let's plan the initial migration that creates the `users` table: ```text atlas migrate diff --env local ``` Observe that the `migrations/` directory was created with an `.sql` file and a file named `atlas.sum`: ```text ├── atlas.hcl ├── migrations │ ├── 20220714090139.sql │ └── atlas.sum └── schema.hcl ``` This is the contents of our new migration script: ```sql -- add new schema named "example" CREATE DATABASE `example`; -- create "users" table CREATE TABLE `example`.`users` (`id` int NOT NULL, `name` varchar(255) NOT NULL, PRIMARY KEY (`id`)) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; ``` Next, let's make a destructive change to the schema. Destructive changes are changes to a database schema that result in loss of data, such as dropping a column or table. Let's remove the `name` name column from our desired schema: ```hcl title=schema.hcl {8} schema "example" { } table "users" { schema = schema.example column "id" { type = int } // Notice the "name" column is missing. 
primary_key { columns = [ column.id ] } } ``` Now, let's plan a migration to this new schema: ```text atlas migrate diff --env local ``` Observe the new migration which Atlas planned for us: ```sql -- modify "users" table ALTER TABLE `example`.`users` DROP COLUMN `name`; ``` Finally, let's use `atlas migrate lint` to analyze this change and verify it's safety: ```text atlas migrate lint --env local --latest 1 Destructive changes detected in file 20220714090811.sql: L2: Dropping non-virtual column "name" ``` When we run the `lint` command, we need to instruct Atlas on how to decide what set of migration files to analyze. Currently, two modes are supported. * `--git-base `: which selects the diff between the provided branch and the current one as the changeset. * `--latest ` which selects the latest `n` migration files as the changeset. As expected, Atlas analyzed this change and detected a _destructive change_ to our database schema. In addition, Atlas users can analyze the migration directory to automatically detect: * Data-dependent changes * Migration Directory integrity * Backward-incompatible changes (coming soon) * Drift between the desired and the migration directory (coming soon) * .. and more ### Wrapping up We started Atlas more than a year ago because we felt that the industry deserves a better way to manage databases. A huge amount of progress has been made as part of the DevOps movement on the fronts of managing compute, networking and configuration. So much, in fact, that it always baffled us to see that the database, the single most critical component of any software system, did not receive this level of treatment. Until today, the task of verifying the safety of migration scripts was reserved to humans (preferably SQL savvy, and highly experienced). We believe that with this milestone we are beginning to pave a road to a reality where teams can move as quickly and safely with their databases as they can with their code. Have questions? Feedback? 
Find our team [on our Discord server](https://discord.gg/zZ6sWVg6NT). atlas-0.7.2/doc/website/blog/2022-08-11-announcing-versioned-migration-authoring.mdx000066400000000000000000000265331431455511600276550ustar00rootroot00000000000000--- title: Announcing v0.6.0 with Versioned Migration Authoring authors: a8m tags: [atlas, migrations, versioned] image: https://blog.ariga.io/uploads/images/posts/v0.6.0/atlas-migrate-diff.png --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; With the release of [v0.6.0](https://github.com/ariga/atlas/releases/tag/v0.6.0), we are happy to announce official support for a style of workflow for managing changes to database schemas that we have been experimenting with in the past months: _Versioned Migration Authoring_. ### TL;DR * Atlas supports a declarative workflow (similar to Terraform) where users provide the desired database schema in a simple [data definition language](https://atlasgo.io/atlas-schema/sql-resources) and Atlas calculates a plan to get a target database to that state. This workflow is supported by the [`schema apply`](https://atlasgo.io/declarative/apply) command. * Many teams prefer a more imperative approach where each change to the database schema is checked-in to source control and reviewed during code-review. This type of workflow is commonly called _versioned migrations_ (or _change based migrations_) and is supported by many established tools such as [Flyway](https://flyway.org) and [Liquibase](https://liquibase.org). * The downside of the versioned migration approach is, of course, that it puts the burden of planning the migration on developers. As part of the Atlas project we advocate for a third combined approach that we call "Versioned Migration Authoring". * Versioned Migration Authoring is an attempt to combine the simplicity and expressiveness of the declarative approach with the control and explicitness of versioned migrations. 
* To use Versioned Migration Authoring today, use the `atlas migrate diff` command. See the [Getting Started](#getting-started) section below for instructions. ### Declarative Migrations The declarative approach has become increasingly popular with engineers nowadays because it embodies a convenient separation of concerns between application and infrastructure engineers. Application engineers describe _what_ (the desired state) they need to happen, and infrastructure engineers build tools that plan and execute ways to get to that state (_how_). This division of labor allows for great efficiencies as it abstracts away the complicated inner workings of infrastructure behind a simple, easy to understand API for the application developers and allows for specialization and development of expertise to pay off for the infra people. With declarative migrations, the desired state of the database schema is given as input to the migration engine, which plans and executes a set of actions to change the database to its desired state. For example, suppose your application uses a small SQLite database to store its data. In this database, you have a `users` table with this structure: ```hcl schema "main" {} table "users" { schema = schema.main column "id" { type = int } column "greeting" { type = text } } ``` Now, suppose that you want to add a default value of `"shalom"` to the `greeting` column. Many developers are not aware that it isn't possible to modify a column's default value in an existing table in SQLite. Instead, the common practice is to create a new table, copy the existing rows into the new table and drop the old one after. 
Using the declarative approach, developers can change the default value for the `greeting` column: ```hcl {10} schema "main" {} table "users" { schema = schema.main column "id" { type = int } column "greeting" { type = text default = "shalom" } } ``` And have Atlas's engine devise a plan similar to this: ```sql -- Planned Changes: -- Create "new_users" table CREATE TABLE `new_users` (`id` int NOT NULL, `greeting` text NOT NULL DEFAULT 'shalom') -- Copy rows from old table "users" to new temporary table "new_users" INSERT INTO `new_users` (`id`, `greeting`) SELECT `id`, IFNULL(`greeting`, 'shalom') AS `greeting` FROM `users` -- Drop "users" table after copying rows DROP TABLE `users` -- Rename temporary table "new_users" to "users" ALTER TABLE `new_users` RENAME TO `users` ``` ### Versioned Migrations As the database is one of the most critical components in any system, applying changes to its schema is rightfully considered a dangerous operation. For this reason, many teams prefer a more imperative approach where each change to the database schema is checked in to source control and reviewed during code-review. Each such change is called a "migration", as it migrates the database schema from the previous version to the next. To support this kind of requirement, many popular database schema management tools such as [Flyway](https://flywaydb.org/), [Liquibase](https://liquibase.org/) or [golang-migrate](https://github.com/golang-migrate/migrate) support a workflow that is commonly called "versioned migrations". In addition to the higher level of control which is provided by versioned migrations, applications are often deployed to multiple remote environments at once. These environments are not controlled (or even accessible) by the development team. In such cases, declarative migrations, which rely on a network connection to the target database and on human approval of migrations plans in real-time, are not a feasible strategy. 
With versioned migrations (sometimes called "change-based migrations"), instead of describing the desired state ("what the database should look like"), developers describe the changes themselves ("how to reach the state"). Most of the time, this is done by creating a set of SQL files containing the statements needed. Each of the files is assigned a unique version and a description of the changes. Tools like the ones mentioned earlier are then able to interpret the migration files and to apply (some of) them in the correct order to transition to the desired database structure. The benefit of the versioned migrations approach is that it is explicit: engineers know _exactly_ what queries are going to be run against the database when the time comes to execute them. Because changes are planned ahead of time, migration authors can control precisely how to reach the desired schema. If we consider a migration as a plan to get from state A to state B, oftentimes multiple paths exist, each with a very different impact on the database. To demonstrate, consider an initial state which contains a table with two columns: ```sql CREATE TABLE users ( id int, name varchar(255) ); ``` Suppose our desired state is: ```sql CREATE TABLE users ( id int, user_name varchar(255) ); ``` There are at least two ways get from the initial to the desired state: * Drop the `name` column and create a new `user_name` column. * Alter the name of the `name` column to `user_name`. Depending on the context, either may be the desired outcome for the developer planning the change. With versioned migrations, engineers have the ultimate confidence of what change is going to happen, which may not be known ahead of time in a _declarative_ approach. ### Migration Authoring The downside of the _versioned migration_ approach is, of course, that it puts the burden of planning the migration on developers. 
This requires a certain level of expertise that is not always available to every engineer, as we demonstrated in our example of setting a default value in a SQLite database above. As part of the Atlas project we advocate for a third combined approach that we call "Versioned Migration Authoring". Versioned Migration Authoring is an attempt to combine the simplicity and expressiveness of the declarative approach with the control and explicitness of versioned migrations. With versioned migration authoring, users still declare their desired state and use the Atlas engine to plan a safe migration from the existing to the new state. However, instead of coupling planning and execution, plans are instead written into normal migration files which can be checked into source control, fine-tuned manually and reviewed in regular code review processes. ### Getting started Start by downloading the Atlas CLI: Get the latest release with [Homebrew](https://brew.sh/): ```shell brew install ariga/tap/atlas ``` Download latest release. ```shell curl -LO https://release.ariga.io/atlas/atlas-darwin-amd64-latest ``` Make the atlas binary executable. ```shell chmod +x ./atlas-darwin-amd64-latest ``` Move the atlas binary to a file location on your system PATH. ```shell sudo mv ./atlas-darwin-amd64-latest /usr/local/bin/atlas ``` ```shell sudo chown root: /usr/local/bin/atlas ``` Download latest release. ```shell curl -LO https://release.ariga.io/atlas/atlas-linux-amd64-latest ``` Move the atlas binary to a file location on your system PATH. ```shell sudo install -o root -g root -m 0755 ./atlas-linux-amd64-latest /usr/local/bin/atlas ``` Download the [latest release](https://release.ariga.io/atlas/atlas-windows-amd64-latest.exe) and move the atlas binary to a file location on your system PATH. 
Next, define a simple Atlas schema with one table and an empty migration directory: ```hcl title="schema.hcl" schema "test" {} table "users" { schema = schema.test column "id" { type = int } } ``` Let's run `atlas migrate diff` with the necessary parameters to generate a migration script for creating our `users` table: * `--dir` the URL to the migration directory, by default it is `file://migrations`. * `--to` the URL of the desired state, an HCL file or a database connection. * `--dev-url` a [URL](/concepts/url) to a [Dev Database](/concepts/dev-database) that will be used to compute the diff. ```bash atlas migrate diff create_users \ --dir="file://migrations" \ --to="file://schema.hcl" \ --dev-url="mysql://root:pass@:3306/test" ``` Observe that two files were created in the `migrations` directory: By default, migration files are named with the following format `{{ now }}_{{ name }}.sql`. If you wish to use a different file format, use the `--dir-format` option. ```sql -- create "users" table CREATE TABLE `users` (`id` int NOT NULL) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; ``` In addition to the migration directory, Atlas maintains a file name `atlas.sum` which is used to ensure the integrity of the migration directory and force developers to deal with situations where migration order or contents were modified after the fact. ```text h1:t1fEP1rSsGf1gYrYCjsGyEyuM0cnhATlq93B7h8uXxY= 20220811074144_create_users.sql h1:liZcCBbAn/HyBTqBAEVar9fJNKPTb2Eq+rEKZeCFC9M= ``` ### Further reading To learn more about Versioned Migration Authoring: * Read the [docs](/versioned/diff) * [CLI Command Reference](/cli-reference#atlas-migrate-diff) Have questions? Feedback? Find our team [on our Discord server](https://discord.gg/zZ6sWVg6NT). 
atlas-0.7.2/doc/website/blog/2022-08-22-atlas-ci-github-actions.md000066400000000000000000000166131431455511600237600ustar00rootroot00000000000000--- title: Prevent destructive changes to your database with the Atlas GitHub Action authors: rotemtam tags: [ci, github-actions, announcement] image: https://atlasgo.io/uploads/images/actions-share-image.png --- Losing data is painful for almost all organizations. This is one of the reasons teams are very cautious when it comes to making changes to their databases. In fact, many teams set explicit policies on what kinds of changes to the database are allowed, often completely prohibiting any change that is destructive. Destructive changes are changes to a database schema that result in loss of data. For instance, consider a statement such as: ```sql ALTER TABLE `users` DROP COLUMN `email_address`; ``` This statement is considered destructive because whatever data is stored in the `email_address` column will be deleted from disk, with no way to recover it. Suppose you were in charge of a team that decided to prohibit destructive changes, how would you go about enforcing such a policy? From our experience, most teams enforce policies relating to schema migrations in code-review: a human engineer, preferably with some expertise in operating databases, manually reviews any proposed database migration scripts and rejects them if they contain destructive changes. Relying on a human reviewer to enforce such a policy is both expensive (it takes time and mental energy) and error-prone. Just like manual QA is slowly being replaced with automated testing, and manual code style reviews are being replaced with linters, isn't it time that we automate the process of ensuring that changes to database schemas are safe? 
### Announcing the Atlas GitHub Action Today, we're happy to announce the release of the official [Atlas GitHub Action](https://github.com/ariga/atlas-action) which can be used to apply [migration directory linting](2022-07-14-announcing-atlas-lint.md) for a bunch of popular database migration tools. [golang-migrate](https://github.com/golang-migrate/migrate), [goose](https://github.com/pressly/goose), [dbmate](https://github.com/amacneil/dbmate) and Atlas itself are already supported, and Flyway and Liquibase are coming soon. If you're using GitHub to manage your source code, you're in luck. By adding a short configuration file to your repository, you can start linting your schema migration scripts today! Let's see a short example. ### Setting up Suppose we are running a website for an e-commerce business. To store the data for our website we use a MySQL database. Because the data in this database is _everything_ to us, we use a careful _versioned migrations_ approach where each change to the database schema is described in an SQL script and stored in our Git repository. To execute these scripts we use a popular tool called [golang-migrate](https://github.com/golang-migrate/migrate). The source code for this example can be found in [rotemtam/atlas-action-demo](https://github.com/rotemtam/atlas-action-demo). 
Initially, our schema contains two tables: `users` and `orders`, documented in the first few migration files: Create the `users` table: ```sql title=migrations/20220819060736.up.sql -- create "users" table CREATE TABLE `users` ( `id` int NOT NULL, `name` varchar(100) NULL, PRIMARY KEY (`id`) ) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; ``` Add a unique `email` column: ```sql title=migrations/20220819061056.up.sql ALTER TABLE `users` ADD COLUMN `email` varchar(255) NOT NULL, ADD UNIQUE INDEX `email_unique` (`email`); ``` Create the `orders` table, with a foreign-key referencing the `users` table: ```sql title=migrations/20220819075145.up.sql -- create "orders" table CREATE TABLE `orders` ( `id` int NOT NULL, `user_id` int NOT NULL, `total` decimal(10) NOT NULL, PRIMARY KEY (`id`), INDEX `user_orders` (`user_id`), CONSTRAINT `user_orders` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON UPDATE NO ACTION ON DELETE NO ACTION ) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; ``` ### Installing the Atlas Action To make sure we never accidentally delete data during schema changes, we enact a policy that prohibits destructive changes to the database. To enforce this policy, we invoke the `atlas-action` GitHub Action from within our continuous integration flow by adding a workflow file name `.github/workflows/atlas-ci.yaml`: ```yaml title=.github/workflows/atlas-ci.yaml name: Atlas CI on: # Run whenever code is changed in the master branch, # change this to your root branch. push: branches: - master # Run on PRs where something changed under the `path/to/migration/dir/` directory. pull_request: paths: - 'migrations/*' jobs: lint: services: # Spin up a mysql:8.0.29 container to be used as the dev-database for analysis. 
mysql: image: mysql:8.0.29 env: MYSQL_ROOT_PASSWORD: pass MYSQL_DATABASE: test ports: - "3306:3306" options: >- --health-cmd "mysqladmin ping -ppass" --health-interval 10s --health-start-period 10s --health-timeout 5s --health-retries 10 runs-on: ubuntu-latest steps: - uses: actions/checkout@v3.0.1 with: fetch-depth: 0 # Mandatory unless "latest" is set below. - uses: ariga/atlas-action@v0 with: dir: migrations/ dir-format: golang-migrate # Or: atlas, goose, dbmate dev-url: mysql://root:pass@localhost:3306/test ``` ### Detecting a destructive change Next, let's see what happens when a developer accidentally proposes a destructive change, to drop a column in the `orders` table: ```sql title=migrations/20220819081116.up.sql -- modify "orders" table ALTER TABLE `orders` DROP COLUMN `total`; ``` This change is proposed in [PR #1](https://github.com/rotemtam/atlas-action-demo/pull/1/files) in our example repo. Because we have previously set up the Atlas GitHub Action to [lint our migration directory](https://github.com/rotemtam/atlas-action-demo/blob/master/.github/workflows/atlas-ci.yaml), whenever a file changes under the `migrations/` directory, a workflow is triggered. After letting our workflow complete, observe that GitHub informs us that the `Atlas CI / lint` check has failed: ![](https://atlasgo.io/uploads/images/workflow-summary-01.png) Clicking on the ["details" link](https://github.com/rotemtam/atlas-action-demo/runs/7960178186?check_suite_focus=true) we find a detailed explanation on the causes for the failure: ![](https://atlasgo.io/uploads/images/workflow-summary-02.png) Examining the Action [run summary](https://github.com/rotemtam/atlas-action-demo/actions/runs/2906742450) we find the following annotation: ![](https://atlasgo.io/uploads/images/workflow-summary-03.png) As you can see, Atlas has detected the destructive change we proposed to apply to our database and failed our build! 
### Wrapping up In this post we discussed why many teams set policies to prevent destructive changes to database schemas. We further showed how such policies can be enforced in an automated way using the official [Atlas GitHub Action](https://atlasgo.io/integrations/github-actions). ### Further reading To learn more about CI for database schema changes: * Read the [docs for `atlas migrate lint`](/versioned/lint) * Learn about [migration analyzers](/lint/analyzers) * [CLI Command Reference](/cli-reference#atlas-migrate-lint) Have questions? Feedback? Find our team [on our Discord server](https://discord.gg/zZ6sWVg6NT). atlas-0.7.2/doc/website/blog/2022-09-05-announcing-migration-execution.mdx000066400000000000000000000251651431455511600256700ustar00rootroot00000000000000--- title: The Atlas Migration Execution Engine authors: masseelch tags: [atlas, migrations, versioned, announcement] image: https://blog.ariga.io/uploads/images/posts/v0.7.0/atlas-v070.png --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; With the release of [v0.6.0](https://github.com/ariga/atlas/releases/tag/v0.6.0), we [introduced](https://atlasgo.io/blog/2022/08/11/announcing-versioned-migration-authoring) a workflow for managing changes to database schemas that we have called: _Versioned Migration Authoring_. Today, we released the first version of the Atlas migration execution engine, that can apply migration files on your database. In this post, we will give a brief overview of the features and what to expect in the future. ### Migration File Format The Atlas migration filename format follows a very simple structure: `version_[name].sql`, with the `name` being optional. `version` can be an arbitrary string. Migration files are lexicographically sorted by filename. ```shell ↪ tree . . 
├── 1_initial.sql ├── 2_second.sql ├── 3_third.sql └── atlas.sum 0 directories, 4 files ``` ```sql -- add new schema named "my_schema" CREATE DATABASE `my_schema`; -- create "tbl" table CREATE TABLE `my_schema`.`tbl` (`col` int NOT NULL); ``` ```sql ALTER TABLE `my_schema`.`tbl` ADD `col_2` TEXT; ``` ```sql CREATE TABLE `tbl_2` (`col` int NOT NULL); ``` ```text h1:cD9kOv5VDRLrKVZ0pM4CxAlhH6mgE8PQLpUeuIMKDcs= 1_initial.sql h1:SrFyOe0eg5WnE96GH3TtAt6046sfrOK4YKZYBlYr1SA= 2_second.sql h1:FPzwV+MzwyCss7SASZtyafXiYc9Un5bzlcc3u7MxLJU= 3_third.sql h1:uD+xDcA3Q+gHqwca2ZBDAHWYvC2eiUcwr1IgMsN0Q6c= ``` If you want to follow along, you can simply copy and paste the above files in a folder on your system. Make sure you have a database ready to work on. You can start an ephemeral docker container with the following command: ```shell # Run a local mysql container listening on port 3306. docker run --rm --name atlas-apply --detach --env MYSQL_ROOT_PASSWORD=pass -p 3306:3306 mysql:8 ``` ### Apply Migrations In order to apply migrations you need to have the Atlas CLI in version v0.7.0 or above. Follow the [installation instructions](https://atlasgo.io/getting-started#installation) if you don't have Atlas installed yet. Now, to apply the first migration of our migration directory, we call `atlas migrate apply` and pass in some configuration parameters. ```shell atlas migrate apply 1 \ --dir "file://migrations" \ --url "mysql://root:pass@localhost:3306/" ``` ```shell Migrating to version 1 (1 migrations in total): -- migrating version 1 -> CREATE DATABASE `my_schema`; -> CREATE TABLE `my_schema`.`tbl` (`col` int NOT NULL); -- ok (17.247319ms) ------------------------- -- 18.784204ms -- 1 migrations -- 2 sql statements ``` ### Migration Status Atlas saves information about the database schema revisions (applied migration versions) in a special table called `atlas_schema_revisions`. In the example above we connected to the database without specifying which schema to operate against. 
For this reason, Atlas created the revision table in a new schema called `atlas_schema_revisions`. For a schema-bound connection Atlas will put the table into the connected schema. We will see that in a bit. Go ahead and call `atlas migrate status` to gather information about the database migration state: ```shell atlas migrate status \ --dir "file://migrations" \ --url "mysql://root:pass@localhost:3306/" ``` ```shell Migration Status: PENDING -- Current Version: 1 -- Next Version: 2 -- Executed Files: 1 -- Pending Files: 2 ``` This output tells us that the last applied version is `1`, the next one is called `2` and that we still have two migrations pending. Let's apply the pending migrations: Note, that we do not pass an argument to the `apply`, in which case Atlas will attempt to apply all pending migrations. ```shell atlas migrate apply \ --dir "file://migrations" \ --url "mysql://root:pass@localhost:3306/" ``` ```shell Migrating to version 3 from 1 (2 migrations in total): -- migrating version 2 -> ALTER TABLE `my_schema`.`tbl` ADD `col_2` TEXT; -- ok (13.98847ms) -- migrating version 3 -> CREATE TABLE `tbl_2` (`col` int NOT NULL); Error 1046: No database selected ------------------------- -- 15.604338ms -- 1 migrations ok (1 with errors) -- 1 sql statements ok (1 with errors) Error: Execution had errors: Error 1046: No database selected Error: sql/migrate: execute: executing statement "CREATE TABLE `tbl_2` (`col` int NOT NULL);" from version "3": Error 1046: No database selected exit status 1 ``` What happened here? After further investigation, you will find that our connection URL is bound to the entire database, not to a schema. The third migration file however does not contain a schema qualifier for the `CREATE TABLE` statement. By default, Atlas wraps the execution of each migration file into one transaction. This transaction gets rolled back if any error occurs withing execution. 
Be aware though, that some databases, such as MySQL and MariaDB, don't support transactional DDL. If you want to learn how to configure the way Atlas uses transactions, have a look at the [docs](/versioned/apply#transaction-configuration). ### Migration Retry To resolve this edit the migration file and add a qualifier to the statement: ```sql CREATE TABLE `my_schema`.`tbl_2` (`col` int NOT NULL); ``` Since you changed the contents of a migration file, we have to re-calculate the directory integrity hash-sum by calling: ```shell atlas migrate hash --force \ --dir "file://migrations" ``` Then we can proceed and simply attempt to execute the migration file again. ```shell atlas migrate apply \ --dir "file://migrations" \ --url "mysql://root:pass@localhost:3306/" ``` ```shell Migrating to version 3 from 2 (1 migrations in total): -- migrating version 3 -> CREATE TABLE `my_schema`.`tbl_2` (`col` int NOT NULL); -- ok (15.168892ms) ------------------------- -- 16.741173ms -- 1 migrations -- 1 sql statements ``` Attempting to migrate again or calling `atlas migrate status` will tell us that all migrations have been applied onto the database and there is nothing to do at the moment. ```shell atlas migrate apply \ --dir "file://migrations" \ --url "mysql://root:pass@localhost:3306/" ``` ```shell No migration files to execute ``` ```shell atlas migrate status \ --dir "file://migrations" \ --url "mysql://root:pass@localhost:3306/" ``` ```shell Migration Status: OK -- Current Version: 3 -- Next Version: Already at latest version -- Executed Files: 3 -- Pending Files: 0 ``` ### Moving an existing project to Atlas with Baseline Migrations Another common scenario is when you need to move an existing project to Atlas. To do so, create an initial migration file reflecting the current state of a database schema by using `atlas migrate diff`. 
A very simple way to do so would be by heading over to the database from before, deleting the `atlas_schema_revisions` schema, emptying your migration directory and running the `atlas migrate diff` command. ```shell rm -rf migrations docker exec atlas-apply mysql -ppass -e "CREATE SCHEMA `my_schema_dev`;" # create a dev-db docker exec atlas-apply mysql -ppass -e "DROP SCHEMA `atlas_schema_revisions`;" atlas migrate diff \ --dir "file://migrations" \ --to "mysql://root:pass@localhost:3306/my_schema" \ --dev-url "mysql://root:pass@localhost:3306/my_schema_dev" ``` To demonstrate that Atlas can also work on a schema level instead of a realm connection, we are running on a connection bound to the `my_schema` schema this time. You should end up with the following migration directory: ```sql -- create "tbl" table CREATE TABLE `tbl` (`col` int NOT NULL, `col_2` text NULL) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; -- create "tbl_2" table CREATE TABLE `tbl_2` (`col` int NOT NULL) CHARSET utf8mb4 COLLATE utf8mb4_0900_ai_ci; ``` ```text h1:5zAQ8J0qziTKWg09fRNdbUf3rnLdvA1KHayh8l1SgM0= 20220908105652.sql h1:4WEB39tqALVYNQJTfULKizxEuUC37sgFs0LN5dKJpOw= ``` Now, let's create a new migration file to create a table `tbl_3` and update the directory integrity file. 
```shell atlas migrate new add_table --dir "file://migrations" echo "CREATE TABLE `tbl_3` (`col` text NULL);" >> migrations/$(ls -t migrations | head -n1) atlas migrate hash --force --dir "file://migrations" ``` Since we now have both a migration file representing our current database state and the new migration file to apply, we can make use of the `--baseline` flag: ```shell atlas migrate apply \ --dir "file://migrations" \ --url "mysql://root:pass@localhost:3306/my_schema" \ --baseline "20220908110527" # replace the version with the one generated by you ``` ```shell Migrating to version 20220908110847 from 20220908110527 (1 migrations in total): -- migrating version 20220908110847 -> CREATE TABLE `tbl_3` (`col` text NULL); -- ok (14.325493ms) ------------------------- -- 15.786455ms -- 1 migrations -- 1 sql statements ``` ### Outlook The Atlas migration engine is powering [Ent](https://entgo.io) and the execution engine is already being used within Ariga for several months. We will continue working on improving it, releasing cool features, such as assisted troubleshooting for failed migrations, a more intelligent, dialect-aware execution planning for things like MySQLs implicits commits and more. ### Wrapping up In this post we learned about the new migration execution engine of Atlas and some information about its internals. ### Further reading To learn more about Versioned Migration Authoring: * Read the [docs](/versioned/diff) * [CLI Command Reference](/cli-reference#atlas-migrate-diff) Have questions? Feedback? Find our team [on our Discord server](https://discord.gg/zZ6sWVg6NT). 
atlas-0.7.2/doc/website/blog/authors.yml000066400000000000000000000013171431455511600201750ustar00rootroot00000000000000Hedwigz: name: Amit Shani title: Software Engineer at Ariga url: "https://github.com/hedwigz" image_url: "https://avatars.githubusercontent.com/u/8277210?v=4" twitter: itsamitush rotemtam: name: Rotem Tamir title: Building Atlas url: "https://github.com/rotemtam" image_url: "https://s.gravatar.com/avatar/36b3739951a27d2e37251867b7d44b1a?s=80" twitter: _rtam a8m: name: Ariel Mashraki title: Building Atlas url: "https://github.com/a8m" image_url: "https://avatars.githubusercontent.com/u/7413593?v=4" twitter: arielmashraki masseelch: name: Jannik Clausen title: Building Atlas url: "https://github.com/masseelch" image_url: "https://avatars.githubusercontent.com/u/12862103?v=4"atlas-0.7.2/doc/website/docusaurus.config.js000066400000000000000000000151151431455511600210420ustar00rootroot00000000000000/** @type {import('@docusaurus/types').DocusaurusConfig} */ module.exports = { title: 'Atlas | Open-source database schema management tool', tagline: 'Manage your database schemas with Atlas CLI', url: 'https://atlasgo.io', baseUrl: '/', onBrokenLinks: 'throw', onBrokenMarkdownLinks: 'warn', organizationName: 'ariga', projectName: 'atlas', themeConfig: { prism: { additionalLanguages: ['hcl'], magicComments: [ { className: 'theme-code-block-highlighted-line', line: 'highlight-next-line', block: {start: 'highlight-start', end: 'highlight-end'}, }, { className: 'code-block-error-message', line: 'highlight-next-line-error-message', }, { className: 'code-block-info-line', line: 'highlight-next-line-info', }, ], }, algolia: { appId: 'D158RRDJO1', apiKey: "3585a7e658ef4ab7775a9099be3778d2", indexName: "atlasgo", }, navbar: { title: 'Atlas', logo: { alt: 'Atlas', src: 'https://atlasgo.io/uploads/landing/logo.svg', }, items: [ { href: 'https://github.com/ariga/atlas', className: 'header-github-link', position: 'right', }, { href: 'https://discord.gg/zZ6sWVg6NT', 
className: 'header-discord-link', position: 'right', },{ href: 'https://twitter.com/ariga_io', className: 'header-twitter-link', position: 'right', }, { to: 'getting-started', label: 'Docs', position: 'left', }, { to: 'guides', label: 'Guides', position: 'left', }, { to: 'blog', label: 'Blog', position: 'left' }, ], }, // "footer": { "links": [ { "title": "Docs", "items": [ {"label": "Getting Started", "to": "getting-started"}, {"label": "Data Definition Language ", "to": "guides/ddl"}, {"label": "CLI Reference", "to": "cli-reference"}, {"label": "Blog", "to": "blog"}, {"label": "Guides", "to": "guides"}, {"label": "About", "to": "about"}, {"label": "GoDoc", "to": "https://pkg.go.dev/ariga.io/atlas"}, ] }, { "title": "Community", "items": [ {"label": "GitHub", "to": "https://github.com/ariga/atlas"}, {"label": "Discord", "to": "https://discord.gg/zZ6sWVg6NT"}, {"label": "Twitter", "to": "https://twitter.com/ariga_io"} ] }, { "title": "Integrations", "items": [ {"label": "GitHub Actions", "to": "/integrations/github-actions"}, {"label": "Terraform", "to": "/integrations/terraform-provider"}, {"label": "Go API", "to": "/integrations/go-api"} ] }, { "title": "Legal", "items": [ {"label": "Privacy Policy", "to": "https://ariga.io/legal/privacy"}, {"label": "Terms of Service", "to": "https://ariga.io/legal/tos"}, {"label": "End User License", "to": "https://ariga.io/legal/atlas/eula"}, ] } ], copyright: ` Copyright © ${new Date().getFullYear()} The Atlas Authors. The Go gopher was designed by Renee French.
The design for the Go gopher is licensed under the Creative Commons 3.0 Attributions license. Read this article for more details.
`, }, announcementBar: { id: 'announcementBar-1', // Increment on change content: `️🚀 Sign up for a user testing session and receive exclusive Atlas swag, register here!`, isCloseable: true, }, }, plugins: [ [ '@docusaurus/plugin-client-redirects', { redirects: [ { to: '/getting-started/', from: '/cli/getting-started/setting-up', }, { to: '/integrations/terraform-provider', from: '/terraform-provider' }, { to: '/integrations/go-api', from: ['/go-api/intro','/go-api/inspect'], }, { to: '/cli-reference', from: '/cli/reference', }, { to: '/concepts/url', from: '/cli/url', }, { to: '/concepts/dev-database', from: '/dev-database', }, { to: '/guides/ddl', from: ['/ddl/intro', '/concepts/ddl'], }, { to: '/atlas-schema/input-variables', from: '/ddl/input-variables', }, { to: '/atlas-schema/projects', from: '/cli/projects', }, { to: '/atlas-schema/sql-types', from: '/ddl/sql-types', }, { to: '/atlas-schema/sql-resources', from: '/ddl/sql', }, { to: '/guides', from: '/knowledge', }, { to: '/guides/mysql/generated-columns', from: '/knowledge/mysql/generated-columns', }, { to: '/guides/postgres/partial-indexes', from: '/knowledge/postgres/partial-indexes', }, { to: '/guides/postgres/serial-columns', from: '/knowledge/postgres/serial-columns', }, { to: '/guides/ddl', from: '/knowledge/ddl', }, ], }, ], [ require.resolve('docusaurus-gtm-plugin'), { id: 'GTM-T9GX8BR', // GTM Container ID } ], ], presets: [ [ '@docusaurus/preset-classic', { docs: { routeBasePath: '/', sidebarPath: require.resolve('./sidebars.js'), path: "../md", showLastUpdateAuthor: false, showLastUpdateTime: false, }, gtag: { trackingID: 'G-Z88N4TF03R' }, blog: { showReadingTime: true, blogSidebarTitle: 'All our posts', blogSidebarCount: 'ALL', }, theme: { customCss: require.resolve('./src/css/custom.css'), }, }, ], ], }; atlas-0.7.2/doc/website/package-lock.json000066400000000000000000034416241431455511600202720ustar00rootroot00000000000000{ "name": "website", "version": "0.0.0", "lockfileVersion": 2, 
"requires": true, "packages": { "": { "name": "website", "version": "0.0.0", "dependencies": { "@docusaurus/core": "^2.0.0-rc.1", "@docusaurus/plugin-client-redirects": "^2.0.0-rc.1", "@docusaurus/preset-classic": "^2.0.0-rc.1", "@mdx-js/react": "^1.6.21", "@svgr/webpack": "^5.5.0", "clsx": "^1.1.1", "docusaurus-gtm-plugin": "^0.0.2", "file-loader": "^6.2.0", "react": "^17.0.1", "react-dom": "^17.0.1", "url-loader": "^4.1.1", "url-parse": "^1.5.2" } }, "node_modules/@algolia/autocomplete-core": { "version": "1.7.1", "resolved": "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.7.1.tgz", "integrity": "sha512-eiZw+fxMzNQn01S8dA/hcCpoWCOCwcIIEUtHHdzN5TGB3IpzLbuhqFeTfh2OUhhgkE8Uo17+wH+QJ/wYyQmmzg==", "dependencies": { "@algolia/autocomplete-shared": "1.7.1" } }, "node_modules/@algolia/autocomplete-preset-algolia": { "version": "1.7.1", "resolved": "https://registry.npmjs.org/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.7.1.tgz", "integrity": "sha512-pJwmIxeJCymU1M6cGujnaIYcY3QPOVYZOXhFkWVM7IxKzy272BwCvMFMyc5NpG/QmiObBxjo7myd060OeTNJXg==", "dependencies": { "@algolia/autocomplete-shared": "1.7.1" }, "peerDependencies": { "@algolia/client-search": "^4.9.1", "algoliasearch": "^4.9.1" } }, "node_modules/@algolia/autocomplete-shared": { "version": "1.7.1", "resolved": "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.7.1.tgz", "integrity": "sha512-eTmGVqY3GeyBTT8IWiB2K5EuURAqhnumfktAEoHxfDY2o7vg2rSnO16ZtIG0fMgt3py28Vwgq42/bVEuaQV7pg==" }, "node_modules/@algolia/cache-browser-local-storage": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.13.1.tgz", "integrity": "sha512-UAUVG2PEfwd/FfudsZtYnidJ9eSCpS+LW9cQiesePQLz41NAcddKxBak6eP2GErqyFagSlnVXe/w2E9h2m2ttg==", "dependencies": { "@algolia/cache-common": "4.13.1" } }, "node_modules/@algolia/cache-common": { "version": "4.13.1", "resolved": 
"https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.13.1.tgz", "integrity": "sha512-7Vaf6IM4L0Jkl3sYXbwK+2beQOgVJ0mKFbz/4qSxKd1iy2Sp77uTAazcX+Dlexekg1fqGUOSO7HS4Sx47ZJmjA==" }, "node_modules/@algolia/cache-in-memory": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.13.1.tgz", "integrity": "sha512-pZzybCDGApfA/nutsFK1P0Sbsq6fYJU3DwIvyKg4pURerlJM4qZbB9bfLRef0FkzfQu7W11E4cVLCIOWmyZeuQ==", "dependencies": { "@algolia/cache-common": "4.13.1" } }, "node_modules/@algolia/client-account": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.13.1.tgz", "integrity": "sha512-TFLiZ1KqMiir3FNHU+h3b0MArmyaHG+eT8Iojio6TdpeFcAQ1Aiy+2gb3SZk3+pgRJa/BxGmDkRUwE5E/lv3QQ==", "dependencies": { "@algolia/client-common": "4.13.1", "@algolia/client-search": "4.13.1", "@algolia/transporter": "4.13.1" } }, "node_modules/@algolia/client-analytics": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.13.1.tgz", "integrity": "sha512-iOS1JBqh7xaL5x00M5zyluZ9+9Uy9GqtYHv/2SMuzNW1qP7/0doz1lbcsP3S7KBbZANJTFHUOfuqyRLPk91iFA==", "dependencies": { "@algolia/client-common": "4.13.1", "@algolia/client-search": "4.13.1", "@algolia/requester-common": "4.13.1", "@algolia/transporter": "4.13.1" } }, "node_modules/@algolia/client-common": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.13.1.tgz", "integrity": "sha512-LcDoUE0Zz3YwfXJL6lJ2OMY2soClbjrrAKB6auYVMNJcoKZZ2cbhQoFR24AYoxnGUYBER/8B+9sTBj5bj/Gqbg==", "dependencies": { "@algolia/requester-common": "4.13.1", "@algolia/transporter": "4.13.1" } }, "node_modules/@algolia/client-personalization": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.13.1.tgz", "integrity": 
"sha512-1CqrOW1ypVrB4Lssh02hP//YxluoIYXAQCpg03L+/RiXJlCs+uIqlzC0ctpQPmxSlTK6h07kr50JQoYH/TIM9w==", "dependencies": { "@algolia/client-common": "4.13.1", "@algolia/requester-common": "4.13.1", "@algolia/transporter": "4.13.1" } }, "node_modules/@algolia/client-search": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.13.1.tgz", "integrity": "sha512-YQKYA83MNRz3FgTNM+4eRYbSmHi0WWpo019s5SeYcL3HUan/i5R09VO9dk3evELDFJYciiydSjbsmhBzbpPP2A==", "dependencies": { "@algolia/client-common": "4.13.1", "@algolia/requester-common": "4.13.1", "@algolia/transporter": "4.13.1" } }, "node_modules/@algolia/events": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@algolia/events/-/events-4.0.1.tgz", "integrity": "sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ==" }, "node_modules/@algolia/logger-common": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.13.1.tgz", "integrity": "sha512-L6slbL/OyZaAXNtS/1A8SAbOJeEXD5JcZeDCPYDqSTYScfHu+2ePRTDMgUTY4gQ7HsYZ39N1LujOd8WBTmM2Aw==" }, "node_modules/@algolia/logger-console": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.13.1.tgz", "integrity": "sha512-7jQOTftfeeLlnb3YqF8bNgA2GZht7rdKkJ31OCeSH2/61haO0tWPoNRjZq9XLlgMQZH276pPo0NdiArcYPHjCA==", "dependencies": { "@algolia/logger-common": "4.13.1" } }, "node_modules/@algolia/requester-browser-xhr": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.13.1.tgz", "integrity": "sha512-oa0CKr1iH6Nc7CmU6RE7TnXMjHnlyp7S80pP/LvZVABeJHX3p/BcSCKovNYWWltgTxUg0U1o+2uuy8BpMKljwA==", "dependencies": { "@algolia/requester-common": "4.13.1" } }, "node_modules/@algolia/requester-common": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.13.1.tgz", "integrity": 
"sha512-eGVf0ID84apfFEuXsaoSgIxbU3oFsIbz4XiotU3VS8qGCJAaLVUC5BUJEkiFENZIhon7hIB4d0RI13HY4RSA+w==" }, "node_modules/@algolia/requester-node-http": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.13.1.tgz", "integrity": "sha512-7C0skwtLdCz5heKTVe/vjvrqgL/eJxmiEjHqXdtypcE5GCQCYI15cb+wC4ytYioZDMiuDGeVYmCYImPoEgUGPw==", "dependencies": { "@algolia/requester-common": "4.13.1" } }, "node_modules/@algolia/transporter": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.13.1.tgz", "integrity": "sha512-pICnNQN7TtrcYJqqPEXByV8rJ8ZRU2hCiIKLTLRyNpghtQG3VAFk6fVtdzlNfdUGZcehSKGarPIZEHlQXnKjgw==", "dependencies": { "@algolia/cache-common": "4.13.1", "@algolia/logger-common": "4.13.1", "@algolia/requester-common": "4.13.1" } }, "node_modules/@ampproject/remapping": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==", "dependencies": { "@jridgewell/gen-mapping": "^0.1.0", "@jridgewell/trace-mapping": "^0.3.9" }, "engines": { "node": ">=6.0.0" } }, "node_modules/@babel/code-frame": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", "dependencies": { "@babel/highlight": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/compat-data": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.18.8.tgz", "integrity": "sha512-HSmX4WZPPK3FUxYp7g2T6EyO8j96HlZJlxmKPSh6KAcqwyDrfx7hKjXpAW/0FhFfTJsR0Yt4lAjLI2coMptIHQ==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.18.6.tgz", "integrity": 
"sha512-cQbWBpxcbbs/IUredIPkHiAGULLV8iwgNRMFzvbhEXISp4f3rUUXE5+TIw6KwUWUR3DwyI6gmBRnmAtYaWehwQ==", "dependencies": { "@ampproject/remapping": "^2.1.0", "@babel/code-frame": "^7.18.6", "@babel/generator": "^7.18.6", "@babel/helper-compilation-targets": "^7.18.6", "@babel/helper-module-transforms": "^7.18.6", "@babel/helpers": "^7.18.6", "@babel/parser": "^7.18.6", "@babel/template": "^7.18.6", "@babel/traverse": "^7.18.6", "@babel/types": "^7.18.6", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.1", "semver": "^6.3.0" }, "engines": { "node": ">=6.9.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/babel" } }, "node_modules/@babel/core/node_modules/semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/generator": { "version": "7.18.7", "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.18.7.tgz", "integrity": "sha512-shck+7VLlY72a2w9c3zYWuE1pwOKEiQHV7GTUbSnhyl5eu3i04t30tBY82ZRWrDfo3gkakCFtevExnxbkf2a3A==", "dependencies": { "@babel/types": "^7.18.7", "@jridgewell/gen-mapping": "^0.3.2", "jsesc": "^2.5.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/generator/node_modules/@jridgewell/gen-mapping": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", "dependencies": { "@jridgewell/set-array": "^1.0.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.9" }, "engines": { "node": ">=6.0.0" } }, "node_modules/@babel/helper-annotate-as-pure": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz", "integrity": "sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==", "dependencies": { "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-builder-binary-assignment-operator-visitor": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.6.tgz", "integrity": "sha512-KT10c1oWEpmrIRYnthbzHgoOf6B+Xd6a5yhdbNtdhtG7aO1or5HViuf1TQR36xY/QprXA5nvxO6nAjhJ4y38jw==", "dependencies": { "@babel/helper-explode-assignable-expression": "^7.18.6", "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-compilation-targets": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.18.6.tgz", "integrity": "sha512-vFjbfhNCzqdeAtZflUFrG5YIFqGTqsctrtkZ1D/NB0mDW9TwW3GmmUepYY4G9wCET5rY5ugz4OGTcLd614IzQg==", "dependencies": { "@babel/compat-data": "^7.18.6", "@babel/helper-validator-option": "^7.18.6", "browserslist": "^4.20.2", "semver": "^6.3.0" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-compilation-targets/node_modules/semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/helper-create-class-features-plugin": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.18.6.tgz", "integrity": "sha512-YfDzdnoxHGV8CzqHGyCbFvXg5QESPFkXlHtvdCkesLjjVMT2Adxe4FGUR5ChIb3DxSaXO12iIOCWoXdsUVwnqw==", "dependencies": { 
"@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-function-name": "^7.18.6", "@babel/helper-member-expression-to-functions": "^7.18.6", "@babel/helper-optimise-call-expression": "^7.18.6", "@babel/helper-replace-supers": "^7.18.6", "@babel/helper-split-export-declaration": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-create-regexp-features-plugin": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.18.6.tgz", "integrity": "sha512-7LcpH1wnQLGrI+4v+nPp+zUvIkF9x0ddv1Hkdue10tg3gmRnLy97DXh4STiOf1qeIInyD69Qv5kKSZzKD8B/7A==", "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "regexpu-core": "^5.1.0" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-define-polyfill-provider": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz", "integrity": "sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA==", "dependencies": { "@babel/helper-compilation-targets": "^7.13.0", "@babel/helper-module-imports": "^7.12.13", "@babel/helper-plugin-utils": "^7.13.0", "@babel/traverse": "^7.13.0", "debug": "^4.1.1", "lodash.debounce": "^4.0.8", "resolve": "^1.14.2", "semver": "^6.1.2" }, "peerDependencies": { "@babel/core": "^7.4.0-0" } }, "node_modules/@babel/helper-define-polyfill-provider/node_modules/semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/helper-environment-visitor": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.6.tgz", "integrity": "sha512-8n6gSfn2baOY+qlp+VSzsosjCVGFqWKmDF0cCWOybh52Dw3SEyoWR1KrhMJASjLwIEkkAufZ0xvr+SxLHSpy2Q==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-explode-assignable-expression": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz", "integrity": "sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg==", "dependencies": { "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-function-name": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.18.6.tgz", "integrity": "sha512-0mWMxV1aC97dhjCah5U5Ua7668r5ZmSC2DLfH2EZnf9c3/dHZKiFa5pRLMH5tjSl471tY6496ZWk/kjNONBxhw==", "dependencies": { "@babel/template": "^7.18.6", "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-hoist-variables": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", "dependencies": { "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-member-expression-to-functions": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.6.tgz", "integrity": "sha512-CeHxqwwipekotzPDUuJOfIMtcIHBuc7WAzLmTYWctVigqS5RktNMQ5bEwQSuGewzYnCtTWa3BARXeiLxDTv+Ng==", "dependencies": { "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-imports": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz", "integrity": "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==", "dependencies": { "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.18.8.tgz", "integrity": "sha512-che3jvZwIcZxrwh63VfnFTUzcAM9v/lznYkkRxIBGMPt1SudOKHAEec0SIRCfiuIzTcF7VGj/CaTT6gY4eWxvA==", "dependencies": { "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-module-imports": "^7.18.6", "@babel/helper-simple-access": "^7.18.6", "@babel/helper-split-export-declaration": "^7.18.6", "@babel/helper-validator-identifier": "^7.18.6", "@babel/template": "^7.18.6", "@babel/traverse": "^7.18.8", "@babel/types": "^7.18.8" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-optimise-call-expression": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz", "integrity": "sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==", "dependencies": { "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-plugin-utils": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.18.6.tgz", "integrity": "sha512-gvZnm1YAAxh13eJdkb9EWHBnF3eAub3XTLCZEehHT2kWxiKVRL64+ae5Y6Ivne0mVHmMYKT+xWgZO+gQhuLUBg==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-remap-async-to-generator": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.6.tgz", "integrity": "sha512-z5wbmV55TveUPZlCLZvxWHtrjuJd+8inFhk7DG0WW87/oJuGDcjDiu7HIvGcpf5464L6xKCg3vNkmlVVz9hwyQ==", 
"dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-wrap-function": "^7.18.6", "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-replace-supers": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.18.6.tgz", "integrity": "sha512-fTf7zoXnUGl9gF25fXCWE26t7Tvtyn6H4hkLSYhATwJvw2uYxd3aoXplMSe0g9XbwK7bmxNes7+FGO0rB/xC0g==", "dependencies": { "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-member-expression-to-functions": "^7.18.6", "@babel/helper-optimise-call-expression": "^7.18.6", "@babel/traverse": "^7.18.6", "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-simple-access": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz", "integrity": "sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==", "dependencies": { "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-skip-transparent-expression-wrappers": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.18.6.tgz", "integrity": "sha512-4KoLhwGS9vGethZpAhYnMejWkX64wsnHPDwvOsKWU6Fg4+AlK2Jz3TyjQLMEPvz+1zemi/WBdkYxCD0bAfIkiw==", "dependencies": { "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-split-export-declaration": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", "dependencies": { "@babel/types": "^7.18.6" }, 
"engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.18.6.tgz", "integrity": "sha512-MmetCkz9ej86nJQV+sFCxoGGrUbU3q02kgLciwkrt9QqEB7cP39oKEY0PakknEO0Gu20SskMRi+AYZ3b1TpN9g==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz", "integrity": "sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-wrap-function": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.18.6.tgz", "integrity": "sha512-I5/LZfozwMNbwr/b1vhhuYD+J/mU+gfGAj5td7l5Rv9WYmH6i3Om69WGKNmlIpsVW/mF6O5bvTKbvDQZVgjqOw==", "dependencies": { "@babel/helper-function-name": "^7.18.6", "@babel/template": "^7.18.6", "@babel/traverse": "^7.18.6", "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.18.6.tgz", "integrity": "sha512-vzSiiqbQOghPngUYt/zWGvK3LAsPhz55vc9XNN0xAl2gV4ieShI2OQli5duxWHD+72PZPTKAcfcZDE1Cwc5zsQ==", "dependencies": { "@babel/template": "^7.18.6", "@babel/traverse": "^7.18.6", "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", "dependencies": { "@babel/helper-validator-identifier": "^7.18.6", "chalk": "^2.0.0", "js-tokens": "^4.0.0" }, "engines": { "node": ">=6.9.0" } }, 
"node_modules/@babel/highlight/node_modules/chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" }, "engines": { "node": ">=4" } }, "node_modules/@babel/parser": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.18.8.tgz", "integrity": "sha512-RSKRfYX20dyH+elbJK2uqAkVyucL+xXzhqlMD5/ZXx+dAAwpyB7HsvnHe/ZUGOF+xLr5Wx9/JoXVTj6BQE2/oA==", "bin": { "parser": "bin/babel-parser.js" }, "engines": { "node": ">=6.0.0" } }, "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz", "integrity": "sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.6.tgz", "integrity": "sha512-Udgu8ZRgrBrttVz6A0EVL0SJ1z+RLbIeqsu632SA1hf0awEppD6TvdznoH+orIF8wtFFAV/Enmw9Y+9oV8TQcw==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-skip-transparent-expression-wrappers": "^7.18.6", "@babel/plugin-proposal-optional-chaining": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.13.0" } }, 
"node_modules/@babel/plugin-proposal-async-generator-functions": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.18.6.tgz", "integrity": "sha512-WAz4R9bvozx4qwf74M+sfqPMKfSqwM0phxPTR6iJIi8robgzXwkEgmeJG1gEKhm6sDqT/U9aV3lfcqybIpev8w==", "dependencies": { "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-remap-async-to-generator": "^7.18.6", "@babel/plugin-syntax-async-generators": "^7.8.4" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-proposal-class-properties": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz", "integrity": "sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==", "dependencies": { "@babel/helper-create-class-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-proposal-class-static-block": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz", "integrity": "sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw==", "dependencies": { "@babel/helper-create-class-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-class-static-block": "^7.14.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.12.0" } }, "node_modules/@babel/plugin-proposal-dynamic-import": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz", "integrity": 
"sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-dynamic-import": "^7.8.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-proposal-export-namespace-from": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.6.tgz", "integrity": "sha512-zr/QcUlUo7GPo6+X1wC98NJADqmy5QTFWWhqeQWiki4XHafJtLl/YMGkmRB2szDD2IYJCCdBTd4ElwhId9T7Xw==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-export-namespace-from": "^7.8.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-proposal-json-strings": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz", "integrity": "sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-json-strings": "^7.8.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-proposal-logical-assignment-operators": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.6.tgz", "integrity": "sha512-zMo66azZth/0tVd7gmkxOkOjs2rpHyhpcFo565PUP37hSp6hSd9uUKIfTDFMz58BwqgQKhJ9YxtM5XddjXVn+Q==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-proposal-nullish-coalescing-operator": { "version": "7.18.6", 
"resolved": "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz", "integrity": "sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-proposal-numeric-separator": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz", "integrity": "sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-numeric-separator": "^7.10.4" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-proposal-object-rest-spread": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.6.tgz", "integrity": "sha512-9yuM6wr4rIsKa1wlUAbZEazkCrgw2sMPEXCr4Rnwetu7cEW1NydkCWytLuYletbf8vFxdJxFhwEZqMpOx2eZyw==", "dependencies": { "@babel/compat-data": "^7.18.6", "@babel/helper-compilation-targets": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", "@babel/plugin-transform-parameters": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-proposal-optional-catch-binding": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz", "integrity": "sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw==", 
"dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-proposal-optional-chaining": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.6.tgz", "integrity": "sha512-PatI6elL5eMzoypFAiYDpYQyMtXTn+iMhuxxQt5mAXD4fEmKorpSI3PHd+i3JXBJN3xyA6MvJv7at23HffFHwA==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-skip-transparent-expression-wrappers": "^7.18.6", "@babel/plugin-syntax-optional-chaining": "^7.8.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-proposal-private-methods": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz", "integrity": "sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==", "dependencies": { "@babel/helper-create-class-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-proposal-private-property-in-object": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz", "integrity": "sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw==", "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-create-class-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-private-property-in-object": "^7.14.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, 
"node_modules/@babel/plugin-proposal-unicode-property-regex": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz", "integrity": "sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w==", "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=4" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-async-generators": { "version": "7.8.4", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-class-properties": { "version": "7.12.13", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", "dependencies": { "@babel/helper-plugin-utils": "^7.12.13" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-class-static-block": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-dynamic-import": { "version": "7.8.3", "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-export-namespace-from": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.3" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-import-assertions": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.18.6.tgz", "integrity": "sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-json-strings": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-jsx": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz", "integrity": "sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { 
"node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-logical-assignment-operators": { "version": "7.10.4", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-numeric-separator": { "version": "7.10.4", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-object-rest-spread": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-optional-catch-binding": { "version": "7.8.3", "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-optional-chaining": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-private-property-in-object": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-top-level-await": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-typescript": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz", "integrity": 
"sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-arrow-functions": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz", "integrity": "sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-async-to-generator": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz", "integrity": "sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag==", "dependencies": { "@babel/helper-module-imports": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-remap-async-to-generator": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-block-scoped-functions": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz", "integrity": "sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-block-scoping": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.18.6.tgz", "integrity": 
"sha512-pRqwb91C42vs1ahSAWJkxOxU1RHWDn16XAa6ggQ72wjLlWyYeAcLvTtE0aM8ph3KNydy9CQF2nLYcjq1WysgxQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-classes": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.18.8.tgz", "integrity": "sha512-RySDoXdF6hgHSHuAW4aLGyVQdmvEX/iJtjVre52k0pxRq4hzqze+rAVP++NmNv596brBpYmaiKgTZby7ziBnVg==", "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-function-name": "^7.18.6", "@babel/helper-optimise-call-expression": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-replace-supers": "^7.18.6", "@babel/helper-split-export-declaration": "^7.18.6", "globals": "^11.1.0" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-computed-properties": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.6.tgz", "integrity": "sha512-9repI4BhNrR0KenoR9vm3/cIc1tSBIo+u1WVjKCAynahj25O8zfbiE6JtAtHPGQSs4yZ+bA8mRasRP+qc+2R5A==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-destructuring": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.18.6.tgz", "integrity": "sha512-tgy3u6lRp17ilY8r1kP4i2+HDUwxlVqq3RTc943eAWSzGgpU1qhiKpqZ5CMyHReIYPHdo3Kg8v8edKtDqSVEyQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-dotall-regex": { 
"version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz", "integrity": "sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg==", "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-duplicate-keys": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.6.tgz", "integrity": "sha512-NJU26U/208+sxYszf82nmGYqVF9QN8py2HFTblPT9hbawi8+1C5a9JubODLTGFuT0qlkqVinmkwOD13s0sZktg==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-exponentiation-operator": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz", "integrity": "sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw==", "dependencies": { "@babel/helper-builder-binary-assignment-operator-visitor": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-for-of": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz", "integrity": "sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-function-name": { "version": 
"7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.6.tgz", "integrity": "sha512-kJha/Gbs5RjzIu0CxZwf5e3aTTSlhZnHMT8zPWnJMjNpLOUgqevg+PN5oMH68nMCXnfiMo4Bhgxqj59KHTlAnA==", "dependencies": { "@babel/helper-compilation-targets": "^7.18.6", "@babel/helper-function-name": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-literals": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.6.tgz", "integrity": "sha512-x3HEw0cJZVDoENXOp20HlypIHfl0zMIhMVZEBVTfmqbObIpsMxMbmU5nOEO8R7LYT+z5RORKPlTI5Hj4OsO9/Q==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-member-expression-literals": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz", "integrity": "sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-modules-amd": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.6.tgz", "integrity": "sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg==", "dependencies": { "@babel/helper-module-transforms": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "babel-plugin-dynamic-import-node": "^2.3.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, 
"node_modules/@babel/plugin-transform-modules-commonjs": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.6.tgz", "integrity": "sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q==", "dependencies": { "@babel/helper-module-transforms": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-simple-access": "^7.18.6", "babel-plugin-dynamic-import-node": "^2.3.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-modules-systemjs": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.18.6.tgz", "integrity": "sha512-UbPYpXxLjTw6w6yXX2BYNxF3p6QY225wcTkfQCy3OMnSlS/C3xGtwUjEzGkldb/sy6PWLiCQ3NbYfjWUTI3t4g==", "dependencies": { "@babel/helper-hoist-variables": "^7.18.6", "@babel/helper-module-transforms": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-validator-identifier": "^7.18.6", "babel-plugin-dynamic-import-node": "^2.3.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-modules-umd": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz", "integrity": "sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ==", "dependencies": { "@babel/helper-module-transforms": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.18.6.tgz", "integrity": "sha512-UmEOGF8XgaIqD74bC8g7iV3RYj8lMf0Bw7NJzvnS9qQhM4mg+1WHKotUIdjxgD2RGrgFLZZPCFPFj3P/kVDYhg==", "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/plugin-transform-new-target": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz", "integrity": "sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-object-super": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz", "integrity": "sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-replace-supers": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-parameters": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.18.8.tgz", "integrity": "sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-property-literals": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz", "integrity": "sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-react-constant-elements": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.14.5.tgz", "integrity": "sha512-NBqLEx1GxllIOXJInJAQbrnwwYJsV3WaMHIcOwD8rhYS0AabTWn7kHdHgPgu5RmHLU0q4DMxhAMu8ue/KampgQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-react-display-name": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz", "integrity": "sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-react-jsx": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.18.6.tgz", "integrity": "sha512-Mz7xMPxoy9kPS/JScj6fJs03TZ/fZ1dJPlMjRAgTaxaS0fUBk8FV/A2rRgfPsVCZqALNwMexD+0Uaf5zlcKPpw==", "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-module-imports": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-jsx": "^7.18.6", "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, 
"node_modules/@babel/plugin-transform-react-jsx-development": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz", "integrity": "sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA==", "dependencies": { "@babel/plugin-transform-react-jsx": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-react-pure-annotations": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz", "integrity": "sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ==", "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-regenerator": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz", "integrity": "sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "regenerator-transform": "^0.15.0" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-reserved-words": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz", "integrity": "sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, 
"node_modules/@babel/plugin-transform-runtime": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.18.6.tgz", "integrity": "sha512-8uRHk9ZmRSnWqUgyae249EJZ94b0yAGLBIqzZzl+0iEdbno55Pmlt/32JZsHwXD9k/uZj18Aqqk35wBX4CBTXA==", "dependencies": { "@babel/helper-module-imports": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "babel-plugin-polyfill-corejs2": "^0.3.1", "babel-plugin-polyfill-corejs3": "^0.5.2", "babel-plugin-polyfill-regenerator": "^0.3.1", "semver": "^6.3.0" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-runtime/node_modules/semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/plugin-transform-shorthand-properties": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz", "integrity": "sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-spread": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.18.6.tgz", "integrity": "sha512-ayT53rT/ENF8WWexIRg9AiV9h0aIteyWn5ptfZTZQrjk/+f3WdrJGCY4c9wcgl2+MKkKPhzbYp97FTsquZpDCw==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-skip-transparent-expression-wrappers": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-sticky-regex": { 
"version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz", "integrity": "sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-template-literals": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.6.tgz", "integrity": "sha512-UuqlRrQmT2SWRvahW46cGSany0uTlcj8NYOS5sRGYi8FxPYPoLd5DDmMd32ZXEj2Jq+06uGVQKHxa/hJx2EzKw==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-typeof-symbol": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.6.tgz", "integrity": "sha512-7m71iS/QhsPk85xSjFPovHPcH3H9qeyzsujhTc+vcdnsXavoWYJ74zx0lP5RhpC5+iDnVLO+PPMHzC11qels1g==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-typescript": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.18.8.tgz", "integrity": "sha512-p2xM8HI83UObjsZGofMV/EdYjamsDm6MoN3hXPYIT0+gxIoopE+B7rPYKAxfrz9K9PK7JafTTjqYC6qipLExYA==", "dependencies": { "@babel/helper-create-class-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-typescript": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-unicode-escapes": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.6.tgz", "integrity": "sha512-XNRwQUXYMP7VLuy54cr/KS/WeL3AZeORhrmeZ7iewgu+X2eBqmpaLI/hzqr9ZxCeUoq0ASK4GUzSM0BDhZkLFw==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-unicode-regex": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz", "integrity": "sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA==", "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/preset-env": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.18.6.tgz", "integrity": "sha512-WrthhuIIYKrEFAwttYzgRNQ5hULGmwTj+D6l7Zdfsv5M7IWV/OZbUfbeL++Qrzx1nVJwWROIFhCHRYQV4xbPNw==", "dependencies": { "@babel/compat-data": "^7.18.6", "@babel/helper-compilation-targets": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-validator-option": "^7.18.6", "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.18.6", "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.18.6", "@babel/plugin-proposal-async-generator-functions": "^7.18.6", "@babel/plugin-proposal-class-properties": "^7.18.6", "@babel/plugin-proposal-class-static-block": "^7.18.6", "@babel/plugin-proposal-dynamic-import": "^7.18.6", "@babel/plugin-proposal-export-namespace-from": "^7.18.6", "@babel/plugin-proposal-json-strings": "^7.18.6", "@babel/plugin-proposal-logical-assignment-operators": "^7.18.6", "@babel/plugin-proposal-nullish-coalescing-operator": "^7.18.6", 
"@babel/plugin-proposal-numeric-separator": "^7.18.6", "@babel/plugin-proposal-object-rest-spread": "^7.18.6", "@babel/plugin-proposal-optional-catch-binding": "^7.18.6", "@babel/plugin-proposal-optional-chaining": "^7.18.6", "@babel/plugin-proposal-private-methods": "^7.18.6", "@babel/plugin-proposal-private-property-in-object": "^7.18.6", "@babel/plugin-proposal-unicode-property-regex": "^7.18.6", "@babel/plugin-syntax-async-generators": "^7.8.4", "@babel/plugin-syntax-class-properties": "^7.12.13", "@babel/plugin-syntax-class-static-block": "^7.14.5", "@babel/plugin-syntax-dynamic-import": "^7.8.3", "@babel/plugin-syntax-export-namespace-from": "^7.8.3", "@babel/plugin-syntax-import-assertions": "^7.18.6", "@babel/plugin-syntax-json-strings": "^7.8.3", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", "@babel/plugin-syntax-numeric-separator": "^7.10.4", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", "@babel/plugin-syntax-optional-chaining": "^7.8.3", "@babel/plugin-syntax-private-property-in-object": "^7.14.5", "@babel/plugin-syntax-top-level-await": "^7.14.5", "@babel/plugin-transform-arrow-functions": "^7.18.6", "@babel/plugin-transform-async-to-generator": "^7.18.6", "@babel/plugin-transform-block-scoped-functions": "^7.18.6", "@babel/plugin-transform-block-scoping": "^7.18.6", "@babel/plugin-transform-classes": "^7.18.6", "@babel/plugin-transform-computed-properties": "^7.18.6", "@babel/plugin-transform-destructuring": "^7.18.6", "@babel/plugin-transform-dotall-regex": "^7.18.6", "@babel/plugin-transform-duplicate-keys": "^7.18.6", "@babel/plugin-transform-exponentiation-operator": "^7.18.6", "@babel/plugin-transform-for-of": "^7.18.6", "@babel/plugin-transform-function-name": "^7.18.6", "@babel/plugin-transform-literals": "^7.18.6", "@babel/plugin-transform-member-expression-literals": "^7.18.6", 
"@babel/plugin-transform-modules-amd": "^7.18.6", "@babel/plugin-transform-modules-commonjs": "^7.18.6", "@babel/plugin-transform-modules-systemjs": "^7.18.6", "@babel/plugin-transform-modules-umd": "^7.18.6", "@babel/plugin-transform-named-capturing-groups-regex": "^7.18.6", "@babel/plugin-transform-new-target": "^7.18.6", "@babel/plugin-transform-object-super": "^7.18.6", "@babel/plugin-transform-parameters": "^7.18.6", "@babel/plugin-transform-property-literals": "^7.18.6", "@babel/plugin-transform-regenerator": "^7.18.6", "@babel/plugin-transform-reserved-words": "^7.18.6", "@babel/plugin-transform-shorthand-properties": "^7.18.6", "@babel/plugin-transform-spread": "^7.18.6", "@babel/plugin-transform-sticky-regex": "^7.18.6", "@babel/plugin-transform-template-literals": "^7.18.6", "@babel/plugin-transform-typeof-symbol": "^7.18.6", "@babel/plugin-transform-unicode-escapes": "^7.18.6", "@babel/plugin-transform-unicode-regex": "^7.18.6", "@babel/preset-modules": "^0.1.5", "@babel/types": "^7.18.6", "babel-plugin-polyfill-corejs2": "^0.3.1", "babel-plugin-polyfill-corejs3": "^0.5.2", "babel-plugin-polyfill-regenerator": "^0.3.1", "core-js-compat": "^3.22.1", "semver": "^6.3.0" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/preset-env/node_modules/semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/preset-modules": { "version": "0.1.5", "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz", "integrity": "sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==", "dependencies": { "@babel/helper-plugin-utils": "^7.0.0", "@babel/plugin-proposal-unicode-property-regex": "^7.4.4", 
"@babel/plugin-transform-dotall-regex": "^7.4.4", "@babel/types": "^7.4.4", "esutils": "^2.0.2" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/preset-react": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.18.6.tgz", "integrity": "sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-validator-option": "^7.18.6", "@babel/plugin-transform-react-display-name": "^7.18.6", "@babel/plugin-transform-react-jsx": "^7.18.6", "@babel/plugin-transform-react-jsx-development": "^7.18.6", "@babel/plugin-transform-react-pure-annotations": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/preset-typescript": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.18.6.tgz", "integrity": "sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-validator-option": "^7.18.6", "@babel/plugin-transform-typescript": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/runtime": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.18.6.tgz", "integrity": "sha512-t9wi7/AW6XtKahAe20Yw0/mMljKq0B1r2fPdvaAdV/KPDZewFXdaaa6K7lxmZBZ8FBNpCiAT6iHPmd6QO9bKfQ==", "dependencies": { "regenerator-runtime": "^0.13.4" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/runtime-corejs3": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.18.6.tgz", "integrity": "sha512-cOu5wH2JFBgMjje+a+fz2JNIWU4GzYpl05oSob3UDvBEh6EuIn+TXFHMmBbhSb+k/4HMzgKCQfEEDArAWNF9Cw==", "dependencies": { "core-js-pure": "^3.20.2", 
"regenerator-runtime": "^0.13.4" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/template": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.18.6.tgz", "integrity": "sha512-JoDWzPe+wgBsTTgdnIma3iHNFC7YVJoPssVBDjiHfNlyt4YcunDtcDOUmfVDfCK5MfdsaIoX9PkijPhjH3nYUw==", "dependencies": { "@babel/code-frame": "^7.18.6", "@babel/parser": "^7.18.6", "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.18.8.tgz", "integrity": "sha512-UNg/AcSySJYR/+mIcJQDCv00T+AqRO7j/ZEJLzpaYtgM48rMg5MnkJgyNqkzo88+p4tfRvZJCEiwwfG6h4jkRg==", "dependencies": { "@babel/code-frame": "^7.18.6", "@babel/generator": "^7.18.7", "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-function-name": "^7.18.6", "@babel/helper-hoist-variables": "^7.18.6", "@babel/helper-split-export-declaration": "^7.18.6", "@babel/parser": "^7.18.8", "@babel/types": "^7.18.8", "debug": "^4.1.0", "globals": "^11.1.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/types": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.18.8.tgz", "integrity": "sha512-qwpdsmraq0aJ3osLJRApsc2ouSJCdnMeZwB0DhbtHAtRpZNZCdlbRnHIgcRKzdE1g0iOGg644fzjOBcdOz9cPw==", "dependencies": { "@babel/helper-validator-identifier": "^7.18.6", "to-fast-properties": "^2.0.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@colors/colors": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", "optional": true, "engines": { "node": ">=0.1.90" } }, "node_modules/@docsearch/css": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.1.1.tgz", "integrity": 
"sha512-utLgg7E1agqQeqCJn05DWC7XXMk4tMUUnL7MZupcknRu2OzGN13qwey2qA/0NAKkVBGugiWtON0+rlU0QIPojg==" }, "node_modules/@docsearch/react": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.1.1.tgz", "integrity": "sha512-cfoql4qvtsVRqBMYxhlGNpvyy/KlCoPqjIsJSZYqYf9AplZncKjLBTcwBu6RXFMVCe30cIFljniI4OjqAU67pQ==", "dependencies": { "@algolia/autocomplete-core": "1.7.1", "@algolia/autocomplete-preset-algolia": "1.7.1", "@docsearch/css": "3.1.1", "algoliasearch": "^4.0.0" }, "peerDependencies": { "@types/react": ">= 16.8.0 < 19.0.0", "react": ">= 16.8.0 < 19.0.0", "react-dom": ">= 16.8.0 < 19.0.0" } }, "node_modules/@docusaurus/core": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-2.0.0-rc.1.tgz", "integrity": "sha512-b9FX0Z+EddfQ6wAiNh+Wx4fysKfcvEcWJrZ5USROn3C+EVU5P4luaa8mwWK//O+hTwD9ur7/A44IZ/tWCTAoLQ==", "dependencies": { "@babel/core": "^7.18.6", "@babel/generator": "^7.18.7", "@babel/plugin-syntax-dynamic-import": "^7.8.3", "@babel/plugin-transform-runtime": "^7.18.6", "@babel/preset-env": "^7.18.6", "@babel/preset-react": "^7.18.6", "@babel/preset-typescript": "^7.18.6", "@babel/runtime": "^7.18.6", "@babel/runtime-corejs3": "^7.18.6", "@babel/traverse": "^7.18.8", "@docusaurus/cssnano-preset": "2.0.0-rc.1", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/mdx-loader": "2.0.0-rc.1", "@docusaurus/react-loadable": "5.5.2", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-common": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "@slorber/static-site-generator-webpack-plugin": "^4.0.7", "@svgr/webpack": "^6.2.1", "autoprefixer": "^10.4.7", "babel-loader": "^8.2.5", "babel-plugin-dynamic-import-node": "^2.3.3", "boxen": "^6.2.1", "chalk": "^4.1.2", "chokidar": "^3.5.3", "clean-css": "^5.3.0", "cli-table3": "^0.6.2", "combine-promises": "^1.1.0", "commander": "^5.1.0", "copy-webpack-plugin": "^11.0.0", "core-js": "^3.23.3", "css-loader": "^6.7.1", "css-minimizer-webpack-plugin": 
"^4.0.0", "cssnano": "^5.1.12", "del": "^6.1.1", "detect-port": "^1.3.0", "escape-html": "^1.0.3", "eta": "^1.12.3", "file-loader": "^6.2.0", "fs-extra": "^10.1.0", "html-minifier-terser": "^6.1.0", "html-tags": "^3.2.0", "html-webpack-plugin": "^5.5.0", "import-fresh": "^3.3.0", "leven": "^3.1.0", "lodash": "^4.17.21", "mini-css-extract-plugin": "^2.6.1", "postcss": "^8.4.14", "postcss-loader": "^7.0.0", "prompts": "^2.4.2", "react-dev-utils": "^12.0.1", "react-helmet-async": "^1.3.0", "react-loadable": "npm:@docusaurus/react-loadable@5.5.2", "react-loadable-ssr-addon-v5-slorber": "^1.0.1", "react-router": "^5.3.3", "react-router-config": "^5.1.1", "react-router-dom": "^5.3.3", "rtl-detect": "^1.0.4", "semver": "^7.3.7", "serve-handler": "^6.1.3", "shelljs": "^0.8.5", "terser-webpack-plugin": "^5.3.3", "tslib": "^2.4.0", "update-notifier": "^5.1.0", "url-loader": "^4.1.1", "wait-on": "^6.0.1", "webpack": "^5.73.0", "webpack-bundle-analyzer": "^4.5.0", "webpack-dev-server": "^4.9.3", "webpack-merge": "^5.8.0", "webpackbar": "^5.0.2" }, "bin": { "docusaurus": "bin/docusaurus.mjs" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-add-jsx-attribute": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.0.0.tgz", "integrity": "sha512-MdPdhdWLtQsjd29Wa4pABdhWbaRMACdM1h31BY+c6FghTZqNGT7pEYdBoaGeKtdTOBC/XNFQaKVj+r/Ei2ryWA==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-remove-jsx-attribute": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-6.0.0.tgz", "integrity": 
"sha512-aVdtfx9jlaaxc3unA6l+M9YRnKIZjOhQPthLKqmTXC8UVkBLDRGwPKo+r8n3VZN8B34+yVajzPTZ+ptTSuZZCw==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-remove-jsx-empty-expression": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-6.0.0.tgz", "integrity": "sha512-Ccj42ApsePD451AZJJf1QzTD1B/BOU392URJTeXFxSK709i0KUsGtbwyiqsKu7vsYxpTM0IA5clAKDyf9RCZyA==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-replace-jsx-attribute-value": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-6.0.0.tgz", "integrity": "sha512-88V26WGyt1Sfd1emBYmBJRWMmgarrExpKNVmI9vVozha4kqs6FzQJ/Kp5+EYli1apgX44518/0+t9+NU36lThQ==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-svg-dynamic-title": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-6.0.0.tgz", "integrity": "sha512-F7YXNLfGze+xv0KMQxrl2vkNbI9kzT9oDK55/kUuymh1ACyXkMV+VZWX1zEhSTfEKh7VkHVZGmVtHg8eTZ6PRg==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-svg-em-dimensions": { "version": "6.0.0", "resolved": 
"https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-6.0.0.tgz", "integrity": "sha512-+rghFXxdIqJNLQK08kwPBD3Z22/0b2tEZ9lKiL/yTfuyj1wW8HUXu4bo/XkogATIYuXSghVQOOCwURXzHGKyZA==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-transform-react-native-svg": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-6.0.0.tgz", "integrity": "sha512-VaphyHZ+xIKv5v0K0HCzyfAaLhPGJXSk2HkpYfXIOKb7DjLBv0soHDxNv6X0vr2titsxE7klb++u7iOf7TSrFQ==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-transform-svg-component": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-6.2.0.tgz", "integrity": "sha512-bhYIpsORb++wpsp91fymbFkf09Z/YEKR0DnFjxvN+8JHeCUD2unnh18jIMKnDJTWtvpTaGYPXELVe4OOzFI0xg==", "engines": { "node": ">=12" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/core/node_modules/@svgr/babel-preset": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-6.2.0.tgz", "integrity": "sha512-4WQNY0J71JIaL03DRn0vLiz87JXx0b9dYm2aA8XHlQJQoixMl4r/soYHm8dsaJZ3jWtkCiOYy48dp9izvXhDkQ==", "dependencies": { "@svgr/babel-plugin-add-jsx-attribute": "^6.0.0", "@svgr/babel-plugin-remove-jsx-attribute": "^6.0.0", "@svgr/babel-plugin-remove-jsx-empty-expression": "^6.0.0", "@svgr/babel-plugin-replace-jsx-attribute-value": "^6.0.0", 
"@svgr/babel-plugin-svg-dynamic-title": "^6.0.0", "@svgr/babel-plugin-svg-em-dimensions": "^6.0.0", "@svgr/babel-plugin-transform-react-native-svg": "^6.0.0", "@svgr/babel-plugin-transform-svg-component": "^6.2.0" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/core/node_modules/@svgr/core": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/core/-/core-6.2.1.tgz", "integrity": "sha512-NWufjGI2WUyrg46mKuySfviEJ6IxHUOm/8a3Ph38VCWSp+83HBraCQrpEM3F3dB6LBs5x8OElS8h3C0oOJaJAA==", "dependencies": { "@svgr/plugin-jsx": "^6.2.1", "camelcase": "^6.2.0", "cosmiconfig": "^7.0.1" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@docusaurus/core/node_modules/@svgr/hast-util-to-babel-ast": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-6.2.1.tgz", "integrity": "sha512-pt7MMkQFDlWJVy9ULJ1h+hZBDGFfSCwlBNW1HkLnVi7jUhyEXUaGYWi1x6bM2IXuAR9l265khBT4Av4lPmaNLQ==", "dependencies": { "@babel/types": "^7.15.6", "entities": "^3.0.1" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@docusaurus/core/node_modules/@svgr/plugin-jsx": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-6.2.1.tgz", "integrity": "sha512-u+MpjTsLaKo6r3pHeeSVsh9hmGRag2L7VzApWIaS8imNguqoUwDq/u6U/NDmYs/KAsrmtBjOEaAAPbwNGXXp1g==", "dependencies": { "@babel/core": "^7.15.5", "@svgr/babel-preset": "^6.2.0", "@svgr/hast-util-to-babel-ast": "^6.2.1", "svg-parser": "^2.0.2" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@svgr/core": "^6.0.0" } }, 
"node_modules/@docusaurus/core/node_modules/@svgr/plugin-svgo": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-6.2.0.tgz", "integrity": "sha512-oDdMQONKOJEbuKwuy4Np6VdV6qoaLLvoY86hjvQEgU82Vx1MSWRyYms6Sl0f+NtqxLI/rDVufATbP/ev996k3Q==", "dependencies": { "cosmiconfig": "^7.0.1", "deepmerge": "^4.2.2", "svgo": "^2.5.0" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@svgr/core": "^6.0.0" } }, "node_modules/@docusaurus/core/node_modules/@svgr/webpack": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/webpack/-/webpack-6.2.1.tgz", "integrity": "sha512-h09ngMNd13hnePwgXa+Y5CgOjzlCvfWLHg+MBnydEedAnuLRzUHUJmGS3o2OsrhxTOOqEsPOFt5v/f6C5Qulcw==", "dependencies": { "@babel/core": "^7.15.5", "@babel/plugin-transform-react-constant-elements": "^7.14.5", "@babel/preset-env": "^7.15.6", "@babel/preset-react": "^7.14.5", "@babel/preset-typescript": "^7.15.0", "@svgr/core": "^6.2.1", "@svgr/plugin-jsx": "^6.2.1", "@svgr/plugin-svgo": "^6.2.0" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@docusaurus/core/node_modules/ansi-styles": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.1.0.tgz", "integrity": "sha512-VbqNsoz55SYGczauuup0MFUyXNQviSpFTj1RQtFzmQLk18qbVSpTFFGMT293rmDaQuKCT6InmbuEyUne4mTuxQ==", "engines": { "node": ">=12" }, "funding": { "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, "node_modules/@docusaurus/core/node_modules/boxen": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/boxen/-/boxen-6.2.1.tgz", "integrity": "sha512-H4PEsJXfFI/Pt8sjDWbHlQPx4zL/bvSQjcilJmaulGt5mLDorHOHpmdXAJcBcmru7PhYSp/cDMWRko4ZUMFkSw==", "dependencies": { "ansi-align": "^3.0.1", "camelcase": "^6.2.0", "chalk": "^4.1.2", "cli-boxes": "^3.0.0", "string-width": "^5.0.1", "type-fest": "^2.5.0", 
"widest-line": "^4.0.1", "wrap-ansi": "^8.0.1" }, "engines": { "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/@docusaurus/core/node_modules/cli-boxes": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz", "integrity": "sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==", "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/@docusaurus/core/node_modules/entities": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/entities/-/entities-3.0.1.tgz", "integrity": "sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==", "engines": { "node": ">=0.12" }, "funding": { "url": "https://github.com/fb55/entities?sponsor=1" } }, "node_modules/@docusaurus/core/node_modules/react-loadable": { "name": "@docusaurus/react-loadable", "version": "5.5.2", "resolved": "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz", "integrity": "sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ==", "dependencies": { "@types/react": "*", "prop-types": "^15.6.2" }, "peerDependencies": { "react": "*" } }, "node_modules/@docusaurus/core/node_modules/semver": { "version": "7.3.7", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", "dependencies": { "lru-cache": "^6.0.0" }, "bin": { "semver": "bin/semver.js" }, "engines": { "node": ">=10" } }, "node_modules/@docusaurus/core/node_modules/string-width": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", "dependencies": { 
"eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" }, "engines": { "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/@docusaurus/core/node_modules/strip-ansi": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", "dependencies": { "ansi-regex": "^6.0.1" }, "engines": { "node": ">=12" }, "funding": { "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, "node_modules/@docusaurus/core/node_modules/type-fest": { "version": "2.12.2", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.12.2.tgz", "integrity": "sha512-qt6ylCGpLjZ7AaODxbpyBZSs9fCI9SkL3Z9q2oxMBQhs/uyY+VD8jHA8ULCGmWQJlBgqvO3EJeAngOHD8zQCrQ==", "engines": { "node": ">=12.20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/@docusaurus/core/node_modules/widest-line": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz", "integrity": "sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==", "dependencies": { "string-width": "^5.0.1" }, "engines": { "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/@docusaurus/core/node_modules/wrap-ansi": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.0.1.tgz", "integrity": "sha512-QFF+ufAqhoYHvoHdajT/Po7KoXVBPXS2bgjIam5isfWJPfIOnQZ50JtUiVvCv/sjgacf3yRrt2ZKUZ/V4itN4g==", "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" }, "engines": { "node": ">=12" }, "funding": { "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, "node_modules/@docusaurus/cssnano-preset": { "version": "2.0.0-rc.1", "resolved": 
"https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-2.0.0-rc.1.tgz", "integrity": "sha512-9/KmQvF+eTlMqUqG6UcXbRgxbGed/8bQInXuKEs+95/jI6jO/3xSzuRwuHHHP0naUvSVWjnNI9jngPrQerXE5w==", "dependencies": { "cssnano-preset-advanced": "^5.3.8", "postcss": "^8.4.14", "postcss-sort-media-queries": "^4.2.1", "tslib": "^2.4.0" }, "engines": { "node": ">=16.14" } }, "node_modules/@docusaurus/logger": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-2.0.0-rc.1.tgz", "integrity": "sha512-daa3g+SXuO9K60PVMiSUmDEK9Vro+Ed7i7uF8CH6QQJLcNZy/zJc0Xz62eH7ip1x77fmeb6Rg4Us1TqTFc9AbQ==", "dependencies": { "chalk": "^4.1.2", "tslib": "^2.4.0" }, "engines": { "node": ">=16.14" } }, "node_modules/@docusaurus/mdx-loader": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-2.0.0-rc.1.tgz", "integrity": "sha512-8Fg0c/ceu39knmr7w0dutm7gq3YxKYCqWVS2cB/cPATzChCCNH/AGLfBT6sz/Z4tjVXE+NyREq2pfOFvkhjVXg==", "dependencies": { "@babel/parser": "^7.18.8", "@babel/traverse": "^7.18.8", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@mdx-js/mdx": "^1.6.22", "escape-html": "^1.0.3", "file-loader": "^6.2.0", "fs-extra": "^10.1.0", "image-size": "^1.0.1", "mdast-util-to-string": "^2.0.0", "remark-emoji": "^2.2.0", "stringify-object": "^3.3.0", "tslib": "^2.4.0", "unified": "^9.2.2", "unist-util-visit": "^2.0.3", "url-loader": "^4.1.1", "webpack": "^5.73.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/module-type-aliases": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-2.0.0-rc.1.tgz", "integrity": "sha512-la7D8ggFP8I5nOp/Epl6NqTeDWcbofPVMOaVisRxQbx5iuF9Al+AITbaDgm4CXpFLJACsqhsXD5W4BnKX8ZxfA==", "dependencies": { "@docusaurus/react-loadable": "5.5.2", "@docusaurus/types": "2.0.0-rc.1", 
"@types/history": "^4.7.11", "@types/react": "*", "@types/react-router-config": "*", "@types/react-router-dom": "*", "react-helmet-async": "*", "react-loadable": "npm:@docusaurus/react-loadable@5.5.2" }, "peerDependencies": { "react": "*", "react-dom": "*" } }, "node_modules/@docusaurus/module-type-aliases/node_modules/react-loadable": { "name": "@docusaurus/react-loadable", "version": "5.5.2", "resolved": "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz", "integrity": "sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ==", "dependencies": { "@types/react": "*", "prop-types": "^15.6.2" }, "peerDependencies": { "react": "*" } }, "node_modules/@docusaurus/plugin-client-redirects": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-client-redirects/-/plugin-client-redirects-2.0.0-rc.1.tgz", "integrity": "sha512-uGiK7kzQeJ+gChzIgazKMlHEonOwlmK6NEJvr44aWS6DbauVXOs/aolXZCHx8ZdKPETOpZEzSRYjU5e+QIN+HA==", "dependencies": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-common": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "eta": "^1.12.3", "fs-extra": "^10.1.0", "lodash": "^4.17.21", "tslib": "^2.4.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/plugin-content-blog": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.0.0-rc.1.tgz", "integrity": "sha512-BVVrAGZujpjS/0rarY2o24rlylRRh2NZuM65kg0JNkkViF79SeEHsepog7IuHyoqGWPm1N/I7LpEp7k+gowZzQ==", "dependencies": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/mdx-loader": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-common": "2.0.0-rc.1", 
"@docusaurus/utils-validation": "2.0.0-rc.1", "cheerio": "^1.0.0-rc.12", "feed": "^4.2.2", "fs-extra": "^10.1.0", "lodash": "^4.17.21", "reading-time": "^1.5.0", "tslib": "^2.4.0", "unist-util-visit": "^2.0.3", "utility-types": "^3.10.0", "webpack": "^5.73.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/plugin-content-docs": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.0.0-rc.1.tgz", "integrity": "sha512-Yk5Hu6uaw3tRplzJnbDygwRhmZ3PCzEXD4SJpBA6cPC73ylfqOEh6qhiU+BWhMTtDXNhY+athk5Kycfk3DW1aQ==", "dependencies": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/mdx-loader": "2.0.0-rc.1", "@docusaurus/module-type-aliases": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "@types/react-router-config": "^5.0.6", "combine-promises": "^1.1.0", "fs-extra": "^10.1.0", "import-fresh": "^3.3.0", "js-yaml": "^4.1.0", "lodash": "^4.17.21", "tslib": "^2.4.0", "utility-types": "^3.10.0", "webpack": "^5.73.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/plugin-content-docs/node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, "node_modules/@docusaurus/plugin-content-docs/node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, 
"node_modules/@docusaurus/plugin-content-pages": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.0.0-rc.1.tgz", "integrity": "sha512-FdO79WC5hfWDQu3/CTFLRQzTNc0e5n+HNzavm2MNkSzGV08BFJ6RAkbPbtra5CWef+6iXZav6D/tzv2jDPvLzA==", "dependencies": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/mdx-loader": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "fs-extra": "^10.1.0", "tslib": "^2.4.0", "webpack": "^5.73.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/plugin-debug": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-2.0.0-rc.1.tgz", "integrity": "sha512-aOsyYrPMbnsyqHwsVZ+0frrMRtnYqm4eaJpG4sC/6LYAJ07IDRQ9j3GOku2dKr5GsFK1Vx7VlE6ZLwe0MaGstg==", "dependencies": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "fs-extra": "^10.1.0", "react-json-view": "^1.21.3", "tslib": "^2.4.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/plugin-google-analytics": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.0.0-rc.1.tgz", "integrity": "sha512-f+G8z5OJWfg5QqWDLIdcN2SDoK5J5Gg8HMrqCI6Pfl+rxPb5I1niA+/UkAM+kMCpnekvhSt5AWz2fgkRenkPLA==", "dependencies": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "tslib": "^2.4.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/plugin-google-gtag": { "version": "2.0.0-rc.1", "resolved": 
"https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.0.0-rc.1.tgz", "integrity": "sha512-yE1Et9hhhX9qMRnMJzpNq0854qIYiSEc2dZaXNk537HN7Q0rKkr/YONUHz2iqNYwPX2hGOY4LdpTxlMP88uVhA==", "dependencies": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "tslib": "^2.4.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/plugin-sitemap": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.0.0-rc.1.tgz", "integrity": "sha512-5JmbNpssUF03odFM4ArvIsrO9bv7HnAJ0VtefXhh0WBpaFs8NgI3rTkCTFimvtRQjDR9U2bh23fXz2vjQQz6oA==", "dependencies": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-common": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "fs-extra": "^10.1.0", "sitemap": "^7.1.1", "tslib": "^2.4.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/preset-classic": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-2.0.0-rc.1.tgz", "integrity": "sha512-5jjTVZkhArjyoNHwCI9x4PSG0zPmBJILjZLVrxPcHpm/K0ltkYcp6J3GxYpf5EbMuOh5+yCWM63cSshGcNOo3Q==", "dependencies": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/plugin-content-blog": "2.0.0-rc.1", "@docusaurus/plugin-content-docs": "2.0.0-rc.1", "@docusaurus/plugin-content-pages": "2.0.0-rc.1", "@docusaurus/plugin-debug": "2.0.0-rc.1", "@docusaurus/plugin-google-analytics": "2.0.0-rc.1", "@docusaurus/plugin-google-gtag": "2.0.0-rc.1", "@docusaurus/plugin-sitemap": "2.0.0-rc.1", "@docusaurus/theme-classic": "2.0.0-rc.1", "@docusaurus/theme-common": "2.0.0-rc.1", "@docusaurus/theme-search-algolia": 
"2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/react-loadable": { "version": "5.5.2", "resolved": "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz", "integrity": "sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ==", "dependencies": { "@types/react": "*", "prop-types": "^15.6.2" }, "peerDependencies": { "react": "*" } }, "node_modules/@docusaurus/theme-classic": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-2.0.0-rc.1.tgz", "integrity": "sha512-qNiz7ieeq3AC+V8TbW6S63pWLJph1CbzWDDPTqxDLHgA8VQaNaSmJM8S92pH+yKALRb9u14ogjjYYc75Nj2JmQ==", "dependencies": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/mdx-loader": "2.0.0-rc.1", "@docusaurus/module-type-aliases": "2.0.0-rc.1", "@docusaurus/plugin-content-blog": "2.0.0-rc.1", "@docusaurus/plugin-content-docs": "2.0.0-rc.1", "@docusaurus/plugin-content-pages": "2.0.0-rc.1", "@docusaurus/theme-common": "2.0.0-rc.1", "@docusaurus/theme-translations": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-common": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "@mdx-js/react": "^1.6.22", "clsx": "^1.2.1", "copy-text-to-clipboard": "^3.0.1", "infima": "0.2.0-alpha.42", "lodash": "^4.17.21", "nprogress": "^0.2.0", "postcss": "^8.4.14", "prism-react-renderer": "^1.3.5", "prismjs": "^1.28.0", "react-router-dom": "^5.3.3", "rtlcss": "^3.5.0", "tslib": "^2.4.0", "utility-types": "^3.10.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/theme-common": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-2.0.0-rc.1.tgz", 
"integrity": "sha512-1r9ZLKD9SeoCYVzWzcdR79Dia4ANlrlRjNl6uzETOEybjK6FF7yEa9Yra8EJcOCbi3coyYz5xFh/r1YHFTFHug==", "dependencies": { "@docusaurus/mdx-loader": "2.0.0-rc.1", "@docusaurus/module-type-aliases": "2.0.0-rc.1", "@docusaurus/plugin-content-blog": "2.0.0-rc.1", "@docusaurus/plugin-content-docs": "2.0.0-rc.1", "@docusaurus/plugin-content-pages": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@types/history": "^4.7.11", "@types/react": "*", "@types/react-router-config": "*", "clsx": "^1.2.1", "parse-numeric-range": "^1.3.0", "prism-react-renderer": "^1.3.5", "tslib": "^2.4.0", "utility-types": "^3.10.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/theme-search-algolia": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.0.0-rc.1.tgz", "integrity": "sha512-H5yq6V/B4qo6GZrDKMbeSpk3T9e9K2MliDzLonRu0w3QHW9orVGe0c/lZvRbGlDZjnsOo7XGddhXXIDWGwnpaA==", "dependencies": { "@docsearch/react": "^3.1.1", "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/plugin-content-docs": "2.0.0-rc.1", "@docusaurus/theme-common": "2.0.0-rc.1", "@docusaurus/theme-translations": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "algoliasearch": "^4.13.1", "algoliasearch-helper": "^3.10.0", "clsx": "^1.2.1", "eta": "^1.12.3", "fs-extra": "^10.1.0", "lodash": "^4.17.21", "tslib": "^2.4.0", "utility-types": "^3.10.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/theme-translations": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-2.0.0-rc.1.tgz", "integrity": "sha512-JLhNdlnbQhxVQzOnLyiCaTzKFa1lpVrM3nCrkGQKscoG2rY6ARGYMgMN2DkoH6hm7TflQ8+PE1S5MzzASeLs4Q==", 
"dependencies": { "fs-extra": "^10.1.0", "tslib": "^2.4.0" }, "engines": { "node": ">=16.14" } }, "node_modules/@docusaurus/types": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-2.0.0-rc.1.tgz", "integrity": "sha512-wX25FOZa/aKnCGA5ljWPaDpMW3TuTbs0BtjQ8WTC557p8zDvuz4r+g2/FPHsgWE0TKwUMf4usQU1m3XpJLPN+g==", "dependencies": { "@types/history": "^4.7.11", "@types/react": "*", "commander": "^5.1.0", "joi": "^17.6.0", "react-helmet-async": "^1.3.0", "utility-types": "^3.10.0", "webpack": "^5.73.0", "webpack-merge": "^5.8.0" }, "peerDependencies": { "react": "^16.8.4 || ^17.0.0", "react-dom": "^16.8.4 || ^17.0.0" } }, "node_modules/@docusaurus/utils": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-2.0.0-rc.1.tgz", "integrity": "sha512-ym9I1OwIYbKs1LGaUajaA/vDG8VweJj/6YoZjHp+eDQHhTRIrHXiYoGDqorafRhftKwnA1EnyomuXpNd9bq8Gg==", "dependencies": { "@docusaurus/logger": "2.0.0-rc.1", "@svgr/webpack": "^6.2.1", "file-loader": "^6.2.0", "fs-extra": "^10.1.0", "github-slugger": "^1.4.0", "globby": "^11.1.0", "gray-matter": "^4.0.3", "js-yaml": "^4.1.0", "lodash": "^4.17.21", "micromatch": "^4.0.5", "resolve-pathname": "^3.0.0", "shelljs": "^0.8.5", "tslib": "^2.4.0", "url-loader": "^4.1.1", "webpack": "^5.73.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "@docusaurus/types": "*" }, "peerDependenciesMeta": { "@docusaurus/types": { "optional": true } } }, "node_modules/@docusaurus/utils-common": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-2.0.0-rc.1.tgz", "integrity": "sha512-+iZICpeFPZJ9oGJXuG92WTWee6WRnVx5BdzlcfuKf/f5KQX8PvwXR2tDME78FGGhShB8zr+vjuNEXuLvXT7j2A==", "dependencies": { "tslib": "^2.4.0" }, "engines": { "node": ">=16.14" }, "peerDependencies": { "@docusaurus/types": "*" }, "peerDependenciesMeta": { "@docusaurus/types": { "optional": true } } }, "node_modules/@docusaurus/utils-validation": { "version": 
"2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-2.0.0-rc.1.tgz", "integrity": "sha512-lj36gm9Ksu4tt/EUeLDWoMbXe3sfBxeIPIUUdqYcBYkF/rpQkh+uL/dncjNGiw6uvBOqXhOfsFVP045HtgShVw==", "dependencies": { "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "joi": "^17.6.0", "js-yaml": "^4.1.0", "tslib": "^2.4.0" }, "engines": { "node": ">=16.14" } }, "node_modules/@docusaurus/utils-validation/node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, "node_modules/@docusaurus/utils-validation/node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-add-jsx-attribute": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.0.0.tgz", "integrity": "sha512-MdPdhdWLtQsjd29Wa4pABdhWbaRMACdM1h31BY+c6FghTZqNGT7pEYdBoaGeKtdTOBC/XNFQaKVj+r/Ei2ryWA==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-remove-jsx-attribute": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-6.0.0.tgz", "integrity": "sha512-aVdtfx9jlaaxc3unA6l+M9YRnKIZjOhQPthLKqmTXC8UVkBLDRGwPKo+r8n3VZN8B34+yVajzPTZ+ptTSuZZCw==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": 
"https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-remove-jsx-empty-expression": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-6.0.0.tgz", "integrity": "sha512-Ccj42ApsePD451AZJJf1QzTD1B/BOU392URJTeXFxSK709i0KUsGtbwyiqsKu7vsYxpTM0IA5clAKDyf9RCZyA==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-replace-jsx-attribute-value": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-6.0.0.tgz", "integrity": "sha512-88V26WGyt1Sfd1emBYmBJRWMmgarrExpKNVmI9vVozha4kqs6FzQJ/Kp5+EYli1apgX44518/0+t9+NU36lThQ==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-svg-dynamic-title": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-6.0.0.tgz", "integrity": "sha512-F7YXNLfGze+xv0KMQxrl2vkNbI9kzT9oDK55/kUuymh1ACyXkMV+VZWX1zEhSTfEKh7VkHVZGmVtHg8eTZ6PRg==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-svg-em-dimensions": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-6.0.0.tgz", "integrity": 
"sha512-+rghFXxdIqJNLQK08kwPBD3Z22/0b2tEZ9lKiL/yTfuyj1wW8HUXu4bo/XkogATIYuXSghVQOOCwURXzHGKyZA==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-transform-react-native-svg": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-6.0.0.tgz", "integrity": "sha512-VaphyHZ+xIKv5v0K0HCzyfAaLhPGJXSk2HkpYfXIOKb7DjLBv0soHDxNv6X0vr2titsxE7klb++u7iOf7TSrFQ==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-transform-svg-component": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-6.2.0.tgz", "integrity": "sha512-bhYIpsORb++wpsp91fymbFkf09Z/YEKR0DnFjxvN+8JHeCUD2unnh18jIMKnDJTWtvpTaGYPXELVe4OOzFI0xg==", "engines": { "node": ">=12" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/babel-preset": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-6.2.0.tgz", "integrity": "sha512-4WQNY0J71JIaL03DRn0vLiz87JXx0b9dYm2aA8XHlQJQoixMl4r/soYHm8dsaJZ3jWtkCiOYy48dp9izvXhDkQ==", "dependencies": { "@svgr/babel-plugin-add-jsx-attribute": "^6.0.0", "@svgr/babel-plugin-remove-jsx-attribute": "^6.0.0", "@svgr/babel-plugin-remove-jsx-empty-expression": "^6.0.0", "@svgr/babel-plugin-replace-jsx-attribute-value": "^6.0.0", "@svgr/babel-plugin-svg-dynamic-title": "^6.0.0", "@svgr/babel-plugin-svg-em-dimensions": "^6.0.0", "@svgr/babel-plugin-transform-react-native-svg": 
"^6.0.0", "@svgr/babel-plugin-transform-svg-component": "^6.2.0" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/core": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/core/-/core-6.2.1.tgz", "integrity": "sha512-NWufjGI2WUyrg46mKuySfviEJ6IxHUOm/8a3Ph38VCWSp+83HBraCQrpEM3F3dB6LBs5x8OElS8h3C0oOJaJAA==", "dependencies": { "@svgr/plugin-jsx": "^6.2.1", "camelcase": "^6.2.0", "cosmiconfig": "^7.0.1" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/hast-util-to-babel-ast": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-6.2.1.tgz", "integrity": "sha512-pt7MMkQFDlWJVy9ULJ1h+hZBDGFfSCwlBNW1HkLnVi7jUhyEXUaGYWi1x6bM2IXuAR9l265khBT4Av4lPmaNLQ==", "dependencies": { "@babel/types": "^7.15.6", "entities": "^3.0.1" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/plugin-jsx": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-6.2.1.tgz", "integrity": "sha512-u+MpjTsLaKo6r3pHeeSVsh9hmGRag2L7VzApWIaS8imNguqoUwDq/u6U/NDmYs/KAsrmtBjOEaAAPbwNGXXp1g==", "dependencies": { "@babel/core": "^7.15.5", "@svgr/babel-preset": "^6.2.0", "@svgr/hast-util-to-babel-ast": "^6.2.1", "svg-parser": "^2.0.2" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@svgr/core": "^6.0.0" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/plugin-svgo": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-6.2.0.tgz", "integrity": 
"sha512-oDdMQONKOJEbuKwuy4Np6VdV6qoaLLvoY86hjvQEgU82Vx1MSWRyYms6Sl0f+NtqxLI/rDVufATbP/ev996k3Q==", "dependencies": { "cosmiconfig": "^7.0.1", "deepmerge": "^4.2.2", "svgo": "^2.5.0" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" }, "peerDependencies": { "@svgr/core": "^6.0.0" } }, "node_modules/@docusaurus/utils/node_modules/@svgr/webpack": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/webpack/-/webpack-6.2.1.tgz", "integrity": "sha512-h09ngMNd13hnePwgXa+Y5CgOjzlCvfWLHg+MBnydEedAnuLRzUHUJmGS3o2OsrhxTOOqEsPOFt5v/f6C5Qulcw==", "dependencies": { "@babel/core": "^7.15.5", "@babel/plugin-transform-react-constant-elements": "^7.14.5", "@babel/preset-env": "^7.15.6", "@babel/preset-react": "^7.14.5", "@babel/preset-typescript": "^7.15.0", "@svgr/core": "^6.2.1", "@svgr/plugin-jsx": "^6.2.1", "@svgr/plugin-svgo": "^6.2.0" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@docusaurus/utils/node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, "node_modules/@docusaurus/utils/node_modules/entities": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/entities/-/entities-3.0.1.tgz", "integrity": "sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==", "engines": { "node": ">=0.12" }, "funding": { "url": "https://github.com/fb55/entities?sponsor=1" } }, "node_modules/@docusaurus/utils/node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } 
}, "node_modules/@hapi/hoek": { "version": "9.3.0", "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz", "integrity": "sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==" }, "node_modules/@hapi/topo": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz", "integrity": "sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==", "dependencies": { "@hapi/hoek": "^9.0.0" } }, "node_modules/@jridgewell/gen-mapping": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz", "integrity": "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==", "dependencies": { "@jridgewell/set-array": "^1.0.0", "@jridgewell/sourcemap-codec": "^1.4.10" }, "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/resolve-uri": { "version": "3.0.6", "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.0.6.tgz", "integrity": "sha512-R7xHtBSNm+9SyvpJkdQl+qrM3Hm2fea3Ef197M3mUug+v+yR+Rhfbs7PBtcBUVnIWJ4JcAdjvij+c8hXS9p5aw==", "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/set-array": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.0.tgz", "integrity": "sha512-SfJxIxNVYLTsKwzB3MoOQ1yxf4w/E6MdkvTgrgAt1bfxjSrLUoHMKrDOykwN14q65waezZIdqDneUIPh4/sKxg==", "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/sourcemap-codec": { "version": "1.4.11", "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.11.tgz", "integrity": "sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg==" }, "node_modules/@jridgewell/trace-mapping": { "version": "0.3.9", "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", "integrity": 
"sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", "dependencies": { "@jridgewell/resolve-uri": "^3.0.3", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "node_modules/@leichtgewicht/ip-codec": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz", "integrity": "sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A==" }, "node_modules/@mdx-js/mdx": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-1.6.22.tgz", "integrity": "sha512-AMxuLxPz2j5/6TpF/XSdKpQP1NlG0z11dFOlq+2IP/lSgl11GY8ji6S/rgsViN/L0BDvHvUMruRb7ub+24LUYA==", "dependencies": { "@babel/core": "7.12.9", "@babel/plugin-syntax-jsx": "7.12.1", "@babel/plugin-syntax-object-rest-spread": "7.8.3", "@mdx-js/util": "1.6.22", "babel-plugin-apply-mdx-type-prop": "1.6.22", "babel-plugin-extract-import-names": "1.6.22", "camelcase-css": "2.0.1", "detab": "2.0.4", "hast-util-raw": "6.0.1", "lodash.uniq": "4.5.0", "mdast-util-to-hast": "10.0.1", "remark-footnotes": "2.0.0", "remark-mdx": "1.6.22", "remark-parse": "8.0.3", "remark-squeeze-paragraphs": "4.0.0", "style-to-object": "0.3.0", "unified": "9.2.0", "unist-builder": "2.0.3", "unist-util-visit": "2.0.3" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/@mdx-js/mdx/node_modules/@babel/core": { "version": "7.12.9", "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz", "integrity": "sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ==", "dependencies": { "@babel/code-frame": "^7.10.4", "@babel/generator": "^7.12.5", "@babel/helper-module-transforms": "^7.12.1", "@babel/helpers": "^7.12.5", "@babel/parser": "^7.12.7", "@babel/template": "^7.12.7", "@babel/traverse": "^7.12.9", "@babel/types": "^7.12.7", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.1", 
"json5": "^2.1.2", "lodash": "^4.17.19", "resolve": "^1.3.2", "semver": "^5.4.1", "source-map": "^0.5.0" }, "engines": { "node": ">=6.9.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/babel" } }, "node_modules/@mdx-js/mdx/node_modules/@babel/plugin-syntax-jsx": { "version": "7.12.1", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz", "integrity": "sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg==", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@mdx-js/mdx/node_modules/semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", "bin": { "semver": "bin/semver" } }, "node_modules/@mdx-js/mdx/node_modules/source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", "engines": { "node": ">=0.10.0" } }, "node_modules/@mdx-js/mdx/node_modules/unified": { "version": "9.2.0", "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz", "integrity": "sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg==", "dependencies": { "bail": "^1.0.0", "extend": "^3.0.0", "is-buffer": "^2.0.0", "is-plain-obj": "^2.0.0", "trough": "^1.0.0", "vfile": "^4.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/@mdx-js/react": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-1.6.22.tgz", "integrity": "sha512-TDoPum4SHdfPiGSAaRBw7ECyI8VaHpK8GJugbJIJuqyh6kzw9ZLJZW3HGL3NNrJGxcAixUvqROm+YuQOo5eXtg==", "funding": { "type": "opencollective", "url": 
"https://opencollective.com/unified" }, "peerDependencies": { "react": "^16.13.1 || ^17.0.0" } }, "node_modules/@mdx-js/util": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/@mdx-js/util/-/util-1.6.22.tgz", "integrity": "sha512-H1rQc1ZOHANWBvPcW+JpGwr+juXSxM8Q8YCkm3GhZd8REu1fHR3z99CErO1p9pkcfcxZnMdIZdIsXkOHY0NilA==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" }, "engines": { "node": ">= 8" } }, "node_modules/@nodelib/fs.stat": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", "engines": { "node": ">= 8" } }, "node_modules/@nodelib/fs.walk": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" }, "engines": { "node": ">= 8" } }, "node_modules/@polka/url": { "version": "1.0.0-next.21", "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.21.tgz", "integrity": "sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==" }, "node_modules/@sideway/address": { "version": "4.1.4", "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz", "integrity": "sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw==", "dependencies": { "@hapi/hoek": "^9.0.0" } }, "node_modules/@sideway/formula": { 
"version": "3.0.0", "resolved": "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.0.tgz", "integrity": "sha512-vHe7wZ4NOXVfkoRb8T5otiENVlT7a3IAiw7H5M2+GO+9CDgcVUUsX1zalAztCmwyOr2RUTGJdgB+ZvSVqmdHmg==" }, "node_modules/@sideway/pinpoint": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz", "integrity": "sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==" }, "node_modules/@sindresorhus/is": { "version": "0.14.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz", "integrity": "sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==", "engines": { "node": ">=6" } }, "node_modules/@slorber/static-site-generator-webpack-plugin": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/@slorber/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.7.tgz", "integrity": "sha512-Ug7x6z5lwrz0WqdnNFOMYrDQNTPAprvHLSh6+/fmml3qUiz6l5eq+2MzLKWtn/q5K5NpSiFsZTP/fck/3vjSxA==", "dependencies": { "eval": "^0.1.8", "p-map": "^4.0.0", "webpack-sources": "^3.2.2" }, "engines": { "node": ">=14" } }, "node_modules/@svgr/babel-plugin-add-jsx-attribute": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz", "integrity": "sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/babel-plugin-remove-jsx-attribute": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz", "integrity": "sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg==", "engines": { "node": ">=10" }, "funding": { "type": "github", 
"url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/babel-plugin-remove-jsx-empty-expression": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz", "integrity": "sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/babel-plugin-replace-jsx-attribute-value": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz", "integrity": "sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/babel-plugin-svg-dynamic-title": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz", "integrity": "sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/babel-plugin-svg-em-dimensions": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz", "integrity": "sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/babel-plugin-transform-react-native-svg": { "version": "5.4.0", "resolved": 
"https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz", "integrity": "sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/babel-plugin-transform-svg-component": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz", "integrity": "sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/babel-preset": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-5.5.0.tgz", "integrity": "sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig==", "dependencies": { "@svgr/babel-plugin-add-jsx-attribute": "^5.4.0", "@svgr/babel-plugin-remove-jsx-attribute": "^5.4.0", "@svgr/babel-plugin-remove-jsx-empty-expression": "^5.0.1", "@svgr/babel-plugin-replace-jsx-attribute-value": "^5.0.1", "@svgr/babel-plugin-svg-dynamic-title": "^5.4.0", "@svgr/babel-plugin-svg-em-dimensions": "^5.4.0", "@svgr/babel-plugin-transform-react-native-svg": "^5.4.0", "@svgr/babel-plugin-transform-svg-component": "^5.5.0" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/core": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/core/-/core-5.5.0.tgz", "integrity": "sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ==", "dependencies": { "@svgr/plugin-jsx": "^5.5.0", "camelcase": "^6.2.0", "cosmiconfig": "^7.0.0" }, "engines": { "node": ">=10" }, "funding": { 
"type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/hast-util-to-babel-ast": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz", "integrity": "sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ==", "dependencies": { "@babel/types": "^7.12.6" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/plugin-jsx": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-5.5.0.tgz", "integrity": "sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA==", "dependencies": { "@babel/core": "^7.12.3", "@svgr/babel-preset": "^5.5.0", "@svgr/hast-util-to-babel-ast": "^5.5.0", "svg-parser": "^2.0.2" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/plugin-svgo": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-5.5.0.tgz", "integrity": "sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ==", "dependencies": { "cosmiconfig": "^7.0.0", "deepmerge": "^4.2.2", "svgo": "^1.2.2" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/plugin-svgo/node_modules/chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" }, "engines": { "node": ">=4" } }, "node_modules/@svgr/plugin-svgo/node_modules/css-select": { "version": "2.1.0", "resolved": 
"https://registry.npmjs.org/css-select/-/css-select-2.1.0.tgz", "integrity": "sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ==", "dependencies": { "boolbase": "^1.0.0", "css-what": "^3.2.1", "domutils": "^1.7.0", "nth-check": "^1.0.2" } }, "node_modules/@svgr/plugin-svgo/node_modules/css-tree": { "version": "1.0.0-alpha.37", "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0-alpha.37.tgz", "integrity": "sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg==", "dependencies": { "mdn-data": "2.0.4", "source-map": "^0.6.1" }, "engines": { "node": ">=8.0.0" } }, "node_modules/@svgr/plugin-svgo/node_modules/css-what": { "version": "3.4.2", "resolved": "https://registry.npmjs.org/css-what/-/css-what-3.4.2.tgz", "integrity": "sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ==", "engines": { "node": ">= 6" }, "funding": { "url": "https://github.com/sponsors/fb55" } }, "node_modules/@svgr/plugin-svgo/node_modules/dom-serializer": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.2.2.tgz", "integrity": "sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g==", "dependencies": { "domelementtype": "^2.0.1", "entities": "^2.0.0" } }, "node_modules/@svgr/plugin-svgo/node_modules/dom-serializer/node_modules/domelementtype": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.2.0.tgz", "integrity": "sha512-DtBMo82pv1dFtUmHyr48beiuq792Sxohr+8Hm9zoxklYPfa6n0Z3Byjj2IV7bmr2IyqClnqEQhfgHJJ5QF0R5A==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/fb55" } ] }, "node_modules/@svgr/plugin-svgo/node_modules/domutils": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.7.0.tgz", "integrity": 
"sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg==", "dependencies": { "dom-serializer": "0", "domelementtype": "1" } }, "node_modules/@svgr/plugin-svgo/node_modules/mdn-data": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.4.tgz", "integrity": "sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA==" }, "node_modules/@svgr/plugin-svgo/node_modules/svgo": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/svgo/-/svgo-1.3.2.tgz", "integrity": "sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw==", "deprecated": "This SVGO version is no longer supported. Upgrade to v2.x.x.", "dependencies": { "chalk": "^2.4.1", "coa": "^2.0.2", "css-select": "^2.0.0", "css-select-base-adapter": "^0.1.1", "css-tree": "1.0.0-alpha.37", "csso": "^4.0.2", "js-yaml": "^3.13.1", "mkdirp": "~0.5.1", "object.values": "^1.1.0", "sax": "~1.2.4", "stable": "^0.1.8", "unquote": "~1.1.1", "util.promisify": "~1.0.0" }, "bin": { "svgo": "bin/svgo" }, "engines": { "node": ">=4.0.0" } }, "node_modules/@svgr/webpack": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/webpack/-/webpack-5.5.0.tgz", "integrity": "sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g==", "dependencies": { "@babel/core": "^7.12.3", "@babel/plugin-transform-react-constant-elements": "^7.12.1", "@babel/preset-env": "^7.12.1", "@babel/preset-react": "^7.12.5", "@svgr/core": "^5.5.0", "@svgr/plugin-jsx": "^5.5.0", "@svgr/plugin-svgo": "^5.5.0", "loader-utils": "^2.0.0" }, "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@szmarczak/http-timer": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz", "integrity": 
"sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==", "dependencies": { "defer-to-connect": "^1.0.1" }, "engines": { "node": ">=6" } }, "node_modules/@trysound/sax": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz", "integrity": "sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==", "engines": { "node": ">=10.13.0" } }, "node_modules/@types/body-parser": { "version": "1.19.2", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", "dependencies": { "@types/connect": "*", "@types/node": "*" } }, "node_modules/@types/bonjour": { "version": "3.5.10", "resolved": "https://registry.npmjs.org/@types/bonjour/-/bonjour-3.5.10.tgz", "integrity": "sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw==", "dependencies": { "@types/node": "*" } }, "node_modules/@types/connect": { "version": "3.4.35", "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", "dependencies": { "@types/node": "*" } }, "node_modules/@types/connect-history-api-fallback": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz", "integrity": "sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw==", "dependencies": { "@types/express-serve-static-core": "*", "@types/node": "*" } }, "node_modules/@types/eslint": { "version": "8.4.1", "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz", "integrity": "sha512-GE44+DNEyxxh2Kc6ro/VkIj+9ma0pO0bwv9+uHSyBrikYOHr8zYcdPvnBOp1aw8s+CjRvuSx7CyWqRrNFQ59mA==", "dependencies": { 
"@types/estree": "*", "@types/json-schema": "*" } }, "node_modules/@types/eslint-scope": { "version": "3.7.3", "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.3.tgz", "integrity": "sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g==", "dependencies": { "@types/eslint": "*", "@types/estree": "*" } }, "node_modules/@types/estree": { "version": "0.0.51", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz", "integrity": "sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==" }, "node_modules/@types/express": { "version": "4.17.13", "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz", "integrity": "sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==", "dependencies": { "@types/body-parser": "*", "@types/express-serve-static-core": "^4.17.18", "@types/qs": "*", "@types/serve-static": "*" } }, "node_modules/@types/express-serve-static-core": { "version": "4.17.29", "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.29.tgz", "integrity": "sha512-uMd++6dMKS32EOuw1Uli3e3BPgdLIXmezcfHv7N4c1s3gkhikBplORPpMq3fuWkxncZN1reb16d5n8yhQ80x7Q==", "dependencies": { "@types/node": "*", "@types/qs": "*", "@types/range-parser": "*" } }, "node_modules/@types/hast": { "version": "2.3.4", "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.4.tgz", "integrity": "sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==", "dependencies": { "@types/unist": "*" } }, "node_modules/@types/history": { "version": "4.7.11", "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.11.tgz", "integrity": "sha512-qjDJRrmvBMiTx+jyLxvLfJU7UznFuokDv4f3WRuriHKERccVpFU+8XMQUAbDzoiJCsmexxRExQeMwwCdamSKDA==" }, "node_modules/@types/html-minifier-terser": { "version": "6.1.0", "resolved": 
"https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", "integrity": "sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg==" }, "node_modules/@types/http-proxy": { "version": "1.17.9", "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.9.tgz", "integrity": "sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw==", "dependencies": { "@types/node": "*" } }, "node_modules/@types/json-schema": { "version": "7.0.9", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz", "integrity": "sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==" }, "node_modules/@types/mdast": { "version": "3.0.10", "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.10.tgz", "integrity": "sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==", "dependencies": { "@types/unist": "*" } }, "node_modules/@types/mime": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==" }, "node_modules/@types/node": { "version": "17.0.8", "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.8.tgz", "integrity": "sha512-YofkM6fGv4gDJq78g4j0mMuGMkZVxZDgtU0JRdx6FgiJDG+0fY0GKVolOV8WqVmEhLCXkQRjwDdKyPxJp/uucg==" }, "node_modules/@types/parse-json": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" }, "node_modules/@types/parse5": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-5.0.3.tgz", "integrity": "sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw==" 
}, "node_modules/@types/prop-types": { "version": "15.7.4", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.4.tgz", "integrity": "sha512-rZ5drC/jWjrArrS8BR6SIr4cWpW09RNTYt9AMZo3Jwwif+iacXAqgVjm0B0Bv/S1jhDXKHqRVNCbACkJ89RAnQ==" }, "node_modules/@types/q": { "version": "1.5.5", "resolved": "https://registry.npmjs.org/@types/q/-/q-1.5.5.tgz", "integrity": "sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ==" }, "node_modules/@types/qs": { "version": "6.9.7", "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" }, "node_modules/@types/range-parser": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" }, "node_modules/@types/react": { "version": "17.0.38", "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.38.tgz", "integrity": "sha512-SI92X1IA+FMnP3qM5m4QReluXzhcmovhZnLNm3pyeQlooi02qI7sLiepEYqT678uNiyc25XfCqxREFpy3W7YhQ==", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", "csstype": "^3.0.2" } }, "node_modules/@types/react-router": { "version": "5.1.18", "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-5.1.18.tgz", "integrity": "sha512-YYknwy0D0iOwKQgz9v8nOzt2J6l4gouBmDnWqUUznltOTaon+r8US8ky8HvN0tXvc38U9m6z/t2RsVsnd1zM0g==", "dependencies": { "@types/history": "^4.7.11", "@types/react": "*" } }, "node_modules/@types/react-router-config": { "version": "5.0.6", "resolved": "https://registry.npmjs.org/@types/react-router-config/-/react-router-config-5.0.6.tgz", "integrity": "sha512-db1mx37a1EJDf1XeX8jJN7R3PZABmJQXR8r28yUjVMFSjkmnQo6X6pOEEmNl+Tp2gYQOGPdYbFIipBtdElZ3Yg==", "dependencies": { "@types/history": "^4.7.11", "@types/react": "*", 
"@types/react-router": "*" } }, "node_modules/@types/react-router-dom": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-5.3.3.tgz", "integrity": "sha512-kpqnYK4wcdm5UaWI3fLcELopqLrHgLqNsdpHauzlQktfkHL3npOSwtj1Uz9oKBAzs7lFtVkV8j83voAz2D8fhw==", "dependencies": { "@types/history": "^4.7.11", "@types/react": "*", "@types/react-router": "*" } }, "node_modules/@types/retry": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz", "integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==" }, "node_modules/@types/sax": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/@types/sax/-/sax-1.2.4.tgz", "integrity": "sha512-pSAff4IAxJjfAXUG6tFkO7dsSbTmf8CtUpfhhZ5VhkRpC4628tJhh3+V6H1E+/Gs9piSzYKT5yzHO5M4GG9jkw==", "dependencies": { "@types/node": "*" } }, "node_modules/@types/scheduler": { "version": "0.16.2", "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==" }, "node_modules/@types/serve-index": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.1.tgz", "integrity": "sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg==", "dependencies": { "@types/express": "*" } }, "node_modules/@types/serve-static": { "version": "1.13.10", "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.10.tgz", "integrity": "sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==", "dependencies": { "@types/mime": "^1", "@types/node": "*" } }, "node_modules/@types/sockjs": { "version": "0.3.33", "resolved": "https://registry.npmjs.org/@types/sockjs/-/sockjs-0.3.33.tgz", "integrity": 
"sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw==", "dependencies": { "@types/node": "*" } }, "node_modules/@types/unist": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.6.tgz", "integrity": "sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==" }, "node_modules/@types/ws": { "version": "8.5.3", "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.3.tgz", "integrity": "sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w==", "dependencies": { "@types/node": "*" } }, "node_modules/@webassemblyjs/ast": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz", "integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==", "dependencies": { "@webassemblyjs/helper-numbers": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1" } }, "node_modules/@webassemblyjs/floating-point-hex-parser": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz", "integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==" }, "node_modules/@webassemblyjs/helper-api-error": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz", "integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==" }, "node_modules/@webassemblyjs/helper-buffer": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz", "integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==" }, "node_modules/@webassemblyjs/helper-numbers": { "version": "1.11.1", "resolved": 
"https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz", "integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==", "dependencies": { "@webassemblyjs/floating-point-hex-parser": "1.11.1", "@webassemblyjs/helper-api-error": "1.11.1", "@xtuc/long": "4.2.2" } }, "node_modules/@webassemblyjs/helper-wasm-bytecode": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz", "integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==" }, "node_modules/@webassemblyjs/helper-wasm-section": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz", "integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==", "dependencies": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-buffer": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1", "@webassemblyjs/wasm-gen": "1.11.1" } }, "node_modules/@webassemblyjs/ieee754": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz", "integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==", "dependencies": { "@xtuc/ieee754": "^1.2.0" } }, "node_modules/@webassemblyjs/leb128": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz", "integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==", "dependencies": { "@xtuc/long": "4.2.2" } }, "node_modules/@webassemblyjs/utf8": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz", "integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==" }, 
"node_modules/@webassemblyjs/wasm-edit": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz", "integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==", "dependencies": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-buffer": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1", "@webassemblyjs/helper-wasm-section": "1.11.1", "@webassemblyjs/wasm-gen": "1.11.1", "@webassemblyjs/wasm-opt": "1.11.1", "@webassemblyjs/wasm-parser": "1.11.1", "@webassemblyjs/wast-printer": "1.11.1" } }, "node_modules/@webassemblyjs/wasm-gen": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz", "integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==", "dependencies": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1", "@webassemblyjs/ieee754": "1.11.1", "@webassemblyjs/leb128": "1.11.1", "@webassemblyjs/utf8": "1.11.1" } }, "node_modules/@webassemblyjs/wasm-opt": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz", "integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==", "dependencies": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-buffer": "1.11.1", "@webassemblyjs/wasm-gen": "1.11.1", "@webassemblyjs/wasm-parser": "1.11.1" } }, "node_modules/@webassemblyjs/wasm-parser": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz", "integrity": "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==", "dependencies": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-api-error": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1", "@webassemblyjs/ieee754": "1.11.1", "@webassemblyjs/leb128": 
"1.11.1", "@webassemblyjs/utf8": "1.11.1" } }, "node_modules/@webassemblyjs/wast-printer": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz", "integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==", "dependencies": { "@webassemblyjs/ast": "1.11.1", "@xtuc/long": "4.2.2" } }, "node_modules/@xtuc/ieee754": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==" }, "node_modules/@xtuc/long": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==" }, "node_modules/accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", "dependencies": { "mime-types": "~2.1.34", "negotiator": "0.6.3" }, "engines": { "node": ">= 0.6" } }, "node_modules/acorn": { "version": "8.7.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", "bin": { "acorn": "bin/acorn" }, "engines": { "node": ">=0.4.0" } }, "node_modules/acorn-import-assertions": { "version": "1.7.6", "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.7.6.tgz", "integrity": "sha512-FlVvVFA1TX6l3lp8VjDnYYq7R1nyW6x3svAt4nDgrWQ9SBaSh9CnbwgSUTasgfNfOG5HlM1ehugCvM+hjo56LA==", "peerDependencies": { "acorn": "^8" } }, "node_modules/acorn-walk": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", "integrity": 
"sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", "engines": { "node": ">=0.4.0" } }, "node_modules/address": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/address/-/address-1.1.2.tgz", "integrity": "sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA==", "engines": { "node": ">= 0.12.0" } }, "node_modules/aggregate-error": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" }, "engines": { "node": ">=8" } }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" }, "funding": { "type": "github", "url": "https://github.com/sponsors/epoberezkin" } }, "node_modules/ajv-formats": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", "dependencies": { "ajv": "^8.0.0" }, "peerDependencies": { "ajv": "^8.0.0" }, "peerDependenciesMeta": { "ajv": { "optional": true } } }, "node_modules/ajv-formats/node_modules/ajv": { "version": "8.11.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2", "uri-js": "^4.2.2" }, "funding": { "type": "github", "url": 
"https://github.com/sponsors/epoberezkin" } }, "node_modules/ajv-formats/node_modules/json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "node_modules/ajv-keywords": { "version": "3.5.2", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", "peerDependencies": { "ajv": "^6.9.1" } }, "node_modules/algoliasearch": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.13.1.tgz", "integrity": "sha512-dtHUSE0caWTCE7liE1xaL+19AFf6kWEcyn76uhcitWpntqvicFHXKFoZe5JJcv9whQOTRM6+B8qJz6sFj+rDJA==", "dependencies": { "@algolia/cache-browser-local-storage": "4.13.1", "@algolia/cache-common": "4.13.1", "@algolia/cache-in-memory": "4.13.1", "@algolia/client-account": "4.13.1", "@algolia/client-analytics": "4.13.1", "@algolia/client-common": "4.13.1", "@algolia/client-personalization": "4.13.1", "@algolia/client-search": "4.13.1", "@algolia/logger-common": "4.13.1", "@algolia/logger-console": "4.13.1", "@algolia/requester-browser-xhr": "4.13.1", "@algolia/requester-common": "4.13.1", "@algolia/requester-node-http": "4.13.1", "@algolia/transporter": "4.13.1" } }, "node_modules/algoliasearch-helper": { "version": "3.10.0", "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.10.0.tgz", "integrity": "sha512-4E4od8qWWDMVvQ3jaRX6Oks/k35ywD011wAA4LbYMMjOtaZV6VWaTjRr4iN2bdaXP2o1BP7SLFMBf3wvnHmd8Q==", "dependencies": { "@algolia/events": "^4.0.1" }, "peerDependencies": { "algoliasearch": ">= 3.1 < 6" } }, "node_modules/ansi-align": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", "integrity": 
"sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==", "dependencies": { "string-width": "^4.1.0" } }, "node_modules/ansi-html-community": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", "integrity": "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==", "engines": [ "node >= 0.8.0" ], "bin": { "ansi-html": "bin/ansi-html" } }, "node_modules/ansi-regex": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", "engines": { "node": ">=12" }, "funding": { "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, "node_modules/ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", "dependencies": { "color-convert": "^1.9.0" }, "engines": { "node": ">=4" } }, "node_modules/anymatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" }, "engines": { "node": ">= 8" } }, "node_modules/arg": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==" }, "node_modules/argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "dependencies": { "sprintf-js": "~1.0.2" } }, 
"node_modules/array-flatten": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz", "integrity": "sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ==" }, "node_modules/array-union": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", "engines": { "node": ">=8" } }, "node_modules/asap": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" }, "node_modules/at-least-node": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", "engines": { "node": ">= 4.0.0" } }, "node_modules/autoprefixer": { "version": "10.4.7", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.7.tgz", "integrity": "sha512-ypHju4Y2Oav95SipEcCcI5J7CGPuvz8oat7sUtYj3ClK44bldfvtvcxK6IEK++7rqB7YchDGzweZIBG+SD0ZAA==", "funding": [ { "type": "opencollective", "url": "https://opencollective.com/postcss/" }, { "type": "tidelift", "url": "https://tidelift.com/funding/github/npm/autoprefixer" } ], "dependencies": { "browserslist": "^4.20.3", "caniuse-lite": "^1.0.30001335", "fraction.js": "^4.2.0", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", "postcss-value-parser": "^4.2.0" }, "bin": { "autoprefixer": "bin/autoprefixer" }, "engines": { "node": "^10 || ^12 || >=14" }, "peerDependencies": { "postcss": "^8.1.0" } }, "node_modules/axios": { "version": "0.25.0", "resolved": "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz", "integrity": 
"sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==", "dependencies": { "follow-redirects": "^1.14.7" } }, "node_modules/babel-loader": { "version": "8.2.5", "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.5.tgz", "integrity": "sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ==", "dependencies": { "find-cache-dir": "^3.3.1", "loader-utils": "^2.0.0", "make-dir": "^3.1.0", "schema-utils": "^2.6.5" }, "engines": { "node": ">= 8.9" }, "peerDependencies": { "@babel/core": "^7.0.0", "webpack": ">=2" } }, "node_modules/babel-loader/node_modules/schema-utils": { "version": "2.7.1", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz", "integrity": "sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==", "dependencies": { "@types/json-schema": "^7.0.5", "ajv": "^6.12.4", "ajv-keywords": "^3.5.2" }, "engines": { "node": ">= 8.9.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" } }, "node_modules/babel-plugin-apply-mdx-type-prop": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/babel-plugin-apply-mdx-type-prop/-/babel-plugin-apply-mdx-type-prop-1.6.22.tgz", "integrity": "sha512-VefL+8o+F/DfK24lPZMtJctrCVOfgbqLAGZSkxwhazQv4VxPg3Za/i40fu22KR2m8eEda+IfSOlPLUSIiLcnCQ==", "dependencies": { "@babel/helper-plugin-utils": "7.10.4", "@mdx-js/util": "1.6.22" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" }, "peerDependencies": { "@babel/core": "^7.11.6" } }, "node_modules/babel-plugin-apply-mdx-type-prop/node_modules/@babel/helper-plugin-utils": { "version": "7.10.4", "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" }, 
"node_modules/babel-plugin-dynamic-import-node": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", "dependencies": { "object.assign": "^4.1.0" } }, "node_modules/babel-plugin-extract-import-names": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/babel-plugin-extract-import-names/-/babel-plugin-extract-import-names-1.6.22.tgz", "integrity": "sha512-yJ9BsJaISua7d8zNT7oRG1ZLBJCIdZ4PZqmH8qa9N5AK01ifk3fnkc98AXhtzE7UkfCsEumvoQWgoYLhOnJ7jQ==", "dependencies": { "@babel/helper-plugin-utils": "7.10.4" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/babel-plugin-extract-import-names/node_modules/@babel/helper-plugin-utils": { "version": "7.10.4", "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" }, "node_modules/babel-plugin-polyfill-corejs2": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz", "integrity": "sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w==", "dependencies": { "@babel/compat-data": "^7.13.11", "@babel/helper-define-polyfill-provider": "^0.3.1", "semver": "^6.1.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/babel-plugin-polyfill-corejs2/node_modules/semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/babel-plugin-polyfill-corejs3": { "version": "0.5.2", "resolved": 
"https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.2.tgz", "integrity": "sha512-G3uJih0XWiID451fpeFaYGVuxHEjzKTHtc9uGFEjR6hHrvNzeS/PX+LLLcetJcytsB5m4j+K3o/EpXJNb/5IEQ==", "dependencies": { "@babel/helper-define-polyfill-provider": "^0.3.1", "core-js-compat": "^3.21.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/babel-plugin-polyfill-regenerator": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz", "integrity": "sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A==", "dependencies": { "@babel/helper-define-polyfill-provider": "^0.3.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/bail": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/bail/-/bail-1.0.5.tgz", "integrity": "sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "node_modules/base16": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz", "integrity": "sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ==" }, "node_modules/batch": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz", "integrity": "sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==" }, "node_modules/big.js": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", "integrity": 
"sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", "engines": { "node": "*" } }, "node_modules/binary-extensions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", "engines": { "node": ">=8" } }, "node_modules/body-parser": { "version": "1.20.0", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.0.tgz", "integrity": "sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg==", "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.4", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.10.3", "raw-body": "2.5.1", "type-is": "~1.6.18", "unpipe": "1.0.0" }, "engines": { "node": ">= 0.8", "npm": "1.2.8000 || >= 1.4.16" } }, "node_modules/body-parser/node_modules/bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", "engines": { "node": ">= 0.8" } }, "node_modules/body-parser/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/body-parser/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/bonjour-service": { "version": "1.0.13", "resolved": "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.0.13.tgz", "integrity": 
"sha512-LWKRU/7EqDUC9CTAQtuZl5HzBALoCYwtLhffW3et7vZMwv3bWLpJf8bRYlMD5OCcDpTfnPgNCV4yo9ZIaJGMiA==", "dependencies": { "array-flatten": "^2.1.2", "dns-equal": "^1.0.0", "fast-deep-equal": "^3.1.3", "multicast-dns": "^7.2.5" } }, "node_modules/boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", "integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=" }, "node_modules/boxen": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/boxen/-/boxen-5.0.1.tgz", "integrity": "sha512-49VBlw+PrWEF51aCmy7QIteYPIFZxSpvqBdP/2itCPPlJ49kj9zg/XPRFrdkne2W+CfwXUls8exMvu1RysZpKA==", "dependencies": { "ansi-align": "^3.0.0", "camelcase": "^6.2.0", "chalk": "^4.1.0", "cli-boxes": "^2.2.1", "string-width": "^4.2.0", "type-fest": "^0.20.2", "widest-line": "^3.1.0", "wrap-ansi": "^7.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "node_modules/braces": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", "dependencies": { "fill-range": "^7.0.1" }, "engines": { "node": ">=8" } }, "node_modules/browserslist": { "version": "4.20.3", "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.20.3.tgz", "integrity": "sha512-NBhymBQl1zM0Y5dQT/O+xiLP9/rzOIQdKM/eMJBAq7yBgaB6krIYLGejrwVYnSHZdqjscB1SPuAjHwxjvN6Wdg==", "funding": [ { "type": "opencollective", "url": "https://opencollective.com/browserslist" }, { "type": "tidelift", "url": "https://tidelift.com/funding/github/npm/browserslist" } ], "dependencies": { 
"caniuse-lite": "^1.0.30001332", "electron-to-chromium": "^1.4.118", "escalade": "^3.1.1", "node-releases": "^2.0.3", "picocolors": "^1.0.0" }, "bin": { "browserslist": "cli.js" }, "engines": { "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" } }, "node_modules/buffer-from": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" }, "node_modules/bytes": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=", "engines": { "node": ">= 0.8" } }, "node_modules/cacheable-request": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", "integrity": "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", "dependencies": { "clone-response": "^1.0.2", "get-stream": "^5.1.0", "http-cache-semantics": "^4.0.0", "keyv": "^3.0.0", "lowercase-keys": "^2.0.0", "normalize-url": "^4.1.0", "responselike": "^1.0.2" }, "engines": { "node": ">=8" } }, "node_modules/cacheable-request/node_modules/get-stream": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", "dependencies": { "pump": "^3.0.0" }, "engines": { "node": ">=8" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/cacheable-request/node_modules/lowercase-keys": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", "engines": { "node": ">=8" } }, "node_modules/cacheable-request/node_modules/normalize-url": { "version": "4.5.1", 
"resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz", "integrity": "sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==", "engines": { "node": ">=8" } }, "node_modules/call-bind": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", "dependencies": { "function-bind": "^1.1.1", "get-intrinsic": "^1.0.2" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", "engines": { "node": ">=6" } }, "node_modules/camel-case": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "node_modules/camelcase": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/camelcase-css": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", "engines": { "node": ">= 6" } }, "node_modules/caniuse-api": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", "integrity": 
"sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", "dependencies": { "browserslist": "^4.0.0", "caniuse-lite": "^1.0.0", "lodash.memoize": "^4.1.2", "lodash.uniq": "^4.5.0" } }, "node_modules/caniuse-lite": { "version": "1.0.30001335", "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001335.tgz", "integrity": "sha512-ddP1Tgm7z2iIxu6QTtbZUv6HJxSaV/PZeSrWFZtbY4JZ69tOeNhBCl3HyRQgeNZKE5AOn1kpV7fhljigy0Ty3w==", "funding": [ { "type": "opencollective", "url": "https://opencollective.com/browserslist" }, { "type": "tidelift", "url": "https://tidelift.com/funding/github/npm/caniuse-lite" } ] }, "node_modules/ccount": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/ccount/-/ccount-1.1.0.tgz", "integrity": "sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/chalk/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dependencies": { "color-convert": "^2.0.1" }, "engines": { "node": ">=8" }, "funding": { "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, "node_modules/chalk/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": 
"sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dependencies": { "color-name": "~1.1.4" }, "engines": { "node": ">=7.0.0" } }, "node_modules/chalk/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/chalk/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "engines": { "node": ">=8" } }, "node_modules/chalk/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dependencies": { "has-flag": "^4.0.0" }, "engines": { "node": ">=8" } }, "node_modules/character-entities": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz", "integrity": "sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/character-entities-legacy": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", "integrity": "sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/character-reference-invalid": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz", "integrity": 
"sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/cheerio": { "version": "1.0.0-rc.12", "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz", "integrity": "sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==", "dependencies": { "cheerio-select": "^2.1.0", "dom-serializer": "^2.0.0", "domhandler": "^5.0.3", "domutils": "^3.0.1", "htmlparser2": "^8.0.1", "parse5": "^7.0.0", "parse5-htmlparser2-tree-adapter": "^7.0.0" }, "engines": { "node": ">= 6" }, "funding": { "url": "https://github.com/cheeriojs/cheerio?sponsor=1" } }, "node_modules/cheerio-select": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz", "integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==", "dependencies": { "boolbase": "^1.0.0", "css-select": "^5.1.0", "css-what": "^6.1.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.0.1" }, "funding": { "url": "https://github.com/sponsors/fb55" } }, "node_modules/cheerio-select/node_modules/css-select": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", "dependencies": { "boolbase": "^1.0.0", "css-what": "^6.1.0", "domhandler": "^5.0.2", "domutils": "^3.0.1", "nth-check": "^2.0.1" }, "funding": { "url": "https://github.com/sponsors/fb55" } }, "node_modules/cheerio-select/node_modules/dom-serializer": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", "dependencies": { "domelementtype": "^2.3.0", 
"domhandler": "^5.0.2", "entities": "^4.2.0" }, "funding": { "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" } }, "node_modules/cheerio-select/node_modules/domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/fb55" } ] }, "node_modules/cheerio-select/node_modules/domhandler": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", "dependencies": { "domelementtype": "^2.3.0" }, "engines": { "node": ">= 4" }, "funding": { "url": "https://github.com/fb55/domhandler?sponsor=1" } }, "node_modules/cheerio-select/node_modules/domutils": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.0.1.tgz", "integrity": "sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q==", "dependencies": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.1" }, "funding": { "url": "https://github.com/fb55/domutils?sponsor=1" } }, "node_modules/cheerio-select/node_modules/entities": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/entities/-/entities-4.3.1.tgz", "integrity": "sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg==", "engines": { "node": ">=0.12" }, "funding": { "url": "https://github.com/fb55/entities?sponsor=1" } }, "node_modules/cheerio-select/node_modules/nth-check": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", "dependencies": { "boolbase": "^1.0.0" }, 
"funding": { "url": "https://github.com/fb55/nth-check?sponsor=1" } }, "node_modules/cheerio/node_modules/dom-serializer": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.2", "entities": "^4.2.0" }, "funding": { "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" } }, "node_modules/cheerio/node_modules/domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/fb55" } ] }, "node_modules/cheerio/node_modules/domhandler": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", "dependencies": { "domelementtype": "^2.3.0" }, "engines": { "node": ">= 4" }, "funding": { "url": "https://github.com/fb55/domhandler?sponsor=1" } }, "node_modules/cheerio/node_modules/domutils": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.0.1.tgz", "integrity": "sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q==", "dependencies": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.1" }, "funding": { "url": "https://github.com/fb55/domutils?sponsor=1" } }, "node_modules/cheerio/node_modules/entities": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/entities/-/entities-4.3.1.tgz", "integrity": "sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg==", "engines": { "node": ">=0.12" }, "funding": { 
"url": "https://github.com/fb55/entities?sponsor=1" } }, "node_modules/cheerio/node_modules/htmlparser2": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.1.tgz", "integrity": "sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA==", "funding": [ "https://github.com/fb55/htmlparser2?sponsor=1", { "type": "github", "url": "https://github.com/sponsors/fb55" } ], "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.2", "domutils": "^3.0.1", "entities": "^4.3.0" } }, "node_modules/chokidar": { "version": "3.5.3", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", "funding": [ { "type": "individual", "url": "https://paulmillr.com/funding/" } ], "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" }, "engines": { "node": ">= 8.10.0" }, "optionalDependencies": { "fsevents": "~2.3.2" } }, "node_modules/chrome-trace-event": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz", "integrity": "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==", "engines": { "node": ">=6.0" } }, "node_modules/clean-css": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-5.3.0.tgz", "integrity": "sha512-YYuuxv4H/iNb1Z/5IbMRoxgrzjWGhOEFfd+groZ5dMCVkpENiMZmwspdrzBo9286JjM1gZJPAyL7ZIdzuvu2AQ==", "dependencies": { "source-map": "~0.6.0" }, "engines": { "node": ">= 10.0" } }, "node_modules/clean-stack": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", "integrity": 
"sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", "engines": { "node": ">=6" } }, "node_modules/cli-boxes": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz", "integrity": "sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==", "engines": { "node": ">=6" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/cli-table3": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.2.tgz", "integrity": "sha512-QyavHCaIC80cMivimWu4aWHilIpiDpfm3hGmqAmXVL1UsnbLuBSMd21hTX6VY4ZSDSM73ESLeF8TOYId3rBTbw==", "dependencies": { "string-width": "^4.2.0" }, "engines": { "node": "10.* || >= 12.*" }, "optionalDependencies": { "@colors/colors": "1.5.0" } }, "node_modules/clone-deep": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", "dependencies": { "is-plain-object": "^2.0.4", "kind-of": "^6.0.2", "shallow-clone": "^3.0.0" }, "engines": { "node": ">=6" } }, "node_modules/clone-response": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", "integrity": "sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws=", "dependencies": { "mimic-response": "^1.0.0" } }, "node_modules/clsx": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==", "engines": { "node": ">=6" } }, "node_modules/coa": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/coa/-/coa-2.0.2.tgz", "integrity": "sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA==", "dependencies": { "@types/q": "^1.5.1", "chalk": "^2.4.1", "q": "^1.1.2" }, 
"engines": { "node": ">= 4.0" } }, "node_modules/coa/node_modules/chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" }, "engines": { "node": ">=4" } }, "node_modules/collapse-white-space": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-1.0.6.tgz", "integrity": "sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", "dependencies": { "color-name": "1.1.3" } }, "node_modules/color-name": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" }, "node_modules/colord": { "version": "2.9.2", "resolved": "https://registry.npmjs.org/colord/-/colord-2.9.2.tgz", "integrity": "sha512-Uqbg+J445nc1TKn4FoDPS6ZZqAvEDnwrH42yo8B40JSOgSLxMZ/gt3h4nmCtPLQeXhjJJkqBx7SCY35WnIixaQ==" }, "node_modules/colorette": { "version": "2.0.19", "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.19.tgz", "integrity": "sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==" }, "node_modules/combine-promises": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/combine-promises/-/combine-promises-1.1.0.tgz", "integrity": "sha512-ZI9jvcLDxqwaXEixOhArm3r7ReIivsXkpbyEWyeOhzz1QS0iSgBPnWvEqvIQtYyamGCYA88gFhmUrs9hrrQ0pg==", "engines": { "node": ">=10" } }, 
"node_modules/comma-separated-tokens": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz", "integrity": "sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/commander": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", "engines": { "node": ">= 6" } }, "node_modules/commondir": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=" }, "node_modules/compressible": { "version": "2.0.18", "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", "dependencies": { "mime-db": ">= 1.43.0 < 2" }, "engines": { "node": ">= 0.6" } }, "node_modules/compression": { "version": "1.7.4", "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", "dependencies": { "accepts": "~1.3.5", "bytes": "3.0.0", "compressible": "~2.0.16", "debug": "2.6.9", "on-headers": "~1.0.2", "safe-buffer": "5.1.2", "vary": "~1.1.2" }, "engines": { "node": ">= 0.8.0" } }, "node_modules/compression/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/compression/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": 
"sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "node_modules/configstore": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz", "integrity": "sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==", "dependencies": { "dot-prop": "^5.2.0", "graceful-fs": "^4.1.2", "make-dir": "^3.0.0", "unique-string": "^2.0.0", "write-file-atomic": "^3.0.0", "xdg-basedir": "^4.0.0" }, "engines": { "node": ">=8" } }, "node_modules/connect-history-api-fallback": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz", "integrity": "sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==", "engines": { "node": ">=0.8" } }, "node_modules/consola": { "version": "2.15.3", "resolved": "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz", "integrity": "sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw==" }, "node_modules/content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", "dependencies": { "safe-buffer": "5.2.1" }, "engines": { "node": ">= 0.6" } }, "node_modules/content-disposition/node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/feross" }, { "type": "patreon", 
"url": "https://www.patreon.com/feross" }, { "type": "consulting", "url": "https://feross.org/support" } ] }, "node_modules/content-type": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==", "engines": { "node": ">= 0.6" } }, "node_modules/convert-source-map": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz", "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==", "dependencies": { "safe-buffer": "~5.1.1" } }, "node_modules/cookie": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", "engines": { "node": ">= 0.6" } }, "node_modules/cookie-signature": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" }, "node_modules/copy-text-to-clipboard": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/copy-text-to-clipboard/-/copy-text-to-clipboard-3.0.1.tgz", "integrity": "sha512-rvVsHrpFcL4F2P8ihsoLdFHmd404+CMg71S756oRSeQgqk51U3kicGdnvfkrxva0xXH92SjGS62B0XIJsbh+9Q==", "engines": { "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/copy-webpack-plugin": { "version": "11.0.0", "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz", "integrity": "sha512-fX2MWpamkW0hZxMEg0+mYnA40LTosOSa5TqZ9GYIBzyJa9C3QUaMPSE2xAi/buNr8u89SfD9wHSQVBzrRa/SOQ==", "dependencies": { "fast-glob": "^3.2.11", "glob-parent": "^6.0.1", "globby": "^13.1.1", "normalize-path": "^3.0.0", "schema-utils": "^4.0.0", 
"serialize-javascript": "^6.0.0" }, "engines": { "node": ">= 14.15.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" }, "peerDependencies": { "webpack": "^5.1.0" } }, "node_modules/copy-webpack-plugin/node_modules/ajv": { "version": "8.11.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2", "uri-js": "^4.2.2" }, "funding": { "type": "github", "url": "https://github.com/sponsors/epoberezkin" } }, "node_modules/copy-webpack-plugin/node_modules/ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "dependencies": { "fast-deep-equal": "^3.1.3" }, "peerDependencies": { "ajv": "^8.8.2" } }, "node_modules/copy-webpack-plugin/node_modules/glob-parent": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dependencies": { "is-glob": "^4.0.3" }, "engines": { "node": ">=10.13.0" } }, "node_modules/copy-webpack-plugin/node_modules/globby": { "version": "13.1.2", "resolved": "https://registry.npmjs.org/globby/-/globby-13.1.2.tgz", "integrity": "sha512-LKSDZXToac40u8Q1PQtZihbNdTYSNMuWe+K5l+oa6KgDzSvVrHXlJy40hUP522RjAIoNLJYBJi7ow+rbFpIhHQ==", "dependencies": { "dir-glob": "^3.0.1", "fast-glob": "^3.2.11", "ignore": "^5.2.0", "merge2": "^1.4.1", "slash": "^4.0.0" }, "engines": { "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/copy-webpack-plugin/node_modules/json-schema-traverse": { "version": 
"1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "node_modules/copy-webpack-plugin/node_modules/schema-utils": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "dependencies": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", "ajv-formats": "^2.1.1", "ajv-keywords": "^5.0.0" }, "engines": { "node": ">= 12.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" } }, "node_modules/copy-webpack-plugin/node_modules/slash": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", "engines": { "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/core-js": { "version": "3.23.5", "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.23.5.tgz", "integrity": "sha512-7Vh11tujtAZy82da4duVreQysIoO2EvVrur7y6IzZkH1IHPSekuDi8Vuw1+YKjkbfWLRD7Nc9ICQ/sIUDutcyg==", "hasInstallScript": true, "funding": { "type": "opencollective", "url": "https://opencollective.com/core-js" } }, "node_modules/core-js-compat": { "version": "3.22.3", "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.22.3.tgz", "integrity": "sha512-wliMbvPI2idgFWpFe7UEyHMvu6HWgW8WA+HnDRtgzoSDYvXFMpoGX1H3tPDDXrcfUSyXafCLDd7hOeMQHEZxGw==", "dependencies": { "browserslist": "^4.20.3", "semver": "7.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/core-js" } }, "node_modules/core-js-compat/node_modules/semver": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", "integrity": 
"sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", "bin": { "semver": "bin/semver.js" } }, "node_modules/core-js-pure": { "version": "3.23.5", "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.23.5.tgz", "integrity": "sha512-8t78LdpKSuCq4pJYCYk8hl7XEkAX+BP16yRIwL3AanTksxuEf7CM83vRyctmiEL8NDZ3jpUcv56fk9/zG3aIuw==", "hasInstallScript": true, "funding": { "type": "opencollective", "url": "https://opencollective.com/core-js" } }, "node_modules/core-util-is": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" }, "node_modules/cosmiconfig": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.1.tgz", "integrity": "sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ==", "dependencies": { "@types/parse-json": "^4.0.0", "import-fresh": "^3.2.1", "parse-json": "^5.0.0", "path-type": "^4.0.0", "yaml": "^1.10.0" }, "engines": { "node": ">=10" } }, "node_modules/cross-fetch": { "version": "3.1.5", "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.5.tgz", "integrity": "sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw==", "dependencies": { "node-fetch": "2.6.7" } }, "node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" }, "engines": { "node": ">= 8" } }, "node_modules/crypto-random-string": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", "integrity": 
"sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==", "engines": { "node": ">=8" } }, "node_modules/css-declaration-sorter": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.3.0.tgz", "integrity": "sha512-OGT677UGHJTAVMRhPO+HJ4oKln3wkBTwtDFH0ojbqm+MJm6xuDMHp2nkhh/ThaBqq20IbraBQSWKfSLNHQO9Og==", "engines": { "node": "^10 || ^12 || >=14" }, "peerDependencies": { "postcss": "^8.0.9" } }, "node_modules/css-loader": { "version": "6.7.1", "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.7.1.tgz", "integrity": "sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw==", "dependencies": { "icss-utils": "^5.1.0", "postcss": "^8.4.7", "postcss-modules-extract-imports": "^3.0.0", "postcss-modules-local-by-default": "^4.0.0", "postcss-modules-scope": "^3.0.0", "postcss-modules-values": "^4.0.0", "postcss-value-parser": "^4.2.0", "semver": "^7.3.5" }, "engines": { "node": ">= 12.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" }, "peerDependencies": { "webpack": "^5.0.0" } }, "node_modules/css-minimizer-webpack-plugin": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-4.0.0.tgz", "integrity": "sha512-7ZXXRzRHvofv3Uac5Y+RkWRNo0ZMlcg8e9/OtrqUYmwDWJo+qs67GvdeFrXLsFb7czKNwjQhPkM0avlIYl+1nA==", "dependencies": { "cssnano": "^5.1.8", "jest-worker": "^27.5.1", "postcss": "^8.4.13", "schema-utils": "^4.0.0", "serialize-javascript": "^6.0.0", "source-map": "^0.6.1" }, "engines": { "node": ">= 14.15.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" }, "peerDependencies": { "webpack": "^5.0.0" }, "peerDependenciesMeta": { "@parcel/css": { "optional": true }, "clean-css": { "optional": true }, "csso": { "optional": true }, "esbuild": { "optional": true } } }, 
"node_modules/css-minimizer-webpack-plugin/node_modules/ajv": { "version": "8.11.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2", "uri-js": "^4.2.2" }, "funding": { "type": "github", "url": "https://github.com/sponsors/epoberezkin" } }, "node_modules/css-minimizer-webpack-plugin/node_modules/ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "dependencies": { "fast-deep-equal": "^3.1.3" }, "peerDependencies": { "ajv": "^8.8.2" } }, "node_modules/css-minimizer-webpack-plugin/node_modules/json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "node_modules/css-minimizer-webpack-plugin/node_modules/schema-utils": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "dependencies": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", "ajv-formats": "^2.1.1", "ajv-keywords": "^5.0.0" }, "engines": { "node": ">= 12.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" } }, "node_modules/css-select": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz", "integrity": "sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==", "dependencies": { "boolbase": "^1.0.0", "css-what": "^6.0.1", 
"domhandler": "^4.3.1", "domutils": "^2.8.0", "nth-check": "^2.0.1" }, "funding": { "url": "https://github.com/sponsors/fb55" } }, "node_modules/css-select-base-adapter": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz", "integrity": "sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w==" }, "node_modules/css-select/node_modules/nth-check": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz", "integrity": "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==", "dependencies": { "boolbase": "^1.0.0" }, "funding": { "url": "https://github.com/fb55/nth-check?sponsor=1" } }, "node_modules/css-tree": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz", "integrity": "sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==", "dependencies": { "mdn-data": "2.0.14", "source-map": "^0.6.1" }, "engines": { "node": ">=8.0.0" } }, "node_modules/css-what": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", "engines": { "node": ">= 6" }, "funding": { "url": "https://github.com/sponsors/fb55" } }, "node_modules/cssesc": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", "bin": { "cssesc": "bin/cssesc" }, "engines": { "node": ">=4" } }, "node_modules/cssnano": { "version": "5.1.12", "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-5.1.12.tgz", "integrity": "sha512-TgvArbEZu0lk/dvg2ja+B7kYoD7BBCmn3+k58xD0qjrGHsFzXY/wKTo9M5egcUCabPol05e/PVoIu79s2JN4WQ==", "dependencies": { 
"cssnano-preset-default": "^5.2.12", "lilconfig": "^2.0.3", "yaml": "^1.10.2" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/cssnano" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/cssnano-preset-advanced": { "version": "5.3.8", "resolved": "https://registry.npmjs.org/cssnano-preset-advanced/-/cssnano-preset-advanced-5.3.8.tgz", "integrity": "sha512-xUlLLnEB1LjpEik+zgRNlk8Y/koBPPtONZjp7JKbXigeAmCrFvq9H0pXW5jJV45bQWAlmJ0sKy+IMr0XxLYQZg==", "dependencies": { "autoprefixer": "^10.3.7", "cssnano-preset-default": "^5.2.12", "postcss-discard-unused": "^5.1.0", "postcss-merge-idents": "^5.1.1", "postcss-reduce-idents": "^5.2.0", "postcss-zindex": "^5.1.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/cssnano-preset-default": { "version": "5.2.12", "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.2.12.tgz", "integrity": "sha512-OyCBTZi+PXgylz9HAA5kHyoYhfGcYdwFmyaJzWnzxuGRtnMw/kR6ilW9XzlzlRAtB6PLT/r+prYgkef7hngFew==", "dependencies": { "css-declaration-sorter": "^6.3.0", "cssnano-utils": "^3.1.0", "postcss-calc": "^8.2.3", "postcss-colormin": "^5.3.0", "postcss-convert-values": "^5.1.2", "postcss-discard-comments": "^5.1.2", "postcss-discard-duplicates": "^5.1.0", "postcss-discard-empty": "^5.1.1", "postcss-discard-overridden": "^5.1.0", "postcss-merge-longhand": "^5.1.6", "postcss-merge-rules": "^5.1.2", "postcss-minify-font-values": "^5.1.0", "postcss-minify-gradients": "^5.1.1", "postcss-minify-params": "^5.1.3", "postcss-minify-selectors": "^5.2.1", "postcss-normalize-charset": "^5.1.0", "postcss-normalize-display-values": "^5.1.0", "postcss-normalize-positions": "^5.1.1", "postcss-normalize-repeat-style": "^5.1.1", "postcss-normalize-string": "^5.1.0", "postcss-normalize-timing-functions": "^5.1.0", "postcss-normalize-unicode": "^5.1.0", "postcss-normalize-url": 
"^5.1.0", "postcss-normalize-whitespace": "^5.1.1", "postcss-ordered-values": "^5.1.3", "postcss-reduce-initial": "^5.1.0", "postcss-reduce-transforms": "^5.1.0", "postcss-svgo": "^5.1.0", "postcss-unique-selectors": "^5.1.1" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/cssnano-utils": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-3.1.0.tgz", "integrity": "sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA==", "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/csso": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz", "integrity": "sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==", "dependencies": { "css-tree": "^1.1.2" }, "engines": { "node": ">=8.0.0" } }, "node_modules/csstype": { "version": "3.0.10", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.0.10.tgz", "integrity": "sha512-2u44ZG2OcNUO9HDp/Jl8C07x6pU/eTR3ncV91SiK3dhG9TWvRVsCoJw14Ckx5DgWkzGA3waZWO3d7pgqpUI/XA==" }, "node_modules/debug": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", "dependencies": { "ms": "2.1.2" }, "engines": { "node": ">=6.0" }, "peerDependenciesMeta": { "supports-color": { "optional": true } } }, "node_modules/decompress-response": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", "integrity": "sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M=", "dependencies": { "mimic-response": "^1.0.0" }, "engines": { "node": ">=4" } }, "node_modules/deep-extend": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", "integrity": 
"sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", "engines": { "node": ">=4.0.0" } }, "node_modules/deepmerge": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==", "engines": { "node": ">=0.10.0" } }, "node_modules/default-gateway": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz", "integrity": "sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg==", "dependencies": { "execa": "^5.0.0" }, "engines": { "node": ">= 10" } }, "node_modules/defer-to-connect": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz", "integrity": "sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==" }, "node_modules/define-lazy-prop": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", "engines": { "node": ">=8" } }, "node_modules/define-properties": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", "dependencies": { "object-keys": "^1.0.12" }, "engines": { "node": ">= 0.4" } }, "node_modules/del": { "version": "6.1.1", "resolved": "https://registry.npmjs.org/del/-/del-6.1.1.tgz", "integrity": "sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==", "dependencies": { "globby": "^11.0.1", "graceful-fs": "^4.2.4", "is-glob": "^4.0.1", "is-path-cwd": "^2.2.0", "is-path-inside": "^3.0.2", "p-map": "^4.0.0", "rimraf": 
"^3.0.2", "slash": "^3.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", "engines": { "node": ">= 0.8" } }, "node_modules/destroy": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", "engines": { "node": ">= 0.8", "npm": "1.2.8000 || >= 1.4.16" } }, "node_modules/detab": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/detab/-/detab-2.0.4.tgz", "integrity": "sha512-8zdsQA5bIkoRECvCrNKPla84lyoR7DSAyf7p0YgXzBO9PDJx8KntPUay7NS6yp+KdxdVtiE5SpHKtbp2ZQyA9g==", "dependencies": { "repeat-string": "^1.5.4" }, "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/detect-node": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz", "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==" }, "node_modules/detect-port": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/detect-port/-/detect-port-1.3.0.tgz", "integrity": "sha512-E+B1gzkl2gqxt1IhUzwjrxBKRqx1UzC3WLONHinn8S3T6lwV/agVCyitiFOsGJ/eYuEUBvD71MZHy3Pv1G9doQ==", "dependencies": { "address": "^1.0.1", "debug": "^2.6.0" }, "bin": { "detect": "bin/detect-port", "detect-port": "bin/detect-port" }, "engines": { "node": ">= 4.2.1" } }, "node_modules/detect-port-alt": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/detect-port-alt/-/detect-port-alt-1.1.6.tgz", "integrity": "sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q==", "dependencies": { "address": "^1.0.1", "debug": "^2.6.0" }, "bin": 
{ "detect": "bin/detect-port", "detect-port": "bin/detect-port" }, "engines": { "node": ">= 4.2.1" } }, "node_modules/detect-port-alt/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/detect-port-alt/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/detect-port/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/detect-port/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, "node_modules/dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", "dependencies": { "path-type": "^4.0.0" }, "engines": { "node": ">=8" } }, "node_modules/dns-equal": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", "integrity": "sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg==" }, "node_modules/dns-packet": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.4.0.tgz", "integrity": "sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g==", "dependencies": { "@leichtgewicht/ip-codec": "^2.0.1" }, "engines": { "node": ">=6" } }, 
"node_modules/docusaurus-gtm-plugin": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/docusaurus-gtm-plugin/-/docusaurus-gtm-plugin-0.0.2.tgz", "integrity": "sha512-Xx/df0Ppd5SultlzUj9qlQk2lX9mNVfTb41juyBUPZ1Nc/5dNx+uN0VuLyF4JEObkDRrUY1EFo9fEUDo8I6QOQ==" }, "node_modules/dom-converter": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", "dependencies": { "utila": "~0.4" } }, "node_modules/dom-serializer": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz", "integrity": "sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==", "dependencies": { "domelementtype": "^2.0.1", "domhandler": "^4.2.0", "entities": "^2.0.0" }, "funding": { "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" } }, "node_modules/dom-serializer/node_modules/domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/fb55" } ] }, "node_modules/domelementtype": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.1.tgz", "integrity": "sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w==" }, "node_modules/domhandler": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz", "integrity": "sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==", "dependencies": { "domelementtype": "^2.2.0" }, "engines": { "node": ">= 4" }, "funding": { "url": "https://github.com/fb55/domhandler?sponsor=1" } }, 
"node_modules/domhandler/node_modules/domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/fb55" } ] }, "node_modules/domutils": { "version": "2.8.0", "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz", "integrity": "sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==", "dependencies": { "dom-serializer": "^1.0.1", "domelementtype": "^2.2.0", "domhandler": "^4.2.0" }, "funding": { "url": "https://github.com/fb55/domutils?sponsor=1" } }, "node_modules/domutils/node_modules/domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/fb55" } ] }, "node_modules/dot-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "node_modules/dot-prop": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", "dependencies": { "is-obj": "^2.0.0" }, "engines": { "node": ">=8" } }, "node_modules/duplexer": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==" }, "node_modules/duplexer3": { "version": "0.1.4", "resolved": 
"https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz", "integrity": "sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=" }, "node_modules/eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, "node_modules/electron-to-chromium": { "version": "1.4.129", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.129.tgz", "integrity": "sha512-GgtN6bsDtHdtXJtlMYZWGB/uOyjZWjmRDumXTas7dGBaB9zUyCjzHet1DY2KhyHN8R0GLbzZWqm4efeddqqyRQ==" }, "node_modules/emoji-regex": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" }, "node_modules/emojis-list": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", "engines": { "node": ">= 4" } }, "node_modules/emoticon": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/emoticon/-/emoticon-3.2.0.tgz", "integrity": "sha512-SNujglcLTTg+lDAcApPNgEdudaqQFiAbJCqzjNxJkvN9vAwCGi0uu8IUVvx+f16h+V44KCY6Y2yboroc9pilHg==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", "engines": { "node": ">= 0.8" } }, "node_modules/end-of-stream": { 
"version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "dependencies": { "once": "^1.4.0" } }, "node_modules/enhanced-resolve": { "version": "5.9.3", "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.9.3.tgz", "integrity": "sha512-Bq9VSor+kjvW3f9/MiiR4eE3XYgOl7/rS8lnSxbRbF3kS0B2r+Y9w5krBWxZgDxASVZbdYrn5wT4j/Wb0J9qow==", "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" }, "engines": { "node": ">=10.13.0" } }, "node_modules/entities": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==", "funding": { "url": "https://github.com/fb55/entities?sponsor=1" } }, "node_modules/error-ex": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", "dependencies": { "is-arrayish": "^0.2.1" } }, "node_modules/es-abstract": { "version": "1.18.5", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.5.tgz", "integrity": "sha512-DDggyJLoS91CkJjgauM5c0yZMjiD1uK3KcaCeAmffGwZ+ODWzOkPN4QwRbsK5DOFf06fywmyLci3ZD8jLGhVYA==", "dependencies": { "call-bind": "^1.0.2", "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", "get-intrinsic": "^1.1.1", "has": "^1.0.3", "has-symbols": "^1.0.2", "internal-slot": "^1.0.3", "is-callable": "^1.2.3", "is-negative-zero": "^2.0.1", "is-regex": "^1.1.3", "is-string": "^1.0.6", "object-inspect": "^1.11.0", "object-keys": "^1.1.1", "object.assign": "^4.1.2", "string.prototype.trimend": "^1.0.4", "string.prototype.trimstart": "^1.0.4", "unbox-primitive": "^1.0.1" }, "engines": { "node": ">= 0.4" }, "funding": { "url": 
"https://github.com/sponsors/ljharb" } }, "node_modules/es-module-lexer": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz", "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==" }, "node_modules/es-to-primitive": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", "dependencies": { "is-callable": "^1.1.4", "is-date-object": "^1.0.1", "is-symbol": "^1.0.2" }, "engines": { "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/escalade": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", "engines": { "node": ">=6" } }, "node_modules/escape-goat": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-2.1.1.tgz", "integrity": "sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==", "engines": { "node": ">=8" } }, "node_modules/escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" }, "node_modules/escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", "engines": { "node": ">=0.8.0" } }, "node_modules/eslint-scope": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", "dependencies": { "esrecurse": "^4.3.0", "estraverse": 
"^4.1.1" }, "engines": { "node": ">=8.0.0" } }, "node_modules/esprima": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", "bin": { "esparse": "bin/esparse.js", "esvalidate": "bin/esvalidate.js" }, "engines": { "node": ">=4" } }, "node_modules/esrecurse": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dependencies": { "estraverse": "^5.2.0" }, "engines": { "node": ">=4.0" } }, "node_modules/esrecurse/node_modules/estraverse": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", "engines": { "node": ">=4.0" } }, "node_modules/estraverse": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", "engines": { "node": ">=4.0" } }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "engines": { "node": ">=0.10.0" } }, "node_modules/eta": { "version": "1.12.3", "resolved": "https://registry.npmjs.org/eta/-/eta-1.12.3.tgz", "integrity": "sha512-qHixwbDLtekO/d51Yr4glcaUJCIjGVJyTzuqV4GPlgZo1YpgOKG+avQynErZIYrfM6JIJdtiG2Kox8tbb+DoGg==", "engines": { "node": ">=6.0.0" }, "funding": { "url": "https://github.com/eta-dev/eta?sponsor=1" } }, "node_modules/etag": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": 
"sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", "engines": { "node": ">= 0.6" } }, "node_modules/eval": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz", "integrity": "sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==", "dependencies": { "@types/node": "*", "require-like": ">= 0.1.1" }, "engines": { "node": ">= 0.8" } }, "node_modules/eventemitter3": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" }, "node_modules/events": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", "engines": { "node": ">=0.8.x" } }, "node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^6.0.0", "human-signals": "^2.1.0", "is-stream": "^2.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^4.0.1", "onetime": "^5.1.2", "signal-exit": "^3.0.3", "strip-final-newline": "^2.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, "node_modules/execa/node_modules/get-stream": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/express": { "version": "4.18.1", "resolved": 
"https://registry.npmjs.org/express/-/express-4.18.1.tgz", "integrity": "sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q==", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.0", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.5.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.2.0", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.1", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "0.1.7", "proxy-addr": "~2.0.7", "qs": "6.10.3", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.18.0", "serve-static": "1.15.0", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" }, "engines": { "node": ">= 0.10.0" } }, "node_modules/express/node_modules/array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" }, "node_modules/express/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/express/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/express/node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", 
"funding": [ { "type": "github", "url": "https://github.com/sponsors/feross" }, { "type": "patreon", "url": "https://www.patreon.com/feross" }, { "type": "consulting", "url": "https://feross.org/support" } ] }, "node_modules/extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, "node_modules/extend-shallow": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", "dependencies": { "is-extendable": "^0.1.0" }, "engines": { "node": ">=0.10.0" } }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" }, "node_modules/fast-glob": { "version": "3.2.11", "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.4" }, "engines": { "node": ">=8.6.0" } }, "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" }, "node_modules/fast-url-parser": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz", "integrity": "sha1-9K8+qfNNiicc9YrSs3WfQx8LMY0=", "dependencies": { "punycode": "^1.3.2" } }, 
"node_modules/fast-url-parser/node_modules/punycode": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" }, "node_modules/fastq": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", "dependencies": { "reusify": "^1.0.4" } }, "node_modules/faye-websocket": { "version": "0.11.4", "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz", "integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==", "dependencies": { "websocket-driver": ">=0.5.1" }, "engines": { "node": ">=0.8.0" } }, "node_modules/fbemitter": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/fbemitter/-/fbemitter-3.0.0.tgz", "integrity": "sha512-KWKaceCwKQU0+HPoop6gn4eOHk50bBv/VxjJtGMfwmJt3D29JpN4H4eisCtIPA+a8GVBam+ldMMpMjJUvpDyHw==", "dependencies": { "fbjs": "^3.0.0" } }, "node_modules/fbjs": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/fbjs/-/fbjs-3.0.4.tgz", "integrity": "sha512-ucV0tDODnGV3JCnnkmoszb5lf4bNpzjv80K41wd4k798Etq+UYD0y0TIfalLjZoKgjive6/adkRnszwapiDgBQ==", "dependencies": { "cross-fetch": "^3.1.5", "fbjs-css-vars": "^1.0.0", "loose-envify": "^1.0.0", "object-assign": "^4.1.0", "promise": "^7.1.1", "setimmediate": "^1.0.5", "ua-parser-js": "^0.7.30" } }, "node_modules/fbjs-css-vars": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz", "integrity": "sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ==" }, "node_modules/feed": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/feed/-/feed-4.2.2.tgz", "integrity": "sha512-u5/sxGfiMfZNtJ3OvQpXcvotFpYkL0n9u9mM2vkui2nGo8b4wvDkJ8gAkYqbA8QpGyFCv3RK0Z+Iv+9veCS9bQ==", "dependencies": { "xml-js": 
"^1.6.11" }, "engines": { "node": ">=0.4.0" } }, "node_modules/file-loader": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz", "integrity": "sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==", "dependencies": { "loader-utils": "^2.0.0", "schema-utils": "^3.0.0" }, "engines": { "node": ">= 10.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" }, "peerDependencies": { "webpack": "^4.0.0 || ^5.0.0" } }, "node_modules/filesize": { "version": "8.0.7", "resolved": "https://registry.npmjs.org/filesize/-/filesize-8.0.7.tgz", "integrity": "sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ==", "engines": { "node": ">= 0.4.0" } }, "node_modules/fill-range": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", "dependencies": { "to-regex-range": "^5.0.1" }, "engines": { "node": ">=8" } }, "node_modules/finalhandler": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", "dependencies": { "debug": "2.6.9", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", "statuses": "2.0.1", "unpipe": "~1.0.0" }, "engines": { "node": ">= 0.8" } }, "node_modules/finalhandler/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/finalhandler/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", 
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/find-cache-dir": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", "dependencies": { "commondir": "^1.0.1", "make-dir": "^3.0.2", "pkg-dir": "^4.1.0" }, "engines": { "node": ">=8" }, "funding": { "url": "https://github.com/avajs/find-cache-dir?sponsor=1" } }, "node_modules/find-up": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" }, "engines": { "node": ">=8" } }, "node_modules/flux": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/flux/-/flux-4.0.3.tgz", "integrity": "sha512-yKAbrp7JhZhj6uiT1FTuVMlIAT1J4jqEyBpFApi1kxpGZCvacMVc/t1pMQyotqHhAgvoE3bNvAykhCo2CLjnYw==", "dependencies": { "fbemitter": "^3.0.0", "fbjs": "^3.0.1" }, "peerDependencies": { "react": "^15.0.2 || ^16.0.0 || ^17.0.0" } }, "node_modules/follow-redirects": { "version": "1.14.9", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.9.tgz", "integrity": "sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w==", "funding": [ { "type": "individual", "url": "https://github.com/sponsors/RubenVerborgh" } ], "engines": { "node": ">=4.0" }, "peerDependenciesMeta": { "debug": { "optional": true } } }, "node_modules/fork-ts-checker-webpack-plugin": { "version": "6.5.2", "resolved": "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz", "integrity": "sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA==", "dependencies": { 
"@babel/code-frame": "^7.8.3", "@types/json-schema": "^7.0.5", "chalk": "^4.1.0", "chokidar": "^3.4.2", "cosmiconfig": "^6.0.0", "deepmerge": "^4.2.2", "fs-extra": "^9.0.0", "glob": "^7.1.6", "memfs": "^3.1.2", "minimatch": "^3.0.4", "schema-utils": "2.7.0", "semver": "^7.3.2", "tapable": "^1.0.0" }, "engines": { "node": ">=10", "yarn": ">=1.0.0" }, "peerDependencies": { "eslint": ">= 6", "typescript": ">= 2.7", "vue-template-compiler": "*", "webpack": ">= 4" }, "peerDependenciesMeta": { "eslint": { "optional": true }, "vue-template-compiler": { "optional": true } } }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/cosmiconfig": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz", "integrity": "sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg==", "dependencies": { "@types/parse-json": "^4.0.0", "import-fresh": "^3.1.0", "parse-json": "^5.0.0", "path-type": "^4.0.0", "yaml": "^1.7.2" }, "engines": { "node": ">=8" } }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/fs-extra": { "version": "9.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", "dependencies": { "at-least-node": "^1.0.0", "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" }, "engines": { "node": ">=10" } }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/schema-utils": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz", "integrity": "sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A==", "dependencies": { "@types/json-schema": "^7.0.4", "ajv": "^6.12.2", "ajv-keywords": "^3.4.1" }, "engines": { "node": ">= 8.9.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" } }, 
"node_modules/fork-ts-checker-webpack-plugin/node_modules/tapable": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz", "integrity": "sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==", "engines": { "node": ">=6" } }, "node_modules/forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", "engines": { "node": ">= 0.6" } }, "node_modules/fraction.js": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz", "integrity": "sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==", "engines": { "node": "*" }, "funding": { "type": "patreon", "url": "https://www.patreon.com/infusion" } }, "node_modules/fresh": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", "engines": { "node": ">= 0.6" } }, "node_modules/fs-extra": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" }, "engines": { "node": ">=12" } }, "node_modules/fs-monkey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.3.tgz", "integrity": "sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q==" }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, "node_modules/fsevents": { "version": "2.3.2", 
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", "hasInstallScript": true, "optional": true, "os": [ "darwin" ], "engines": { "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, "node_modules/function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" }, "node_modules/gensync": { "version": "1.0.0-beta.2", "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", "engines": { "node": ">=6.9.0" } }, "node_modules/get-intrinsic": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", "dependencies": { "function-bind": "^1.1.1", "has": "^1.0.3", "has-symbols": "^1.0.1" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/get-own-enumerable-property-symbols": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz", "integrity": "sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==" }, "node_modules/get-stream": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", "dependencies": { "pump": "^3.0.0" }, "engines": { "node": ">=6" } }, "node_modules/github-slugger": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-1.4.0.tgz", 
"integrity": "sha512-w0dzqw/nt51xMVmlaV1+JRzN+oCa1KfcgGEWhxUG16wbdA+Xnt/yoFO8Z8x/V82ZcZ0wy6ln9QDup5avbhiDhQ==" }, "node_modules/glob": { "version": "7.1.7", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.0.4", "once": "^1.3.0", "path-is-absolute": "^1.0.0" }, "engines": { "node": "*" }, "funding": { "url": "https://github.com/sponsors/isaacs" } }, "node_modules/glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dependencies": { "is-glob": "^4.0.1" }, "engines": { "node": ">= 6" } }, "node_modules/glob-to-regexp": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==" }, "node_modules/global-dirs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz", "integrity": "sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==", "dependencies": { "ini": "2.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/global-dirs/node_modules/ini": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", "engines": { "node": ">=10" } }, "node_modules/global-modules": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz", "integrity": 
"sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==", "dependencies": { "global-prefix": "^3.0.0" }, "engines": { "node": ">=6" } }, "node_modules/global-prefix": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-3.0.0.tgz", "integrity": "sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==", "dependencies": { "ini": "^1.3.5", "kind-of": "^6.0.2", "which": "^1.3.1" }, "engines": { "node": ">=6" } }, "node_modules/global-prefix/node_modules/which": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", "dependencies": { "isexe": "^2.0.0" }, "bin": { "which": "bin/which" } }, "node_modules/globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", "engines": { "node": ">=4" } }, "node_modules/globby": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", "dependencies": { "array-union": "^2.1.0", "dir-glob": "^3.0.1", "fast-glob": "^3.2.9", "ignore": "^5.2.0", "merge2": "^1.4.1", "slash": "^3.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/got": { "version": "9.6.0", "resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz", "integrity": "sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==", "dependencies": { "@sindresorhus/is": "^0.14.0", "@szmarczak/http-timer": "^1.1.2", "cacheable-request": "^6.0.0", "decompress-response": "^3.3.0", "duplexer3": "^0.1.4", "get-stream": 
"^4.1.0", "lowercase-keys": "^1.0.1", "mimic-response": "^1.0.1", "p-cancelable": "^1.0.0", "to-readable-stream": "^1.0.0", "url-parse-lax": "^3.0.0" }, "engines": { "node": ">=8.6" } }, "node_modules/graceful-fs": { "version": "4.2.9", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==" }, "node_modules/gray-matter": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz", "integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==", "dependencies": { "js-yaml": "^3.13.1", "kind-of": "^6.0.2", "section-matter": "^1.0.0", "strip-bom-string": "^1.0.0" }, "engines": { "node": ">=6.0" } }, "node_modules/gzip-size": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz", "integrity": "sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==", "dependencies": { "duplexer": "^0.1.2" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/handle-thing": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz", "integrity": "sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==" }, "node_modules/has": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", "dependencies": { "function-bind": "^1.1.1" }, "engines": { "node": ">= 0.4.0" } }, "node_modules/has-bigints": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz", "integrity": "sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==", "funding": { 
"url": "https://github.com/sponsors/ljharb" } }, "node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", "engines": { "node": ">=4" } }, "node_modules/has-symbols": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", "engines": { "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/has-tostringtag": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", "dependencies": { "has-symbols": "^1.0.2" }, "engines": { "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/has-yarn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz", "integrity": "sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==", "engines": { "node": ">=8" } }, "node_modules/hast-to-hyperscript": { "version": "9.0.1", "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz", "integrity": "sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA==", "dependencies": { "@types/unist": "^2.0.3", "comma-separated-tokens": "^1.0.0", "property-information": "^5.3.0", "space-separated-tokens": "^1.0.0", "style-to-object": "^0.3.0", "unist-util-is": "^4.0.0", "web-namespaces": "^1.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/hast-util-from-parse5": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-6.0.1.tgz", "integrity": 
"sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA==", "dependencies": { "@types/parse5": "^5.0.0", "hastscript": "^6.0.0", "property-information": "^5.0.0", "vfile": "^4.0.0", "vfile-location": "^3.2.0", "web-namespaces": "^1.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/hast-util-parse-selector": { "version": "2.2.5", "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz", "integrity": "sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/hast-util-raw": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-6.0.1.tgz", "integrity": "sha512-ZMuiYA+UF7BXBtsTBNcLBF5HzXzkyE6MLzJnL605LKE8GJylNjGc4jjxazAHUtcwT5/CEt6afRKViYB4X66dig==", "dependencies": { "@types/hast": "^2.0.0", "hast-util-from-parse5": "^6.0.0", "hast-util-to-parse5": "^6.0.0", "html-void-elements": "^1.0.0", "parse5": "^6.0.0", "unist-util-position": "^3.0.0", "vfile": "^4.0.0", "web-namespaces": "^1.0.0", "xtend": "^4.0.0", "zwitch": "^1.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/hast-util-raw/node_modules/parse5": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==" }, "node_modules/hast-util-to-parse5": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz", "integrity": "sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ==", "dependencies": { "hast-to-hyperscript": "^9.0.0", "property-information": "^5.0.0", "web-namespaces": "^1.0.0", "xtend": 
"^4.0.0", "zwitch": "^1.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/hastscript": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz", "integrity": "sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==", "dependencies": { "@types/hast": "^2.0.0", "comma-separated-tokens": "^1.0.0", "hast-util-parse-selector": "^2.0.0", "property-information": "^5.0.0", "space-separated-tokens": "^1.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", "bin": { "he": "bin/he" } }, "node_modules/history": { "version": "4.10.1", "resolved": "https://registry.npmjs.org/history/-/history-4.10.1.tgz", "integrity": "sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==", "dependencies": { "@babel/runtime": "^7.1.2", "loose-envify": "^1.2.0", "resolve-pathname": "^3.0.0", "tiny-invariant": "^1.0.2", "tiny-warning": "^1.0.0", "value-equal": "^1.0.1" } }, "node_modules/hoist-non-react-statics": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", "dependencies": { "react-is": "^16.7.0" } }, "node_modules/hpack.js": { "version": "2.1.6", "resolved": "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz", "integrity": "sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==", "dependencies": { "inherits": "^2.0.1", "obuf": "^1.0.0", "readable-stream": "^2.0.1", "wbuf": "^1.1.0" } }, 
"node_modules/hpack.js/node_modules/readable-stream": { "version": "2.3.7", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "node_modules/hpack.js/node_modules/string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "dependencies": { "safe-buffer": "~5.1.0" } }, "node_modules/html-entities": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.3.3.tgz", "integrity": "sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA==" }, "node_modules/html-minifier-terser": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", "integrity": "sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw==", "dependencies": { "camel-case": "^4.1.2", "clean-css": "^5.2.2", "commander": "^8.3.0", "he": "^1.2.0", "param-case": "^3.0.4", "relateurl": "^0.2.7", "terser": "^5.10.0" }, "bin": { "html-minifier-terser": "cli.js" }, "engines": { "node": ">=12" } }, "node_modules/html-minifier-terser/node_modules/commander": { "version": "8.3.0", "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", "engines": { "node": ">= 12" } }, "node_modules/html-tags": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.2.0.tgz", 
"integrity": "sha512-vy7ClnArOZwCnqZgvv+ddgHgJiAFXe3Ge9ML5/mBctVJoUoYPCdxVucOywjDARn6CVoh3dRSFdPHy2sX80L0Wg==", "engines": { "node": ">=8" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/html-void-elements": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-1.0.5.tgz", "integrity": "sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/html-webpack-plugin": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz", "integrity": "sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw==", "dependencies": { "@types/html-minifier-terser": "^6.0.0", "html-minifier-terser": "^6.0.2", "lodash": "^4.17.21", "pretty-error": "^4.0.0", "tapable": "^2.0.0" }, "engines": { "node": ">=10.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/html-webpack-plugin" }, "peerDependencies": { "webpack": "^5.20.0" } }, "node_modules/htmlparser2": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz", "integrity": "sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==", "funding": [ "https://github.com/fb55/htmlparser2?sponsor=1", { "type": "github", "url": "https://github.com/sponsors/fb55" } ], "dependencies": { "domelementtype": "^2.0.1", "domhandler": "^4.0.0", "domutils": "^2.5.2", "entities": "^2.0.0" } }, "node_modules/htmlparser2/node_modules/domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/fb55" } 
] }, "node_modules/http-cache-semantics": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz", "integrity": "sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==" }, "node_modules/http-deceiver": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz", "integrity": "sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==" }, "node_modules/http-errors": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" }, "engines": { "node": ">= 0.8" } }, "node_modules/http-parser-js": { "version": "0.5.8", "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz", "integrity": "sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==" }, "node_modules/http-proxy": { "version": "1.18.1", "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz", "integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==", "dependencies": { "eventemitter3": "^4.0.0", "follow-redirects": "^1.0.0", "requires-port": "^1.0.0" }, "engines": { "node": ">=8.0.0" } }, "node_modules/http-proxy-middleware": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz", "integrity": "sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw==", "dependencies": { "@types/http-proxy": "^1.17.8", "http-proxy": "^1.18.1", "is-glob": "^4.0.1", "is-plain-obj": "^3.0.0", "micromatch": "^4.0.2" }, "engines": { "node": 
">=12.0.0" }, "peerDependencies": { "@types/express": "^4.17.13" }, "peerDependenciesMeta": { "@types/express": { "optional": true } } }, "node_modules/http-proxy-middleware/node_modules/is-plain-obj": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", "integrity": "sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==", "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/human-signals": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", "engines": { "node": ">=10.17.0" } }, "node_modules/iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", "dependencies": { "safer-buffer": ">= 2.1.2 < 3" }, "engines": { "node": ">=0.10.0" } }, "node_modules/icss-utils": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz", "integrity": "sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==", "engines": { "node": "^10 || ^12 || >= 14" }, "peerDependencies": { "postcss": "^8.1.0" } }, "node_modules/ignore": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", "engines": { "node": ">= 4" } }, "node_modules/image-size": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/image-size/-/image-size-1.0.2.tgz", "integrity": "sha512-xfOoWjceHntRb3qFCrh5ZFORYH8XCdYpASltMhZ/Q0KZiOwjdE/Yl2QCiWdwD+lygV5bMCvauzgu5PxBX/Yerg==", "dependencies": { "queue": "6.0.2" }, "bin": { 
"image-size": "bin/image-size.js" }, "engines": { "node": ">=14.0.0" } }, "node_modules/immer": { "version": "9.0.15", "resolved": "https://registry.npmjs.org/immer/-/immer-9.0.15.tgz", "integrity": "sha512-2eB/sswms9AEUSkOm4SbV5Y7Vmt/bKRwByd52jfLkW4OLYeaTP3EEiJ9agqU0O/tq6Dk62Zfj+TJSqfm1rLVGQ==", "funding": { "type": "opencollective", "url": "https://opencollective.com/immer" } }, "node_modules/import-fresh": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" }, "engines": { "node": ">=6" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/import-lazy": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz", "integrity": "sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM=", "engines": { "node": ">=4" } }, "node_modules/imurmurhash": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", "engines": { "node": ">=0.8.19" } }, "node_modules/indent-string": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", "engines": { "node": ">=8" } }, "node_modules/infima": { "version": "0.2.0-alpha.42", "resolved": "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.42.tgz", "integrity": "sha512-ift8OXNbQQwtbIt6z16KnSWP7uJ/SysSMFI4F87MNRTicypfl4Pv3E2OGVv6N3nSZFJvA8imYulCBS64iyHYww==", "engines": { "node": ">=12" } }, "node_modules/inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", "dependencies": { "once": "^1.3.0", "wrappy": "1" } 
}, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "node_modules/ini": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" }, "node_modules/inline-style-parser": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==" }, "node_modules/internal-slot": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", "dependencies": { "get-intrinsic": "^1.1.0", "has": "^1.0.3", "side-channel": "^1.0.4" }, "engines": { "node": ">= 0.4" } }, "node_modules/interpret": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", "engines": { "node": ">= 0.10" } }, "node_modules/invariant": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", "dependencies": { "loose-envify": "^1.0.0" } }, "node_modules/ipaddr.js": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.0.1.tgz", "integrity": "sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng==", "engines": { "node": ">= 10" } }, "node_modules/is-alphabetical": { "version": "1.0.4", "resolved": 
"https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", "integrity": "sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/is-alphanumerical": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz", "integrity": "sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==", "dependencies": { "is-alphabetical": "^1.0.0", "is-decimal": "^1.0.0" }, "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" }, "node_modules/is-bigint": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", "dependencies": { "has-bigints": "^1.0.1" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-binary-path": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", "dependencies": { "binary-extensions": "^2.0.0" }, "engines": { "node": ">=8" } }, "node_modules/is-boolean-object": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" }, "engines": { "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-buffer": { "version": 
"2.0.5", "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/feross" }, { "type": "patreon", "url": "https://www.patreon.com/feross" }, { "type": "consulting", "url": "https://feross.org/support" } ], "engines": { "node": ">=4" } }, "node_modules/is-callable": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", "engines": { "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-ci": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", "dependencies": { "ci-info": "^2.0.0" }, "bin": { "is-ci": "bin.js" } }, "node_modules/is-ci/node_modules/ci-info": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==" }, "node_modules/is-core-module": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.6.0.tgz", "integrity": "sha512-wShG8vs60jKfPWpF2KZRaAtvt3a20OAn7+IJ6hLPECpSABLcKtFKTTI4ZtH5QcBruBHlq+WsdHWyz0BCZW7svQ==", "dependencies": { "has": "^1.0.3" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-date-object": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", "dependencies": { "has-tostringtag": "^1.0.0" }, "engines": { "node": ">= 0.4" }, 
"funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-decimal": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/is-docker": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", "bin": { "is-docker": "cli.js" }, "engines": { "node": ">=8" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/is-extendable": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", "engines": { "node": ">=0.10.0" } }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "engines": { "node": ">=0.10.0" } }, "node_modules/is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dependencies": { "is-extglob": "^2.1.1" }, "engines": { "node": ">=0.10.0" } }, "node_modules/is-hexadecimal": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz", "integrity": "sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/is-installed-globally": { "version": "0.4.0", 
"resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", "dependencies": { "global-dirs": "^3.0.0", "is-path-inside": "^3.0.2" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/is-negative-zero": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz", "integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==", "engines": { "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-npm": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-5.0.0.tgz", "integrity": "sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA==", "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "engines": { "node": ">=0.12.0" } }, "node_modules/is-number-object": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.6.tgz", "integrity": "sha512-bEVOqiRcvo3zO1+G2lVMy+gkkEm9Yh7cDMRusKKu5ZJKPUYSJwICTKZrNKHA2EbSP0Tu0+6B/emsYNHZyn6K8g==", "dependencies": { "has-tostringtag": "^1.0.0" }, "engines": { "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", "engines": { "node": ">=8" } }, 
"node_modules/is-path-cwd": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz", "integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==", "engines": { "node": ">=6" } }, "node_modules/is-path-inside": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", "engines": { "node": ">=8" } }, "node_modules/is-plain-obj": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", "engines": { "node": ">=8" } }, "node_modules/is-plain-object": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", "dependencies": { "isobject": "^3.0.1" }, "engines": { "node": ">=0.10.0" } }, "node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" }, "engines": { "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-regexp": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz", "integrity": "sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==", "engines": { "node": ">=0.10.0" } }, "node_modules/is-root": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz", "integrity": 
"sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg==", "engines": { "node": ">=6" } }, "node_modules/is-stream": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "engines": { "node": ">=8" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/is-string": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", "dependencies": { "has-tostringtag": "^1.0.0" }, "engines": { "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-symbol": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", "dependencies": { "has-symbols": "^1.0.2" }, "engines": { "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" }, "node_modules/is-whitespace-character": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz", "integrity": "sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/is-word-character": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.4.tgz", "integrity": 
"sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/is-wsl": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", "dependencies": { "is-docker": "^2.0.0" }, "engines": { "node": ">=8" } }, "node_modules/is-yarn-global": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz", "integrity": "sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==" }, "node_modules/isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" }, "node_modules/isobject": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", "engines": { "node": ">=0.10.0" } }, "node_modules/jest-worker": { "version": "27.5.1", "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", "dependencies": { "@types/node": "*", "merge-stream": "^2.0.0", "supports-color": "^8.0.0" }, "engines": { "node": ">= 10.13.0" } }, "node_modules/jest-worker/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "engines": { "node": ">=8" } }, "node_modules/jest-worker/node_modules/supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "dependencies": { "has-flag": "^4.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/chalk/supports-color?sponsor=1" } }, "node_modules/joi": { "version": "17.6.0", "resolved": "https://registry.npmjs.org/joi/-/joi-17.6.0.tgz", "integrity": "sha512-OX5dG6DTbcr/kbMFj0KGYxuew69HPcAE3K/sZpEV2nP6e/j/C0HV+HNiBPCASxdx5T7DMoa0s8UeHWMnb6n2zw==", "dependencies": { "@hapi/hoek": "^9.0.0", "@hapi/topo": "^5.0.0", "@sideway/address": "^4.1.3", "@sideway/formula": "^3.0.0", "@sideway/pinpoint": "^2.0.0" } }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" }, "node_modules/js-yaml": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "node_modules/jsesc": { "version": "2.5.2", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", "bin": { "jsesc": "bin/jsesc" }, "engines": { "node": ">=4" } }, "node_modules/json-buffer": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", "integrity": "sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=" }, 
"node_modules/json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" }, "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" }, "node_modules/json5": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz", "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==", "bin": { "json5": "lib/cli.js" }, "engines": { "node": ">=6" } }, "node_modules/jsonfile": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", "dependencies": { "universalify": "^2.0.0" }, "optionalDependencies": { "graceful-fs": "^4.1.6" } }, "node_modules/keyv": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz", "integrity": "sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==", "dependencies": { "json-buffer": "3.0.0" } }, "node_modules/kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", "engines": { "node": ">=0.10.0" } }, "node_modules/kleur": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", "engines": { "node": ">=6" } }, "node_modules/klona": { 
"version": "2.0.5", "resolved": "https://registry.npmjs.org/klona/-/klona-2.0.5.tgz", "integrity": "sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ==", "engines": { "node": ">= 8" } }, "node_modules/latest-version": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz", "integrity": "sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==", "dependencies": { "package-json": "^6.3.0" }, "engines": { "node": ">=8" } }, "node_modules/leven": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", "engines": { "node": ">=6" } }, "node_modules/lilconfig": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.6.tgz", "integrity": "sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg==", "engines": { "node": ">=10" } }, "node_modules/lines-and-columns": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=" }, "node_modules/loader-runner": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.2.0.tgz", "integrity": "sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw==", "engines": { "node": ">=6.11.5" } }, "node_modules/loader-utils": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.0.tgz", "integrity": "sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ==", "dependencies": { "big.js": "^5.2.2", "emojis-list": "^3.0.0", "json5": "^2.1.2" }, "engines": { "node": ">=8.9.0" } }, "node_modules/locate-path": { "version": "5.0.0", "resolved": 
"https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dependencies": { "p-locate": "^4.1.0" }, "engines": { "node": ">=8" } }, "node_modules/lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, "node_modules/lodash.curry": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/lodash.curry/-/lodash.curry-4.1.1.tgz", "integrity": "sha512-/u14pXGviLaweY5JI0IUzgzF2J6Ne8INyzAZjImcryjgkZ+ebruBxy2/JaOOkTqScddcYtakjhSaeemV8lR0tA==" }, "node_modules/lodash.debounce": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=" }, "node_modules/lodash.flow": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/lodash.flow/-/lodash.flow-3.5.0.tgz", "integrity": "sha512-ff3BX/tSioo+XojX4MOsOMhJw0nZoUEF011LX8g8d3gvjVbxd89cCio4BCXronjxcTUIJUoqKEUA+n4CqvvRPw==" }, "node_modules/lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==" }, "node_modules/lodash.uniq": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==" }, "node_modules/loose-envify": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, 
"node_modules/lower-case": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", "dependencies": { "tslib": "^2.0.3" } }, "node_modules/lowercase-keys": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", "engines": { "node": ">=0.10.0" } }, "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dependencies": { "yallist": "^4.0.0" }, "engines": { "node": ">=10" } }, "node_modules/make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "dependencies": { "semver": "^6.0.0" }, "engines": { "node": ">=8" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/make-dir/node_modules/semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/markdown-escapes": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.4.tgz", "integrity": "sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/mdast-squeeze-paragraphs": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/mdast-squeeze-paragraphs/-/mdast-squeeze-paragraphs-4.0.0.tgz", "integrity": "sha512-zxdPn69hkQ1rm4J+2Cs2j6wDEv7O17TfXTJ33tl/+JPIoEmtV9t2ZzBM5LPHE8QlHsmVD8t3vPKCyY3oH+H8MQ==", "dependencies": { "unist-util-remove": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/mdast-util-definitions": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz", "integrity": "sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ==", "dependencies": { "unist-util-visit": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/mdast-util-to-hast": { "version": "10.0.1", "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-10.0.1.tgz", "integrity": "sha512-BW3LM9SEMnjf4HXXVApZMt8gLQWVNXc3jryK0nJu/rOXPOnlkUjmdkDlmxMirpbU9ILncGFIwLH/ubnWBbcdgA==", "dependencies": { "@types/mdast": "^3.0.0", "@types/unist": "^2.0.0", "mdast-util-definitions": "^4.0.0", "mdurl": "^1.0.0", "unist-builder": "^2.0.0", "unist-util-generated": "^1.0.0", "unist-util-position": "^3.0.0", "unist-util-visit": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/mdast-util-to-string": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz", "integrity": "sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/mdn-data": { "version": "2.0.14", "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" }, "node_modules/mdurl": { "version": "1.0.1", 
"resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==" }, "node_modules/media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", "engines": { "node": ">= 0.6" } }, "node_modules/memfs": { "version": "3.4.7", "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.4.7.tgz", "integrity": "sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw==", "dependencies": { "fs-monkey": "^1.0.3" }, "engines": { "node": ">= 4.0.0" } }, "node_modules/merge-descriptors": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" }, "node_modules/merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", "engines": { "node": ">= 8" } }, "node_modules/methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", "engines": { "node": ">= 0.6" } }, "node_modules/micromatch": { "version": "4.0.5", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", "integrity": 
"sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", "dependencies": { "braces": "^3.0.2", "picomatch": "^2.3.1" }, "engines": { "node": ">=8.6" } }, "node_modules/mime": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", "bin": { "mime": "cli.js" }, "engines": { "node": ">=4" } }, "node_modules/mime-db": { "version": "1.51.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", "engines": { "node": ">= 0.6" } }, "node_modules/mime-types": { "version": "2.1.34", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", "dependencies": { "mime-db": "1.51.0" }, "engines": { "node": ">= 0.6" } }, "node_modules/mimic-fn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", "engines": { "node": ">=6" } }, "node_modules/mimic-response": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", "engines": { "node": ">=4" } }, "node_modules/mini-create-react-context": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/mini-create-react-context/-/mini-create-react-context-0.4.1.tgz", "integrity": "sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ==", "dependencies": { "@babel/runtime": "^7.12.1", "tiny-warning": "^1.0.3" }, "peerDependencies": { "prop-types": 
"^15.0.0", "react": "^0.14.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/mini-css-extract-plugin": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.1.tgz", "integrity": "sha512-wd+SD57/K6DiV7jIR34P+s3uckTRuQvx0tKPcvjFlrEylk6P4mQ2KSWk1hblj1Kxaqok7LogKOieygXqBczNlg==", "dependencies": { "schema-utils": "^4.0.0" }, "engines": { "node": ">= 12.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" }, "peerDependencies": { "webpack": "^5.0.0" } }, "node_modules/mini-css-extract-plugin/node_modules/ajv": { "version": "8.11.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2", "uri-js": "^4.2.2" }, "funding": { "type": "github", "url": "https://github.com/sponsors/epoberezkin" } }, "node_modules/mini-css-extract-plugin/node_modules/ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "dependencies": { "fast-deep-equal": "^3.1.3" }, "peerDependencies": { "ajv": "^8.8.2" } }, "node_modules/mini-css-extract-plugin/node_modules/json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "node_modules/mini-css-extract-plugin/node_modules/schema-utils": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", 
"dependencies": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", "ajv-formats": "^2.1.1", "ajv-keywords": "^5.0.0" }, "engines": { "node": ">= 12.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" } }, "node_modules/minimalistic-assert": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" }, "node_modules/minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "dependencies": { "brace-expansion": "^1.1.7" }, "engines": { "node": "*" } }, "node_modules/minimist": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" }, "node_modules/mkdirp": { "version": "0.5.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", "dependencies": { "minimist": "^1.2.5" }, "bin": { "mkdirp": "bin/cmd.js" } }, "node_modules/mrmime": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-1.0.0.tgz", "integrity": "sha512-a70zx7zFfVO7XpnQ2IX1Myh9yY4UYvfld/dikWRnsXxbyvMcfz+u6UfgNAtH+k2QqtJuzVpv6eLTx1G2+WKZbQ==", "engines": { "node": ">=10" } }, "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "node_modules/multicast-dns": { "version": "7.2.5", "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz", "integrity": 
"sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==", "dependencies": { "dns-packet": "^5.2.2", "thunky": "^1.0.2" }, "bin": { "multicast-dns": "cli.js" } }, "node_modules/nanoid": { "version": "3.3.4", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz", "integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==", "bin": { "nanoid": "bin/nanoid.cjs" }, "engines": { "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, "node_modules/negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", "engines": { "node": ">= 0.6" } }, "node_modules/neo-async": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==" }, "node_modules/no-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", "dependencies": { "lower-case": "^2.0.2", "tslib": "^2.0.3" } }, "node_modules/node-emoji": { "version": "1.11.0", "resolved": "https://registry.npmjs.org/node-emoji/-/node-emoji-1.11.0.tgz", "integrity": "sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A==", "dependencies": { "lodash": "^4.17.21" } }, "node_modules/node-fetch": { "version": "2.6.7", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", "dependencies": { "whatwg-url": "^5.0.0" }, "engines": { "node": "4.x || >=6.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, 
"peerDependenciesMeta": { "encoding": { "optional": true } } }, "node_modules/node-forge": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", "engines": { "node": ">= 6.13.0" } }, "node_modules/node-releases": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.4.tgz", "integrity": "sha512-gbMzqQtTtDz/00jQzZ21PQzdI9PyLYqUSvD0p3naOhX4odFji0ZxYdnVwPTxmSwkmxhcFImpozceidSG+AgoPQ==" }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", "engines": { "node": ">=0.10.0" } }, "node_modules/normalize-range": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", "engines": { "node": ">=0.10.0" } }, "node_modules/normalize-url": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==", "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/npm-run-path": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", "dependencies": { "path-key": "^3.0.0" }, "engines": { "node": ">=8" } }, "node_modules/nprogress": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/nprogress/-/nprogress-0.2.0.tgz", "integrity": 
"sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA==" }, "node_modules/nth-check": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz", "integrity": "sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg==", "dependencies": { "boolbase": "~1.0.0" } }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", "engines": { "node": ">=0.10.0" } }, "node_modules/object-inspect": { "version": "1.11.0", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/object-keys": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", "engines": { "node": ">= 0.4" } }, "node_modules/object.assign": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz", "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==", "dependencies": { "call-bind": "^1.0.0", "define-properties": "^1.1.3", "has-symbols": "^1.0.1", "object-keys": "^1.1.1" }, "engines": { "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/object.getownpropertydescriptors": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.2.tgz", "integrity": "sha512-WtxeKSzfBjlzL+F9b7M7hewDzMwy+C8NRssHd1YrNlzHzIDrXcXiNOMrezdAEM4UXixgV+vvnyBeN7Rygl2ttQ==", "dependencies": { "call-bind": "^1.0.2", 
"define-properties": "^1.1.3", "es-abstract": "^1.18.0-next.2" }, "engines": { "node": ">= 0.8" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/object.values": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.4.tgz", "integrity": "sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg==", "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.1.3", "es-abstract": "^1.18.2" }, "engines": { "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/obuf": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==" }, "node_modules/on-finished": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", "dependencies": { "ee-first": "1.1.1" }, "engines": { "node": ">= 0.8" } }, "node_modules/on-headers": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", "engines": { "node": ">= 0.8" } }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "dependencies": { "wrappy": "1" } }, "node_modules/onetime": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", "dependencies": { "mimic-fn": "^2.1.0" }, "engines": { "node": ">=6" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/open": { 
"version": "8.4.0", "resolved": "https://registry.npmjs.org/open/-/open-8.4.0.tgz", "integrity": "sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q==", "dependencies": { "define-lazy-prop": "^2.0.0", "is-docker": "^2.1.1", "is-wsl": "^2.2.0" }, "engines": { "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/opener": { "version": "1.5.2", "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz", "integrity": "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==", "bin": { "opener": "bin/opener-bin.js" } }, "node_modules/p-cancelable": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz", "integrity": "sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==", "engines": { "node": ">=6" } }, "node_modules/p-limit": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dependencies": { "p-try": "^2.0.0" }, "engines": { "node": ">=6" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/p-locate": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dependencies": { "p-limit": "^2.2.0" }, "engines": { "node": ">=8" } }, "node_modules/p-map": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", "dependencies": { "aggregate-error": "^3.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/p-retry": { 
"version": "4.6.2", "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz", "integrity": "sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==", "dependencies": { "@types/retry": "0.12.0", "retry": "^0.13.1" }, "engines": { "node": ">=8" } }, "node_modules/p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "engines": { "node": ">=6" } }, "node_modules/package-json": { "version": "6.5.0", "resolved": "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz", "integrity": "sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==", "dependencies": { "got": "^9.6.0", "registry-auth-token": "^4.0.0", "registry-url": "^5.0.0", "semver": "^6.2.0" }, "engines": { "node": ">=8" } }, "node_modules/package-json/node_modules/semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/param-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz", "integrity": "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==", "dependencies": { "dot-case": "^3.0.4", "tslib": "^2.0.3" } }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "dependencies": { "callsites": "^3.0.0" }, "engines": { "node": ">=6" } }, "node_modules/parse-entities": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", 
"integrity": "sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==", "dependencies": { "character-entities": "^1.0.0", "character-entities-legacy": "^1.0.0", "character-reference-invalid": "^1.0.0", "is-alphanumerical": "^1.0.0", "is-decimal": "^1.0.0", "is-hexadecimal": "^1.0.0" }, "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/parse-json": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", "dependencies": { "@babel/code-frame": "^7.0.0", "error-ex": "^1.3.1", "json-parse-even-better-errors": "^2.3.0", "lines-and-columns": "^1.1.6" }, "engines": { "node": ">=8" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/parse-numeric-range": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz", "integrity": "sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ==" }, "node_modules/parse5": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.0.0.tgz", "integrity": "sha512-y/t8IXSPWTuRZqXc0ajH/UwDj4mnqLEbSttNbThcFhGrZuOyoyvNBO85PBp2jQa55wY9d07PBNjsK8ZP3K5U6g==", "dependencies": { "entities": "^4.3.0" }, "funding": { "url": "https://github.com/inikulin/parse5?sponsor=1" } }, "node_modules/parse5-htmlparser2-tree-adapter": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz", "integrity": "sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==", "dependencies": { "domhandler": "^5.0.2", "parse5": "^7.0.0" }, "funding": { "url": "https://github.com/inikulin/parse5?sponsor=1" } }, 
"node_modules/parse5-htmlparser2-tree-adapter/node_modules/domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/fb55" } ] }, "node_modules/parse5-htmlparser2-tree-adapter/node_modules/domhandler": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", "dependencies": { "domelementtype": "^2.3.0" }, "engines": { "node": ">= 4" }, "funding": { "url": "https://github.com/fb55/domhandler?sponsor=1" } }, "node_modules/parse5/node_modules/entities": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/entities/-/entities-4.3.1.tgz", "integrity": "sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg==", "engines": { "node": ">=0.12" }, "funding": { "url": "https://github.com/fb55/entities?sponsor=1" } }, "node_modules/parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", "engines": { "node": ">= 0.8" } }, "node_modules/pascal-case": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "engines": { "node": ">=8" } 
}, "node_modules/path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", "engines": { "node": ">=0.10.0" } }, "node_modules/path-is-inside": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=" }, "node_modules/path-key": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "engines": { "node": ">=8" } }, "node_modules/path-parse": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" }, "node_modules/path-to-regexp": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, "node_modules/path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", "engines": { "node": ">=8" } }, "node_modules/picocolors": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "engines": { "node": ">=8.6" }, "funding": { "url": 
"https://github.com/sponsors/jonschlinkert" } }, "node_modules/pkg-dir": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", "dependencies": { "find-up": "^4.0.0" }, "engines": { "node": ">=8" } }, "node_modules/pkg-up": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz", "integrity": "sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==", "dependencies": { "find-up": "^3.0.0" }, "engines": { "node": ">=8" } }, "node_modules/pkg-up/node_modules/find-up": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", "dependencies": { "locate-path": "^3.0.0" }, "engines": { "node": ">=6" } }, "node_modules/pkg-up/node_modules/locate-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", "dependencies": { "p-locate": "^3.0.0", "path-exists": "^3.0.0" }, "engines": { "node": ">=6" } }, "node_modules/pkg-up/node_modules/p-locate": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", "dependencies": { "p-limit": "^2.0.0" }, "engines": { "node": ">=6" } }, "node_modules/pkg-up/node_modules/path-exists": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", "engines": { "node": ">=4" } }, "node_modules/postcss": { "version": "8.4.14", 
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.14.tgz", "integrity": "sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==", "funding": [ { "type": "opencollective", "url": "https://opencollective.com/postcss/" }, { "type": "tidelift", "url": "https://tidelift.com/funding/github/npm/postcss" } ], "dependencies": { "nanoid": "^3.3.4", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" }, "engines": { "node": "^10 || ^12 || >=14" } }, "node_modules/postcss-calc": { "version": "8.2.4", "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-8.2.4.tgz", "integrity": "sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q==", "dependencies": { "postcss-selector-parser": "^6.0.9", "postcss-value-parser": "^4.2.0" }, "peerDependencies": { "postcss": "^8.2.2" } }, "node_modules/postcss-colormin": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-5.3.0.tgz", "integrity": "sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg==", "dependencies": { "browserslist": "^4.16.6", "caniuse-api": "^3.0.0", "colord": "^2.9.1", "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-convert-values": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-5.1.2.tgz", "integrity": "sha512-c6Hzc4GAv95B7suy4udszX9Zy4ETyMCgFPUDtWjdFTKH1SE9eFY/jEpHSwTH1QPuwxHpWslhckUQWbNRM4ho5g==", "dependencies": { "browserslist": "^4.20.3", "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-discard-comments": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz", "integrity": 
"sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ==", "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-discard-duplicates": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz", "integrity": "sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw==", "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-discard-empty": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz", "integrity": "sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A==", "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-discard-overridden": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz", "integrity": "sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw==", "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-discard-unused": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-discard-unused/-/postcss-discard-unused-5.1.0.tgz", "integrity": "sha512-KwLWymI9hbwXmJa0dkrzpRbSJEh0vVUd7r8t0yOGPcfKzyJJxFM8kLyC5Ev9avji6nY95pOp1W6HqIrfT+0VGw==", "dependencies": { "postcss-selector-parser": "^6.0.5" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-loader": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/postcss-loader/-/postcss-loader-7.0.1.tgz", "integrity": "sha512-VRviFEyYlLjctSM93gAZtcJJ/iSkPZ79zWbN/1fSH+NisBByEiVLqpdVDrPLVSi8DX0oJo12kL/GppTBdKVXiQ==", 
"dependencies": { "cosmiconfig": "^7.0.0", "klona": "^2.0.5", "semver": "^7.3.7" }, "engines": { "node": ">= 14.15.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" }, "peerDependencies": { "postcss": "^7.0.0 || ^8.0.1", "webpack": "^5.0.0" } }, "node_modules/postcss-loader/node_modules/semver": { "version": "7.3.7", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", "dependencies": { "lru-cache": "^6.0.0" }, "bin": { "semver": "bin/semver.js" }, "engines": { "node": ">=10" } }, "node_modules/postcss-merge-idents": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-merge-idents/-/postcss-merge-idents-5.1.1.tgz", "integrity": "sha512-pCijL1TREiCoog5nQp7wUe+TUonA2tC2sQ54UGeMmryK3UFGIYKqDyjnqd6RcuI4znFn9hWSLNN8xKE/vWcUQw==", "dependencies": { "cssnano-utils": "^3.1.0", "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-merge-longhand": { "version": "5.1.6", "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.1.6.tgz", "integrity": "sha512-6C/UGF/3T5OE2CEbOuX7iNO63dnvqhGZeUnKkDeifebY0XqkkvrctYSZurpNE902LDf2yKwwPFgotnfSoPhQiw==", "dependencies": { "postcss-value-parser": "^4.2.0", "stylehacks": "^5.1.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-merge-rules": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.1.2.tgz", "integrity": "sha512-zKMUlnw+zYCWoPN6yhPjtcEdlJaMUZ0WyVcxTAmw3lkkN/NDMRkOkiuctQEoWAOvH7twaxUUdvBWl0d4+hifRQ==", "dependencies": { "browserslist": "^4.16.6", "caniuse-api": "^3.0.0", "cssnano-utils": "^3.1.0", "postcss-selector-parser": "^6.0.5" }, "engines": { "node": "^10 || ^12 || >=14.0" }, 
"peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-minify-font-values": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz", "integrity": "sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA==", "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-minify-gradients": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz", "integrity": "sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw==", "dependencies": { "colord": "^2.9.1", "cssnano-utils": "^3.1.0", "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-minify-params": { "version": "5.1.3", "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.1.3.tgz", "integrity": "sha512-bkzpWcjykkqIujNL+EVEPOlLYi/eZ050oImVtHU7b4lFS82jPnsCb44gvC6pxaNt38Els3jWYDHTjHKf0koTgg==", "dependencies": { "browserslist": "^4.16.6", "cssnano-utils": "^3.1.0", "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-minify-selectors": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz", "integrity": "sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg==", "dependencies": { "postcss-selector-parser": "^6.0.5" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-modules-extract-imports": { "version": "3.0.0", "resolved": 
"https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz", "integrity": "sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw==", "engines": { "node": "^10 || ^12 || >= 14" }, "peerDependencies": { "postcss": "^8.1.0" } }, "node_modules/postcss-modules-local-by-default": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz", "integrity": "sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ==", "dependencies": { "icss-utils": "^5.0.0", "postcss-selector-parser": "^6.0.2", "postcss-value-parser": "^4.1.0" }, "engines": { "node": "^10 || ^12 || >= 14" }, "peerDependencies": { "postcss": "^8.1.0" } }, "node_modules/postcss-modules-scope": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz", "integrity": "sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg==", "dependencies": { "postcss-selector-parser": "^6.0.4" }, "engines": { "node": "^10 || ^12 || >= 14" }, "peerDependencies": { "postcss": "^8.1.0" } }, "node_modules/postcss-modules-values": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz", "integrity": "sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==", "dependencies": { "icss-utils": "^5.0.0" }, "engines": { "node": "^10 || ^12 || >= 14" }, "peerDependencies": { "postcss": "^8.1.0" } }, "node_modules/postcss-normalize-charset": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz", "integrity": "sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg==", "engines": { "node": "^10 || ^12 || >=14.0" }, 
"peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-normalize-display-values": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz", "integrity": "sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA==", "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-normalize-positions": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz", "integrity": "sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg==", "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-normalize-repeat-style": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz", "integrity": "sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g==", "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-normalize-string": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz", "integrity": "sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w==", "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-normalize-timing-functions": { "version": "5.1.0", "resolved": 
"https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz", "integrity": "sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg==", "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-normalize-unicode": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz", "integrity": "sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ==", "dependencies": { "browserslist": "^4.16.6", "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-normalize-url": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz", "integrity": "sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew==", "dependencies": { "normalize-url": "^6.0.1", "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-normalize-whitespace": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz", "integrity": "sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA==", "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-ordered-values": { "version": "5.1.3", "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz", "integrity": "sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ==", 
"dependencies": { "cssnano-utils": "^3.1.0", "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-reduce-idents": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/postcss-reduce-idents/-/postcss-reduce-idents-5.2.0.tgz", "integrity": "sha512-BTrLjICoSB6gxbc58D5mdBK8OhXRDqud/zodYfdSi52qvDHdMwk+9kB9xsM8yJThH/sZU5A6QVSmMmaN001gIg==", "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-reduce-initial": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz", "integrity": "sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw==", "dependencies": { "browserslist": "^4.16.6", "caniuse-api": "^3.0.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-reduce-transforms": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz", "integrity": "sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ==", "dependencies": { "postcss-value-parser": "^4.2.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-selector-parser": { "version": "6.0.10", "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" }, "engines": { "node": ">=4" } }, "node_modules/postcss-sort-media-queries": { "version": "4.2.1", "resolved": 
"https://registry.npmjs.org/postcss-sort-media-queries/-/postcss-sort-media-queries-4.2.1.tgz", "integrity": "sha512-9VYekQalFZ3sdgcTjXMa0dDjsfBVHXlraYJEMiOJ/2iMmI2JGCMavP16z3kWOaRu8NSaJCTgVpB/IVpH5yT9YQ==", "dependencies": { "sort-css-media-queries": "2.0.4" }, "engines": { "node": ">=10.0.0" }, "peerDependencies": { "postcss": "^8.4.4" } }, "node_modules/postcss-svgo": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-5.1.0.tgz", "integrity": "sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA==", "dependencies": { "postcss-value-parser": "^4.2.0", "svgo": "^2.7.0" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-unique-selectors": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz", "integrity": "sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA==", "dependencies": { "postcss-selector-parser": "^6.0.5" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/postcss-value-parser": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==" }, "node_modules/postcss-zindex": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-zindex/-/postcss-zindex-5.1.0.tgz", "integrity": "sha512-fgFMf0OtVSBR1va1JNHYgMxYk73yhn/qb4uQDq1DLGYolz8gHCyr/sesEuGUaYs58E3ZJRcpoGuPVoB7Meiq9A==", "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { "postcss": "^8.2.15" } }, "node_modules/prepend-http": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=", "engines": { 
"node": ">=4" } }, "node_modules/pretty-error": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/pretty-error/-/pretty-error-4.0.0.tgz", "integrity": "sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw==", "dependencies": { "lodash": "^4.17.20", "renderkid": "^3.0.0" } }, "node_modules/pretty-time": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/pretty-time/-/pretty-time-1.1.0.tgz", "integrity": "sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA==", "engines": { "node": ">=4" } }, "node_modules/prism-react-renderer": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/prism-react-renderer/-/prism-react-renderer-1.3.5.tgz", "integrity": "sha512-IJ+MSwBWKG+SM3b2SUfdrhC+gu01QkV2KmRQgREThBfSQRoufqRfxfHUxpG1WcaFjP+kojcFyO9Qqtpgt3qLCg==", "peerDependencies": { "react": ">=0.14.9" } }, "node_modules/prismjs": { "version": "1.28.0", "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.28.0.tgz", "integrity": "sha512-8aaXdYvl1F7iC7Xm1spqSaY/OJBpYW3v+KJ+F17iYxvdc8sfjW194COK5wVhMZX45tGteiBQgdvD/nhxcRwylw==", "engines": { "node": ">=6" } }, "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "node_modules/promise": { "version": "7.3.1", "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", "dependencies": { "asap": "~2.0.3" } }, "node_modules/prompts": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", "dependencies": { "kleur": "^3.0.3", "sisteransi": 
"^1.0.5" }, "engines": { "node": ">= 6" } }, "node_modules/prop-types": { "version": "15.7.2", "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", "react-is": "^16.8.1" } }, "node_modules/property-information": { "version": "5.6.0", "resolved": "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz", "integrity": "sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==", "dependencies": { "xtend": "^4.0.0" }, "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" }, "engines": { "node": ">= 0.10" } }, "node_modules/proxy-addr/node_modules/ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", "engines": { "node": ">= 0.10" } }, "node_modules/pump": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "node_modules/punycode": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", "engines": { "node": ">=6" } }, "node_modules/pupa": { "version": "2.1.1", 
"resolved": "https://registry.npmjs.org/pupa/-/pupa-2.1.1.tgz", "integrity": "sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A==", "dependencies": { "escape-goat": "^2.0.0" }, "engines": { "node": ">=8" } }, "node_modules/pure-color": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/pure-color/-/pure-color-1.3.0.tgz", "integrity": "sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA==" }, "node_modules/q": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", "integrity": "sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=", "engines": { "node": ">=0.6.0", "teleport": ">=0.2.0" } }, "node_modules/qs": { "version": "6.10.3", "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", "dependencies": { "side-channel": "^1.0.4" }, "engines": { "node": ">=0.6" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/querystringify": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==" }, "node_modules/queue": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/queue/-/queue-6.0.2.tgz", "integrity": "sha512-iHZWu+q3IdFZFX36ro/lKBkSvfkztY5Y7HMiPlOUjhupPcG2JMfst2KKEpu5XndviX/3UhFbRngUPNKtgvtZiA==", "dependencies": { "inherits": "~2.0.3" } }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/feross" }, { "type": "patreon", "url": "https://www.patreon.com/feross" }, { "type": "consulting", "url": 
"https://feross.org/support" } ] }, "node_modules/randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", "dependencies": { "safe-buffer": "^5.1.0" } }, "node_modules/range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", "engines": { "node": ">= 0.6" } }, "node_modules/raw-body": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" }, "engines": { "node": ">= 0.8" } }, "node_modules/raw-body/node_modules/bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", "engines": { "node": ">= 0.8" } }, "node_modules/rc": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", "dependencies": { "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" }, "bin": { "rc": "cli.js" } }, "node_modules/react": { "version": "17.0.2", "resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz", "integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==", "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" }, "engines": { "node": ">=0.10.0" } }, "node_modules/react-base16-styling": { "version": 
"0.6.0", "resolved": "https://registry.npmjs.org/react-base16-styling/-/react-base16-styling-0.6.0.tgz", "integrity": "sha512-yvh/7CArceR/jNATXOKDlvTnPKPmGZz7zsenQ3jUwLzHkNUR0CvY3yGYJbWJ/nnxsL8Sgmt5cO3/SILVuPO6TQ==", "dependencies": { "base16": "^1.0.0", "lodash.curry": "^4.0.1", "lodash.flow": "^3.3.0", "pure-color": "^1.2.0" } }, "node_modules/react-dev-utils": { "version": "12.0.1", "resolved": "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-12.0.1.tgz", "integrity": "sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ==", "dependencies": { "@babel/code-frame": "^7.16.0", "address": "^1.1.2", "browserslist": "^4.18.1", "chalk": "^4.1.2", "cross-spawn": "^7.0.3", "detect-port-alt": "^1.1.6", "escape-string-regexp": "^4.0.0", "filesize": "^8.0.6", "find-up": "^5.0.0", "fork-ts-checker-webpack-plugin": "^6.5.0", "global-modules": "^2.0.0", "globby": "^11.0.4", "gzip-size": "^6.0.0", "immer": "^9.0.7", "is-root": "^2.1.0", "loader-utils": "^3.2.0", "open": "^8.4.0", "pkg-up": "^3.1.0", "prompts": "^2.4.2", "react-error-overlay": "^6.0.11", "recursive-readdir": "^2.2.2", "shell-quote": "^1.7.3", "strip-ansi": "^6.0.1", "text-table": "^0.2.0" }, "engines": { "node": ">=14" } }, "node_modules/react-dev-utils/node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/react-dev-utils/node_modules/find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" }, "engines": { "node": ">=10" 
}, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/react-dev-utils/node_modules/loader-utils": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.0.tgz", "integrity": "sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ==", "engines": { "node": ">= 12.13.0" } }, "node_modules/react-dev-utils/node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dependencies": { "p-locate": "^5.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/react-dev-utils/node_modules/p-limit": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dependencies": { "yocto-queue": "^0.1.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/react-dev-utils/node_modules/p-locate": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dependencies": { "p-limit": "^3.0.2" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/react-dom": { "version": "17.0.2", "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz", "integrity": "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==", "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "scheduler": "^0.20.2" }, "peerDependencies": { "react": "17.0.2" } }, 
"node_modules/react-error-overlay": { "version": "6.0.11", "resolved": "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-6.0.11.tgz", "integrity": "sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg==" }, "node_modules/react-fast-compare": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-3.2.0.tgz", "integrity": "sha512-rtGImPZ0YyLrscKI9xTpV8psd6I8VAtjKCzQDlzyDvqJA8XOW78TXYQwNRNd8g8JZnDu8q9Fu/1v4HPAVwVdHA==" }, "node_modules/react-helmet-async": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/react-helmet-async/-/react-helmet-async-1.3.0.tgz", "integrity": "sha512-9jZ57/dAn9t3q6hneQS0wukqC2ENOBgMNVEhb/ZG9ZSxUetzVIw4iAmEU38IaVg3QGYauQPhSeUTuIUtFglWpg==", "dependencies": { "@babel/runtime": "^7.12.5", "invariant": "^2.2.4", "prop-types": "^15.7.2", "react-fast-compare": "^3.2.0", "shallowequal": "^1.1.0" }, "peerDependencies": { "react": "^16.6.0 || ^17.0.0 || ^18.0.0", "react-dom": "^16.6.0 || ^17.0.0 || ^18.0.0" } }, "node_modules/react-is": { "version": "16.13.1", "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "node_modules/react-json-view": { "version": "1.21.3", "resolved": "https://registry.npmjs.org/react-json-view/-/react-json-view-1.21.3.tgz", "integrity": "sha512-13p8IREj9/x/Ye4WI/JpjhoIwuzEgUAtgJZNBJckfzJt1qyh24BdTm6UQNGnyTq9dapQdrqvquZTo3dz1X6Cjw==", "dependencies": { "flux": "^4.0.1", "react-base16-styling": "^0.6.0", "react-lifecycles-compat": "^3.0.4", "react-textarea-autosize": "^8.3.2" }, "peerDependencies": { "react": "^17.0.0 || ^16.3.0 || ^15.5.4", "react-dom": "^17.0.0 || ^16.3.0 || ^15.5.4" } }, "node_modules/react-lifecycles-compat": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz", "integrity": 
"sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==" }, "node_modules/react-loadable": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/react-loadable/-/react-loadable-5.5.0.tgz", "integrity": "sha512-C8Aui0ZpMd4KokxRdVAm2bQtI03k2RMRNzOB+IipV3yxFTSVICv7WoUr5L9ALB5BmKO1iHgZtWM8EvYG83otdg==", "peer": true, "dependencies": { "prop-types": "^15.5.0" }, "peerDependencies": { "react": "*" } }, "node_modules/react-loadable-ssr-addon-v5-slorber": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/react-loadable-ssr-addon-v5-slorber/-/react-loadable-ssr-addon-v5-slorber-1.0.1.tgz", "integrity": "sha512-lq3Lyw1lGku8zUEJPDxsNm1AfYHBrO9Y1+olAYwpUJ2IGFBskM0DMKok97A6LWUpHm+o7IvQBOWu9MLenp9Z+A==", "dependencies": { "@babel/runtime": "^7.10.3" }, "engines": { "node": ">=10.13.0" }, "peerDependencies": { "react-loadable": "*", "webpack": ">=4.41.1 || 5.x" } }, "node_modules/react-router": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/react-router/-/react-router-5.3.3.tgz", "integrity": "sha512-mzQGUvS3bM84TnbtMYR8ZjKnuPJ71IjSzR+DE6UkUqvN4czWIqEs17yLL8xkAycv4ev0AiN+IGrWu88vJs/p2w==", "dependencies": { "@babel/runtime": "^7.12.13", "history": "^4.9.0", "hoist-non-react-statics": "^3.1.0", "loose-envify": "^1.3.1", "mini-create-react-context": "^0.4.0", "path-to-regexp": "^1.7.0", "prop-types": "^15.6.2", "react-is": "^16.6.0", "tiny-invariant": "^1.0.2", "tiny-warning": "^1.0.0" }, "peerDependencies": { "react": ">=15" } }, "node_modules/react-router-config": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/react-router-config/-/react-router-config-5.1.1.tgz", "integrity": "sha512-DuanZjaD8mQp1ppHjgnnUnyOlqYXZVjnov/JzFhjLEwd3Z4dYjMSnqrEzzGThH47vpCOqPPwJM2FtthLeJ8Pbg==", "dependencies": { "@babel/runtime": "^7.1.2" }, "peerDependencies": { "react": ">=15", "react-router": ">=5" } }, "node_modules/react-router-dom": { "version": "5.3.3", "resolved": 
"https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.3.3.tgz", "integrity": "sha512-Ov0tGPMBgqmbu5CDmN++tv2HQ9HlWDuWIIqn4b88gjlAN5IHI+4ZUZRcpz9Hl0azFIwihbLDYw1OiHGRo7ZIng==", "dependencies": { "@babel/runtime": "^7.12.13", "history": "^4.9.0", "loose-envify": "^1.3.1", "prop-types": "^15.6.2", "react-router": "5.3.3", "tiny-invariant": "^1.0.2", "tiny-warning": "^1.0.0" }, "peerDependencies": { "react": ">=15" } }, "node_modules/react-router/node_modules/isarray": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" }, "node_modules/react-router/node_modules/path-to-regexp": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", "dependencies": { "isarray": "0.0.1" } }, "node_modules/react-textarea-autosize": { "version": "8.3.4", "resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.3.4.tgz", "integrity": "sha512-CdtmP8Dc19xL8/R6sWvtknD/eCXkQr30dtvC4VmGInhRsfF8X/ihXCq6+9l9qbxmKRiq407/7z5fxE7cVWQNgQ==", "dependencies": { "@babel/runtime": "^7.10.2", "use-composed-ref": "^1.3.0", "use-latest": "^1.2.1" }, "engines": { "node": ">=10" }, "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0" } }, "node_modules/react-textarea-autosize/node_modules/use-composed-ref": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/use-composed-ref/-/use-composed-ref-1.3.0.tgz", "integrity": "sha512-GLMG0Jc/jiKov/3Ulid1wbv3r54K9HlMW29IWcDFPEqFkSO2nS0MuefWgMJpeHQ9YJeXDL3ZUF+P3jdXlZX/cQ==", "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0" } }, "node_modules/react-textarea-autosize/node_modules/use-latest": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/use-latest/-/use-latest-1.2.1.tgz", "integrity": 
"sha512-xA+AVm/Wlg3e2P/JiItTziwS7FK92LWrDB0p+hgXloIMuVCeJJ8v6f0eeHyPZaJrM+usM1FkFfbNCrJGs8A/zw==", "dependencies": { "use-isomorphic-layout-effect": "^1.1.1" }, "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0" }, "peerDependenciesMeta": { "@types/react": { "optional": true } } }, "node_modules/react-textarea-autosize/node_modules/use-latest/node_modules/use-isomorphic-layout-effect": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.1.2.tgz", "integrity": "sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA==", "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0" }, "peerDependenciesMeta": { "@types/react": { "optional": true } } }, "node_modules/readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" }, "engines": { "node": ">= 6" } }, "node_modules/readdirp": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", "dependencies": { "picomatch": "^2.2.1" }, "engines": { "node": ">=8.10.0" } }, "node_modules/reading-time": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/reading-time/-/reading-time-1.5.0.tgz", "integrity": "sha512-onYyVhBNr4CmAxFsKS7bz+uTLRakypIe4R+5A824vBSkQy/hB3fZepoVEf8OVAxzLvK+H/jm9TzpI3ETSm64Kg==" }, "node_modules/rechoir": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", "integrity": "sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=", "dependencies": { "resolve": "^1.1.6" }, "engines": { "node": ">= 0.10" } }, "node_modules/recursive-readdir": { 
"version": "2.2.2", "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.2.tgz", "integrity": "sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg==", "dependencies": { "minimatch": "3.0.4" }, "engines": { "node": ">=0.10.0" } }, "node_modules/regenerate": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==" }, "node_modules/regenerate-unicode-properties": { "version": "10.0.1", "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.0.1.tgz", "integrity": "sha512-vn5DU6yg6h8hP/2OkQo3K7uVILvY4iu0oI4t3HFa81UPkhGJwkRwM10JEc3upjdhHjs/k8GJY1sRBhk5sr69Bw==", "dependencies": { "regenerate": "^1.4.2" }, "engines": { "node": ">=4" } }, "node_modules/regenerator-runtime": { "version": "0.13.9", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==" }, "node_modules/regenerator-transform": { "version": "0.15.0", "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.0.tgz", "integrity": "sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg==", "dependencies": { "@babel/runtime": "^7.8.4" } }, "node_modules/regexpu-core": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.1.0.tgz", "integrity": "sha512-bb6hk+xWd2PEOkj5It46A16zFMs2mv86Iwpdu94la4S3sJ7C973h2dHpYKwIBGaWSO7cIRJ+UX0IeMaWcO4qwA==", "dependencies": { "regenerate": "^1.4.2", "regenerate-unicode-properties": "^10.0.1", "regjsgen": "^0.6.0", "regjsparser": "^0.8.2", "unicode-match-property-ecmascript": "^2.0.0", "unicode-match-property-value-ecmascript": "^2.0.0" }, "engines": { 
"node": ">=4" } }, "node_modules/registry-auth-token": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz", "integrity": "sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw==", "dependencies": { "rc": "^1.2.8" }, "engines": { "node": ">=6.0.0" } }, "node_modules/registry-url": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz", "integrity": "sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==", "dependencies": { "rc": "^1.2.8" }, "engines": { "node": ">=8" } }, "node_modules/regjsgen": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.6.0.tgz", "integrity": "sha512-ozE883Uigtqj3bx7OhL1KNbCzGyW2NQZPl6Hs09WTvCuZD5sTI4JY58bkbQWa/Y9hxIsvJ3M8Nbf7j54IqeZbA==" }, "node_modules/regjsparser": { "version": "0.8.4", "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.8.4.tgz", "integrity": "sha512-J3LABycON/VNEu3abOviqGHuB/LOtOQj8SKmfP9anY5GfAVw/SPjwzSjxGjbZXIxbGfqTHtJw58C2Li/WkStmA==", "dependencies": { "jsesc": "~0.5.0" }, "bin": { "regjsparser": "bin/parser" } }, "node_modules/regjsparser/node_modules/jsesc": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", "bin": { "jsesc": "bin/jsesc" } }, "node_modules/relateurl": { "version": "0.2.7", "resolved": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz", "integrity": "sha1-VNvzd+UUQKypCkzSdGANP/LYiKk=", "engines": { "node": ">= 0.10" } }, "node_modules/remark-emoji": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/remark-emoji/-/remark-emoji-2.2.0.tgz", "integrity": "sha512-P3cj9s5ggsUvWw5fS2uzCHJMGuXYRb0NnZqYlNecewXt8QBU9n5vW3DUUKOhepS8F9CwdMx9B8a3i7pqFWAI5w==", "dependencies": { "emoticon": "^3.2.0", 
"node-emoji": "^1.10.0", "unist-util-visit": "^2.0.3" } }, "node_modules/remark-footnotes": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/remark-footnotes/-/remark-footnotes-2.0.0.tgz", "integrity": "sha512-3Clt8ZMH75Ayjp9q4CorNeyjwIxHFcTkaektplKGl2A1jNGEUey8cKL0ZC5vJwfcD5GFGsNLImLG/NGzWIzoMQ==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/remark-mdx": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-1.6.22.tgz", "integrity": "sha512-phMHBJgeV76uyFkH4rvzCftLfKCr2RZuF+/gmVcaKrpsihyzmhXjA0BEMDaPTXG5y8qZOKPVo83NAOX01LPnOQ==", "dependencies": { "@babel/core": "7.12.9", "@babel/helper-plugin-utils": "7.10.4", "@babel/plugin-proposal-object-rest-spread": "7.12.1", "@babel/plugin-syntax-jsx": "7.12.1", "@mdx-js/util": "1.6.22", "is-alphabetical": "1.0.4", "remark-parse": "8.0.3", "unified": "9.2.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/remark-mdx/node_modules/@babel/core": { "version": "7.12.9", "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz", "integrity": "sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ==", "dependencies": { "@babel/code-frame": "^7.10.4", "@babel/generator": "^7.12.5", "@babel/helper-module-transforms": "^7.12.1", "@babel/helpers": "^7.12.5", "@babel/parser": "^7.12.7", "@babel/template": "^7.12.7", "@babel/traverse": "^7.12.9", "@babel/types": "^7.12.7", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.1", "json5": "^2.1.2", "lodash": "^4.17.19", "resolve": "^1.3.2", "semver": "^5.4.1", "source-map": "^0.5.0" }, "engines": { "node": ">=6.9.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/babel" } }, "node_modules/remark-mdx/node_modules/@babel/helper-plugin-utils": { "version": "7.10.4", "resolved": 
"https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" }, "node_modules/remark-mdx/node_modules/@babel/plugin-proposal-object-rest-spread": { "version": "7.12.1", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.12.1.tgz", "integrity": "sha512-s6SowJIjzlhx8o7lsFx5zmY4At6CTtDvgNQDdPzkBQucle58A6b/TTeEBYtyDgmcXjUTM+vE8YOGHZzzbc/ioA==", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4", "@babel/plugin-syntax-object-rest-spread": "^7.8.0", "@babel/plugin-transform-parameters": "^7.12.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/remark-mdx/node_modules/@babel/plugin-syntax-jsx": { "version": "7.12.1", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz", "integrity": "sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg==", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/remark-mdx/node_modules/semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", "bin": { "semver": "bin/semver" } }, "node_modules/remark-mdx/node_modules/source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", "engines": { "node": ">=0.10.0" } }, "node_modules/remark-mdx/node_modules/unified": { "version": "9.2.0", "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz", "integrity": 
"sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg==", "dependencies": { "bail": "^1.0.0", "extend": "^3.0.0", "is-buffer": "^2.0.0", "is-plain-obj": "^2.0.0", "trough": "^1.0.0", "vfile": "^4.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/remark-parse": { "version": "8.0.3", "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-8.0.3.tgz", "integrity": "sha512-E1K9+QLGgggHxCQtLt++uXltxEprmWzNfg+MxpfHsZlrddKzZ/hZyWHDbK3/Ap8HJQqYJRXP+jHczdL6q6i85Q==", "dependencies": { "ccount": "^1.0.0", "collapse-white-space": "^1.0.2", "is-alphabetical": "^1.0.0", "is-decimal": "^1.0.0", "is-whitespace-character": "^1.0.0", "is-word-character": "^1.0.0", "markdown-escapes": "^1.0.0", "parse-entities": "^2.0.0", "repeat-string": "^1.5.4", "state-toggle": "^1.0.0", "trim": "0.0.1", "trim-trailing-lines": "^1.0.0", "unherit": "^1.0.4", "unist-util-remove-position": "^2.0.0", "vfile-location": "^3.0.0", "xtend": "^4.0.1" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/remark-squeeze-paragraphs": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/remark-squeeze-paragraphs/-/remark-squeeze-paragraphs-4.0.0.tgz", "integrity": "sha512-8qRqmL9F4nuLPIgl92XUuxI3pFxize+F1H0e/W3llTk0UsjJaj01+RrirkMw7P21RKe4X6goQhYRSvNWX+70Rw==", "dependencies": { "mdast-squeeze-paragraphs": "^4.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/renderkid": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz", "integrity": "sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg==", "dependencies": { "css-select": "^4.1.3", "dom-converter": "^0.2.0", "htmlparser2": "^6.1.0", "lodash": "^4.17.21", "strip-ansi": "^6.0.1" } }, "node_modules/repeat-string": { "version": "1.6.1", "resolved": 
"https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==", "engines": { "node": ">=0.10" } }, "node_modules/require-from-string": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", "engines": { "node": ">=0.10.0" } }, "node_modules/require-like": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==", "engines": { "node": "*" } }, "node_modules/requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=" }, "node_modules/resolve": { "version": "1.20.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", "dependencies": { "is-core-module": "^2.2.0", "path-parse": "^1.0.6" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/resolve-from": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "engines": { "node": ">=4" } }, "node_modules/resolve-pathname": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz", "integrity": "sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==" }, "node_modules/responselike": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", "integrity": "sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec=", "dependencies": { "lowercase-keys": "^1.0.0" } }, "node_modules/retry": { "version": "0.13.1", "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", "engines": { "node": ">= 4" } }, "node_modules/reusify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", "engines": { "iojs": ">=1.0.0", "node": ">=0.10.0" } }, "node_modules/rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "dependencies": { "glob": "^7.1.3" }, "bin": { "rimraf": "bin.js" }, "funding": { "url": "https://github.com/sponsors/isaacs" } }, "node_modules/rtl-detect": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/rtl-detect/-/rtl-detect-1.0.4.tgz", "integrity": "sha512-EBR4I2VDSSYr7PkBmFy04uhycIpDKp+21p/jARYXlCSjQksTBQcJ0HFUPOO79EPPH5JS6VAhiIQbycf0O3JAxQ==" }, "node_modules/rtlcss": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/rtlcss/-/rtlcss-3.5.0.tgz", "integrity": "sha512-wzgMaMFHQTnyi9YOwsx9LjOxYXJPzS8sYnFaKm6R5ysvTkwzHiB0vxnbHwchHQT65PTdBjDG21/kQBWI7q9O7A==", "dependencies": { "find-up": "^5.0.0", "picocolors": "^1.0.0", "postcss": "^8.3.11", "strip-json-comments": "^3.1.1" }, "bin": { "rtlcss": "bin/rtlcss.js" } }, "node_modules/rtlcss/node_modules/find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dependencies": { "locate-path": "^6.0.0", "path-exists": 
"^4.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/rtlcss/node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dependencies": { "p-locate": "^5.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/rtlcss/node_modules/p-limit": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dependencies": { "yocto-queue": "^0.1.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/rtlcss/node_modules/p-locate": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dependencies": { "p-limit": "^3.0.2" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/rtlcss/node_modules/strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "engines": { "node": ">=8" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/feross" }, 
{ "type": "patreon", "url": "https://www.patreon.com/feross" }, { "type": "consulting", "url": "https://feross.org/support" } ], "dependencies": { "queue-microtask": "^1.2.2" } }, "node_modules/rxjs": { "version": "7.5.5", "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.5.5.tgz", "integrity": "sha512-sy+H0pQofO95VDmFLzyaw9xNJU4KTRSwQIGM6+iG3SypAtCiLDzpeG8sJrNCWn2Up9km+KhkvTdbkrdy+yzZdw==", "dependencies": { "tslib": "^2.1.0" } }, "node_modules/safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "node_modules/sax": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" }, "node_modules/scheduler": { "version": "0.20.2", "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz", "integrity": "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==", "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "node_modules/schema-utils": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", "dependencies": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", "ajv-keywords": "^3.5.2" }, "engines": { "node": ">= 10.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" } }, "node_modules/section-matter": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz", "integrity": "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==", "dependencies": { "extend-shallow": "^2.0.1", "kind-of": "^6.0.0" }, "engines": { "node": ">=4" } }, "node_modules/select-hose": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", "integrity": "sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==" }, "node_modules/selfsigned": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-2.0.1.tgz", "integrity": "sha512-LmME957M1zOsUhG+67rAjKfiWFox3SBxE/yymatMZsAx+oMrJ0YQ8AToOnyCm7xbeg2ep37IHLxdu0o2MavQOQ==", "dependencies": { "node-forge": "^1" }, "engines": { "node": ">=10" } }, "node_modules/semver": { "version": "7.3.5", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", "dependencies": { "lru-cache": "^6.0.0" }, "bin": { "semver": "bin/semver.js" }, "engines": { "node": ">=10" } }, "node_modules/semver-diff": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz", "integrity": "sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==", "dependencies": { "semver": "^6.3.0" }, "engines": { "node": ">=8" } }, "node_modules/semver-diff/node_modules/semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/send": { "version": "0.18.0", "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", 
"dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" }, "engines": { "node": ">= 0.8.0" } }, "node_modules/send/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/send/node_modules/debug/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/send/node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "node_modules/serialize-javascript": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", "dependencies": { "randombytes": "^2.1.0" } }, "node_modules/serve-handler": { "version": "6.1.3", "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.3.tgz", "integrity": "sha512-FosMqFBNrLyeiIDvP1zgO6YoTzFYHxLDEIavhlmQ+knB2Z7l1t+kGLHkZIDN7UVWqQAmKI3D20A6F6jo3nDd4w==", "dependencies": { "bytes": "3.0.0", "content-disposition": "0.5.2", "fast-url-parser": "1.1.3", "mime-types": "2.1.18", "minimatch": "3.0.4", "path-is-inside": "1.0.2", "path-to-regexp": "2.2.1", "range-parser": "1.2.0" } }, "node_modules/serve-handler/node_modules/content-disposition": { "version": "0.5.2", "resolved": 
"https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", "integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ=", "engines": { "node": ">= 0.6" } }, "node_modules/serve-handler/node_modules/mime-db": { "version": "1.33.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz", "integrity": "sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==", "engines": { "node": ">= 0.6" } }, "node_modules/serve-handler/node_modules/mime-types": { "version": "2.1.18", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", "integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==", "dependencies": { "mime-db": "~1.33.0" }, "engines": { "node": ">= 0.6" } }, "node_modules/serve-handler/node_modules/path-to-regexp": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz", "integrity": "sha512-gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ==" }, "node_modules/serve-handler/node_modules/range-parser": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=", "engines": { "node": ">= 0.6" } }, "node_modules/serve-index": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz", "integrity": "sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw==", "dependencies": { "accepts": "~1.3.4", "batch": "0.6.1", "debug": "2.6.9", "escape-html": "~1.0.3", "http-errors": "~1.6.2", "mime-types": "~2.1.17", "parseurl": "~1.3.2" }, "engines": { "node": ">= 0.8.0" } }, "node_modules/serve-index/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": 
"sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/serve-index/node_modules/depd": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", "engines": { "node": ">= 0.6" } }, "node_modules/serve-index/node_modules/http-errors": { "version": "1.6.3", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", "integrity": "sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A==", "dependencies": { "depd": "~1.1.2", "inherits": "2.0.3", "setprototypeof": "1.1.0", "statuses": ">= 1.4.0 < 2" }, "engines": { "node": ">= 0.6" } }, "node_modules/serve-index/node_modules/inherits": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" }, "node_modules/serve-index/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/serve-index/node_modules/setprototypeof": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" }, "node_modules/serve-index/node_modules/statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==", "engines": { "node": ">= 0.6" } }, "node_modules/serve-static": { "version": "1.15.0", "resolved": 
"https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", "dependencies": { "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.18.0" }, "engines": { "node": ">= 0.8.0" } }, "node_modules/setimmediate": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==" }, "node_modules/setprototypeof": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "node_modules/shallow-clone": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", "dependencies": { "kind-of": "^6.0.2" }, "engines": { "node": ">=8" } }, "node_modules/shallowequal": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz", "integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==" }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "dependencies": { "shebang-regex": "^3.0.0" }, "engines": { "node": ">=8" } }, "node_modules/shebang-regex": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "engines": { "node": ">=8" } }, 
"node_modules/shell-quote": { "version": "1.7.3", "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.3.tgz", "integrity": "sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw==" }, "node_modules/shelljs": { "version": "0.8.5", "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", "dependencies": { "glob": "^7.0.0", "interpret": "^1.0.0", "rechoir": "^0.6.2" }, "bin": { "shjs": "bin/shjs" }, "engines": { "node": ">=4" } }, "node_modules/side-channel": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", "dependencies": { "call-bind": "^1.0.0", "get-intrinsic": "^1.0.2", "object-inspect": "^1.9.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/signal-exit": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==" }, "node_modules/sirv": { "version": "1.0.19", "resolved": "https://registry.npmjs.org/sirv/-/sirv-1.0.19.tgz", "integrity": "sha512-JuLThK3TnZG1TAKDwNIqNq6QA2afLOCcm+iE8D1Kj3GA40pSPsxQjjJl0J8X3tsR7T+CP1GavpzLwYkgVLWrZQ==", "dependencies": { "@polka/url": "^1.0.0-next.20", "mrmime": "^1.0.0", "totalist": "^1.0.0" }, "engines": { "node": ">= 10" } }, "node_modules/sisteransi": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==" }, "node_modules/sitemap": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/sitemap/-/sitemap-7.1.1.tgz", "integrity": 
"sha512-mK3aFtjz4VdJN0igpIJrinf3EO8U8mxOPsTBzSsy06UtjZQJ3YY3o3Xa7zSc5nMqcMrRwlChHZ18Kxg0caiPBg==", "dependencies": { "@types/node": "^17.0.5", "@types/sax": "^1.2.1", "arg": "^5.0.0", "sax": "^1.2.4" }, "bin": { "sitemap": "dist/cli.js" }, "engines": { "node": ">=12.0.0", "npm": ">=5.6.0" } }, "node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "engines": { "node": ">=8" } }, "node_modules/sockjs": { "version": "0.3.24", "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz", "integrity": "sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==", "dependencies": { "faye-websocket": "^0.11.3", "uuid": "^8.3.2", "websocket-driver": "^0.7.4" } }, "node_modules/sort-css-media-queries": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/sort-css-media-queries/-/sort-css-media-queries-2.0.4.tgz", "integrity": "sha512-PAIsEK/XupCQwitjv7XxoMvYhT7EAfyzI3hsy/MyDgTvc+Ft55ctdkctJLOy6cQejaIC+zjpUL4djFVm2ivOOw==", "engines": { "node": ">= 6.3.0" } }, "node_modules/source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "engines": { "node": ">=0.10.0" } }, "node_modules/source-map-js": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", "engines": { "node": ">=0.10.0" } }, "node_modules/source-map-support": { "version": "0.5.21", "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", "integrity": 
"sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, "node_modules/space-separated-tokens": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz", "integrity": "sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/spdy": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz", "integrity": "sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==", "dependencies": { "debug": "^4.1.0", "handle-thing": "^2.0.0", "http-deceiver": "^1.2.7", "select-hose": "^2.0.0", "spdy-transport": "^3.0.0" }, "engines": { "node": ">=6.0.0" } }, "node_modules/spdy-transport": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz", "integrity": "sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==", "dependencies": { "debug": "^4.1.0", "detect-node": "^2.0.4", "hpack.js": "^2.1.6", "obuf": "^1.1.2", "readable-stream": "^3.0.6", "wbuf": "^1.7.3" } }, "node_modules/sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" }, "node_modules/stable": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz", "integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==" }, "node_modules/state-toggle": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/state-toggle/-/state-toggle-1.0.3.tgz", "integrity": "sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ==", "funding": { "type": 
"github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/statuses": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", "engines": { "node": ">= 0.8" } }, "node_modules/std-env": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.0.1.tgz", "integrity": "sha512-mC1Ps9l77/97qeOZc+HrOL7TIaOboHqMZ24dGVQrlxFcpPpfCHpH+qfUT7Dz+6mlG8+JPA1KfBQo19iC/+Ngcw==" }, "node_modules/string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", "dependencies": { "safe-buffer": "~5.2.0" } }, "node_modules/string_decoder/node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/feross" }, { "type": "patreon", "url": "https://www.patreon.com/feross" }, { "type": "consulting", "url": "https://feross.org/support" } ] }, "node_modules/string-width": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.0" }, "engines": { "node": ">=8" } }, "node_modules/string-width/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, 
"node_modules/string-width/node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "engines": { "node": ">=8" } }, "node_modules/string.prototype.trimend": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz", "integrity": "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==", "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.1.3" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/string.prototype.trimstart": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz", "integrity": "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==", "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.1.3" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/stringify-object": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz", "integrity": "sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==", "dependencies": { "get-own-enumerable-property-symbols": "^3.0.0", "is-obj": "^1.0.1", "is-regexp": "^1.0.0" }, "engines": { "node": ">=4" } }, "node_modules/stringify-object/node_modules/is-obj": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", "integrity": "sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==", "engines": { "node": ">=0.10.0" } }, "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": 
"sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dependencies": { "ansi-regex": "^5.0.1" }, "engines": { "node": ">=8" } }, "node_modules/strip-ansi/node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "engines": { "node": ">=8" } }, "node_modules/strip-bom-string": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz", "integrity": "sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==", "engines": { "node": ">=0.10.0" } }, "node_modules/strip-final-newline": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", "engines": { "node": ">=6" } }, "node_modules/strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", "engines": { "node": ">=0.10.0" } }, "node_modules/style-to-object": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz", "integrity": "sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==", "dependencies": { "inline-style-parser": "0.1.1" } }, "node_modules/stylehacks": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-5.1.0.tgz", "integrity": "sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q==", "dependencies": { "browserslist": "^4.16.6", "postcss-selector-parser": "^6.0.4" }, "engines": { "node": "^10 || ^12 || >=14.0" }, "peerDependencies": { 
"postcss": "^8.2.15" } }, "node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", "dependencies": { "has-flag": "^3.0.0" }, "engines": { "node": ">=4" } }, "node_modules/svg-parser": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz", "integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==" }, "node_modules/svgo": { "version": "2.8.0", "resolved": "https://registry.npmjs.org/svgo/-/svgo-2.8.0.tgz", "integrity": "sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg==", "dependencies": { "@trysound/sax": "0.2.0", "commander": "^7.2.0", "css-select": "^4.1.3", "css-tree": "^1.1.3", "csso": "^4.2.0", "picocolors": "^1.0.0", "stable": "^0.1.8" }, "bin": { "svgo": "bin/svgo" }, "engines": { "node": ">=10.13.0" } }, "node_modules/svgo/node_modules/commander": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", "engines": { "node": ">= 10" } }, "node_modules/tapable": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.0.tgz", "integrity": "sha512-FBk4IesMV1rBxX2tfiK8RAmogtWn53puLOQlvO8XuwlgxcYbP4mVPS9Ph4aeamSyyVjOl24aYWAuc8U5kCVwMw==", "engines": { "node": ">=6" } }, "node_modules/terser": { "version": "5.10.0", "resolved": "https://registry.npmjs.org/terser/-/terser-5.10.0.tgz", "integrity": "sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA==", "dependencies": { "commander": "^2.20.0", "source-map": "~0.7.2", "source-map-support": "~0.5.20" }, "bin": { "terser": "bin/terser" }, "engines": { "node": ">=10" }, 
"peerDependencies": { "acorn": "^8.5.0" }, "peerDependenciesMeta": { "acorn": { "optional": true } } }, "node_modules/terser-webpack-plugin": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.3.tgz", "integrity": "sha512-Fx60G5HNYknNTNQnzQ1VePRuu89ZVYWfjRAeT5rITuCY/1b08s49e5kSQwHDirKZWuoKOBRFS98EUUoZ9kLEwQ==", "dependencies": { "@jridgewell/trace-mapping": "^0.3.7", "jest-worker": "^27.4.5", "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.0", "terser": "^5.7.2" }, "engines": { "node": ">= 10.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" }, "peerDependencies": { "webpack": "^5.1.0" }, "peerDependenciesMeta": { "@swc/core": { "optional": true }, "esbuild": { "optional": true }, "uglify-js": { "optional": true } } }, "node_modules/terser/node_modules/commander": { "version": "2.20.3", "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" }, "node_modules/terser/node_modules/source-map": { "version": "0.7.3", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==", "engines": { "node": ">= 8" } }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==" }, "node_modules/thunky": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" }, "node_modules/tiny-invariant": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.1.0.tgz", 
"integrity": "sha512-ytxQvrb1cPc9WBEI/HSeYYoGD0kWnGEOR8RY6KomWLBVhqz0RgTwVO9dLrGz7dC+nN9llyI7OKAgRq8Vq4ZBSw==" }, "node_modules/tiny-warning": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" }, "node_modules/to-fast-properties": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", "engines": { "node": ">=4" } }, "node_modules/to-readable-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz", "integrity": "sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==", "engines": { "node": ">=6" } }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "dependencies": { "is-number": "^7.0.0" }, "engines": { "node": ">=8.0" } }, "node_modules/toidentifier": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", "engines": { "node": ">=0.6" } }, "node_modules/totalist": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/totalist/-/totalist-1.1.0.tgz", "integrity": "sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g==", "engines": { "node": ">=6" } }, "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, "node_modules/trim": { 
"version": "0.0.1", "resolved": "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz", "integrity": "sha512-YzQV+TZg4AxpKxaTHK3c3D+kRDCGVEE7LemdlQZoQXn0iennk10RsIoY6ikzAqJTc9Xjl9C1/waHom/J86ziAQ==" }, "node_modules/trim-trailing-lines": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz", "integrity": "sha512-rjUWSqnfTNrjbB9NQWfPMH/xRK1deHeGsHoVfpxJ++XeYXE0d6B1En37AHfw3jtfTU7dzMzZL2jjpe8Qb5gLIQ==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/trough": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz", "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/tslib": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/type-fest": { "version": "0.20.2", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", "dependencies": { "media-typer": "0.3.0", "mime-types": "~2.1.24" }, "engines": { "node": ">= 0.6" } }, "node_modules/typedarray-to-buffer": { "version": "3.1.5", "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", "integrity": 
"sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", "dependencies": { "is-typedarray": "^1.0.0" } }, "node_modules/typescript": { "version": "4.7.4", "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz", "integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==", "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" }, "engines": { "node": ">=4.2.0" } }, "node_modules/ua-parser-js": { "version": "0.7.31", "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.31.tgz", "integrity": "sha512-qLK/Xe9E2uzmYI3qLeOmI0tEOt+TBBQyUIAh4aAgU05FVYzeZrKUdkAZfBNVGRaHVgV0TDkdEngJSw/SyQchkQ==", "funding": [ { "type": "opencollective", "url": "https://opencollective.com/ua-parser-js" }, { "type": "paypal", "url": "https://paypal.me/faisalman" } ], "engines": { "node": "*" } }, "node_modules/unbox-primitive": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz", "integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==", "dependencies": { "function-bind": "^1.1.1", "has-bigints": "^1.0.1", "has-symbols": "^1.0.2", "which-boxed-primitive": "^1.0.2" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/unherit": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/unherit/-/unherit-1.1.3.tgz", "integrity": "sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ==", "dependencies": { "inherits": "^2.0.0", "xtend": "^4.0.0" }, "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/unicode-canonical-property-names-ecmascript": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", "integrity": 
"sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==", "engines": { "node": ">=4" } }, "node_modules/unicode-match-property-ecmascript": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", "dependencies": { "unicode-canonical-property-names-ecmascript": "^2.0.0", "unicode-property-aliases-ecmascript": "^2.0.0" }, "engines": { "node": ">=4" } }, "node_modules/unicode-match-property-value-ecmascript": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz", "integrity": "sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==", "engines": { "node": ">=4" } }, "node_modules/unicode-property-aliases-ecmascript": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz", "integrity": "sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ==", "engines": { "node": ">=4" } }, "node_modules/unified": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.2.tgz", "integrity": "sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ==", "dependencies": { "bail": "^1.0.0", "extend": "^3.0.0", "is-buffer": "^2.0.0", "is-plain-obj": "^2.0.0", "trough": "^1.0.0", "vfile": "^4.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/unique-string": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", "integrity": 
"sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", "dependencies": { "crypto-random-string": "^2.0.0" }, "engines": { "node": ">=8" } }, "node_modules/unist-builder": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/unist-builder/-/unist-builder-2.0.3.tgz", "integrity": "sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/unist-util-generated": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-1.1.6.tgz", "integrity": "sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/unist-util-is": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz", "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/unist-util-position": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.1.0.tgz", "integrity": "sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/unist-util-remove": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.1.0.tgz", "integrity": "sha512-J8NYPyBm4baYLdCbjmf1bhPu45Cr1MWTm77qd9istEkzWpnN6O9tMsEbB2JhNnBCqGENRqEWomQ+He6au0B27Q==", "dependencies": { "unist-util-is": "^4.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/unist-util-remove-position": { 
"version": "2.0.1", "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-2.0.1.tgz", "integrity": "sha512-fDZsLYIe2uT+oGFnuZmy73K6ZxOPG/Qcm+w7jbEjaFcJgbQ6cqjs/eSPzXhsmGpAsWPkqZM9pYjww5QTn3LHMA==", "dependencies": { "unist-util-visit": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/unist-util-stringify-position": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz", "integrity": "sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g==", "dependencies": { "@types/unist": "^2.0.2" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/unist-util-visit": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz", "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==", "dependencies": { "@types/unist": "^2.0.0", "unist-util-is": "^4.0.0", "unist-util-visit-parents": "^3.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/unist-util-visit-parents": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz", "integrity": "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==", "dependencies": { "@types/unist": "^2.0.0", "unist-util-is": "^4.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/universalify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", "engines": { "node": ">= 10.0.0" } }, 
"node_modules/unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", "engines": { "node": ">= 0.8" } }, "node_modules/unquote": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/unquote/-/unquote-1.1.1.tgz", "integrity": "sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ=" }, "node_modules/update-notifier": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-5.1.0.tgz", "integrity": "sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw==", "dependencies": { "boxen": "^5.0.0", "chalk": "^4.1.0", "configstore": "^5.0.1", "has-yarn": "^2.1.0", "import-lazy": "^2.1.0", "is-ci": "^2.0.0", "is-installed-globally": "^0.4.0", "is-npm": "^5.0.0", "is-yarn-global": "^0.3.0", "latest-version": "^5.1.0", "pupa": "^2.1.1", "semver": "^7.3.4", "semver-diff": "^3.1.1", "xdg-basedir": "^4.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/yeoman/update-notifier?sponsor=1" } }, "node_modules/uri-js": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", "dependencies": { "punycode": "^2.1.0" } }, "node_modules/url-loader": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/url-loader/-/url-loader-4.1.1.tgz", "integrity": "sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==", "dependencies": { "loader-utils": "^2.0.0", "mime-types": "^2.1.27", "schema-utils": "^3.0.0" }, "engines": { "node": ">= 10.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" }, "peerDependencies": { "file-loader": "*", "webpack": "^4.0.0 || ^5.0.0" }, "peerDependenciesMeta": { "file-loader": { "optional": true 
} } }, "node_modules/url-parse": { "version": "1.5.3", "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.3.tgz", "integrity": "sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ==", "dependencies": { "querystringify": "^2.1.1", "requires-port": "^1.0.0" } }, "node_modules/url-parse-lax": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", "integrity": "sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=", "dependencies": { "prepend-http": "^2.0.0" }, "engines": { "node": ">=4" } }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" }, "node_modules/util.promisify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/util.promisify/-/util.promisify-1.0.1.tgz", "integrity": "sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA==", "dependencies": { "define-properties": "^1.1.3", "es-abstract": "^1.17.2", "has-symbols": "^1.0.1", "object.getownpropertydescriptors": "^2.1.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/utila": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz", "integrity": "sha1-ihagXURWV6Oupe7MWxKk+lN5dyw=" }, "node_modules/utility-types": { "version": "3.10.0", "resolved": "https://registry.npmjs.org/utility-types/-/utility-types-3.10.0.tgz", "integrity": "sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg==", "engines": { "node": ">= 4" } }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", "engines": { "node": ">= 0.4.0" } }, "node_modules/uuid": { "version": "8.3.2", 
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", "bin": { "uuid": "dist/bin/uuid" } }, "node_modules/value-equal": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz", "integrity": "sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==" }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", "engines": { "node": ">= 0.8" } }, "node_modules/vfile": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/vfile/-/vfile-4.2.1.tgz", "integrity": "sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA==", "dependencies": { "@types/unist": "^2.0.0", "is-buffer": "^2.0.0", "unist-util-stringify-position": "^2.0.0", "vfile-message": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/vfile-location": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-3.2.0.tgz", "integrity": "sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/vfile-message": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.4.tgz", "integrity": "sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ==", "dependencies": { "@types/unist": "^2.0.0", "unist-util-stringify-position": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/wait-on": { "version": "6.0.1", "resolved": 
"https://registry.npmjs.org/wait-on/-/wait-on-6.0.1.tgz", "integrity": "sha512-zht+KASY3usTY5u2LgaNqn/Cd8MukxLGjdcZxT2ns5QzDmTFc4XoWBgC+C/na+sMRZTuVygQoMYwdcVjHnYIVw==", "dependencies": { "axios": "^0.25.0", "joi": "^17.6.0", "lodash": "^4.17.21", "minimist": "^1.2.5", "rxjs": "^7.5.4" }, "bin": { "wait-on": "bin/wait-on" }, "engines": { "node": ">=10.0.0" } }, "node_modules/watchpack": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.3.1.tgz", "integrity": "sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA==", "dependencies": { "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.1.2" }, "engines": { "node": ">=10.13.0" } }, "node_modules/wbuf": { "version": "1.7.3", "resolved": "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz", "integrity": "sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==", "dependencies": { "minimalistic-assert": "^1.0.0" } }, "node_modules/web-namespaces": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-1.1.4.tgz", "integrity": "sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" }, "node_modules/webpack": { "version": "5.73.0", "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.73.0.tgz", "integrity": "sha512-svjudQRPPa0YiOYa2lM/Gacw0r6PvxptHj4FuEKQ2kX05ZLkjbVc5MnPs6its5j7IZljnIqSVo/OsY2X0IpHGA==", "dependencies": { "@types/eslint-scope": "^3.7.3", "@types/estree": "^0.0.51", "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/wasm-edit": "1.11.1", "@webassemblyjs/wasm-parser": "1.11.1", 
"acorn": "^8.4.1", "acorn-import-assertions": "^1.7.6", "browserslist": "^4.14.5", "chrome-trace-event": "^1.0.2", "enhanced-resolve": "^5.9.3", "es-module-lexer": "^0.9.0", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.2.9", "json-parse-even-better-errors": "^2.3.1", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", "neo-async": "^2.6.2", "schema-utils": "^3.1.0", "tapable": "^2.1.1", "terser-webpack-plugin": "^5.1.3", "watchpack": "^2.3.1", "webpack-sources": "^3.2.3" }, "bin": { "webpack": "bin/webpack.js" }, "engines": { "node": ">=10.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" }, "peerDependenciesMeta": { "webpack-cli": { "optional": true } } }, "node_modules/webpack-bundle-analyzer": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.5.0.tgz", "integrity": "sha512-GUMZlM3SKwS8Z+CKeIFx7CVoHn3dXFcUAjT/dcZQQmfSZGvitPfMob2ipjai7ovFFqPvTqkEZ/leL4O0YOdAYQ==", "dependencies": { "acorn": "^8.0.4", "acorn-walk": "^8.0.0", "chalk": "^4.1.0", "commander": "^7.2.0", "gzip-size": "^6.0.0", "lodash": "^4.17.20", "opener": "^1.5.2", "sirv": "^1.0.7", "ws": "^7.3.1" }, "bin": { "webpack-bundle-analyzer": "lib/bin/analyzer.js" }, "engines": { "node": ">= 10.13.0" } }, "node_modules/webpack-bundle-analyzer/node_modules/commander": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", "engines": { "node": ">= 10" } }, "node_modules/webpack-dev-middleware": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz", "integrity": "sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==", "dependencies": { "colorette": "^2.0.10", "memfs": "^3.4.3", "mime-types": "^2.1.31", 
"range-parser": "^1.2.1", "schema-utils": "^4.0.0" }, "engines": { "node": ">= 12.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" }, "peerDependencies": { "webpack": "^4.0.0 || ^5.0.0" } }, "node_modules/webpack-dev-middleware/node_modules/ajv": { "version": "8.11.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2", "uri-js": "^4.2.2" }, "funding": { "type": "github", "url": "https://github.com/sponsors/epoberezkin" } }, "node_modules/webpack-dev-middleware/node_modules/ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "dependencies": { "fast-deep-equal": "^3.1.3" }, "peerDependencies": { "ajv": "^8.8.2" } }, "node_modules/webpack-dev-middleware/node_modules/json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "node_modules/webpack-dev-middleware/node_modules/schema-utils": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "dependencies": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", "ajv-formats": "^2.1.1", "ajv-keywords": "^5.0.0" }, "engines": { "node": ">= 12.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" } }, "node_modules/webpack-dev-server": { "version": "4.9.3", "resolved": 
"https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.9.3.tgz", "integrity": "sha512-3qp/eoboZG5/6QgiZ3llN8TUzkSpYg1Ko9khWX1h40MIEUNS2mDoIa8aXsPfskER+GbTvs/IJZ1QTBBhhuetSw==", "dependencies": { "@types/bonjour": "^3.5.9", "@types/connect-history-api-fallback": "^1.3.5", "@types/express": "^4.17.13", "@types/serve-index": "^1.9.1", "@types/serve-static": "^1.13.10", "@types/sockjs": "^0.3.33", "@types/ws": "^8.5.1", "ansi-html-community": "^0.0.8", "bonjour-service": "^1.0.11", "chokidar": "^3.5.3", "colorette": "^2.0.10", "compression": "^1.7.4", "connect-history-api-fallback": "^2.0.0", "default-gateway": "^6.0.3", "express": "^4.17.3", "graceful-fs": "^4.2.6", "html-entities": "^2.3.2", "http-proxy-middleware": "^2.0.3", "ipaddr.js": "^2.0.1", "open": "^8.0.9", "p-retry": "^4.5.0", "rimraf": "^3.0.2", "schema-utils": "^4.0.0", "selfsigned": "^2.0.1", "serve-index": "^1.9.1", "sockjs": "^0.3.24", "spdy": "^4.0.2", "webpack-dev-middleware": "^5.3.1", "ws": "^8.4.2" }, "bin": { "webpack-dev-server": "bin/webpack-dev-server.js" }, "engines": { "node": ">= 12.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" }, "peerDependencies": { "webpack": "^4.37.0 || ^5.0.0" }, "peerDependenciesMeta": { "webpack-cli": { "optional": true } } }, "node_modules/webpack-dev-server/node_modules/ajv": { "version": "8.11.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2", "uri-js": "^4.2.2" }, "funding": { "type": "github", "url": "https://github.com/sponsors/epoberezkin" } }, "node_modules/webpack-dev-server/node_modules/ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", "integrity": 
"sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "dependencies": { "fast-deep-equal": "^3.1.3" }, "peerDependencies": { "ajv": "^8.8.2" } }, "node_modules/webpack-dev-server/node_modules/json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "node_modules/webpack-dev-server/node_modules/schema-utils": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "dependencies": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", "ajv-formats": "^2.1.1", "ajv-keywords": "^5.0.0" }, "engines": { "node": ">= 12.13.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" } }, "node_modules/webpack-dev-server/node_modules/ws": { "version": "8.8.1", "resolved": "https://registry.npmjs.org/ws/-/ws-8.8.1.tgz", "integrity": "sha512-bGy2JzvzkPowEJV++hF07hAD6niYSr0JzBNo/J29WsB57A2r7Wlc1UFcTR9IzrPvuNVO4B8LGqF8qcpsVOhJCA==", "engines": { "node": ">=10.0.0" }, "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": "^5.0.2" }, "peerDependenciesMeta": { "bufferutil": { "optional": true }, "utf-8-validate": { "optional": true } } }, "node_modules/webpack-merge": { "version": "5.8.0", "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz", "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==", "dependencies": { "clone-deep": "^4.0.1", "wildcard": "^2.0.0" }, "engines": { "node": ">=10.0.0" } }, "node_modules/webpack-sources": { "version": "3.2.3", "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", "integrity": 
"sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", "engines": { "node": ">=10.13.0" } }, "node_modules/webpackbar": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.2.tgz", "integrity": "sha512-BmFJo7veBDgQzfWXl/wwYXr/VFus0614qZ8i9znqcl9fnEdiVkdbi0TedLQ6xAK92HZHDJ0QmyQ0fmuZPAgCYQ==", "dependencies": { "chalk": "^4.1.0", "consola": "^2.15.3", "pretty-time": "^1.1.0", "std-env": "^3.0.1" }, "engines": { "node": ">=12" }, "peerDependencies": { "webpack": "3 || 4 || 5" } }, "node_modules/websocket-driver": { "version": "0.7.4", "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz", "integrity": "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==", "dependencies": { "http-parser-js": ">=0.5.1", "safe-buffer": ">=5.1.0", "websocket-extensions": ">=0.1.1" }, "engines": { "node": ">=0.8.0" } }, "node_modules/websocket-extensions": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz", "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==", "engines": { "node": ">=0.8.0" } }, "node_modules/whatwg-url": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/node-which" }, "engines": { "node": ">= 8" } }, "node_modules/which-boxed-primitive": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", "dependencies": { "is-bigint": "^1.0.1", "is-boolean-object": "^1.1.0", "is-number-object": "^1.0.4", "is-string": "^1.0.5", "is-symbol": "^1.0.3" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/widest-line": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", "dependencies": { "string-width": "^4.0.0" }, "engines": { "node": ">=8" } }, "node_modules/wildcard": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==" }, "node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, "node_modules/wrap-ansi/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dependencies": { "color-convert": "^2.0.1" }, "engines": { "node": ">=8" }, "funding": { "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, "node_modules/wrap-ansi/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": 
"sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dependencies": { "color-name": "~1.1.4" }, "engines": { "node": ">=7.0.0" } }, "node_modules/wrap-ansi/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" }, "node_modules/write-file-atomic": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", "dependencies": { "imurmurhash": "^0.1.4", "is-typedarray": "^1.0.0", "signal-exit": "^3.0.2", "typedarray-to-buffer": "^3.1.5" } }, "node_modules/ws": { "version": "7.5.7", "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.7.tgz", "integrity": "sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A==", "engines": { "node": ">=8.3.0" }, "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": "^5.0.2" }, "peerDependenciesMeta": { "bufferutil": { "optional": true }, "utf-8-validate": { "optional": true } } }, "node_modules/xdg-basedir": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==", "engines": { "node": ">=8" } }, "node_modules/xml-js": { "version": "1.6.11", "resolved": "https://registry.npmjs.org/xml-js/-/xml-js-1.6.11.tgz", "integrity": "sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g==", "dependencies": { "sax": "^1.2.4" }, "bin": { "xml-js": "bin/cli.js" } }, 
"node_modules/xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", "engines": { "node": ">=0.4" } }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/yaml": { "version": "1.10.2", "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", "engines": { "node": ">= 6" } }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/zwitch": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz", "integrity": "sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } } }, "dependencies": { "@algolia/autocomplete-core": { "version": "1.7.1", "resolved": "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.7.1.tgz", "integrity": "sha512-eiZw+fxMzNQn01S8dA/hcCpoWCOCwcIIEUtHHdzN5TGB3IpzLbuhqFeTfh2OUhhgkE8Uo17+wH+QJ/wYyQmmzg==", "requires": { "@algolia/autocomplete-shared": "1.7.1" } }, "@algolia/autocomplete-preset-algolia": { "version": "1.7.1", "resolved": "https://registry.npmjs.org/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.7.1.tgz", "integrity": 
"sha512-pJwmIxeJCymU1M6cGujnaIYcY3QPOVYZOXhFkWVM7IxKzy272BwCvMFMyc5NpG/QmiObBxjo7myd060OeTNJXg==", "requires": { "@algolia/autocomplete-shared": "1.7.1" } }, "@algolia/autocomplete-shared": { "version": "1.7.1", "resolved": "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.7.1.tgz", "integrity": "sha512-eTmGVqY3GeyBTT8IWiB2K5EuURAqhnumfktAEoHxfDY2o7vg2rSnO16ZtIG0fMgt3py28Vwgq42/bVEuaQV7pg==" }, "@algolia/cache-browser-local-storage": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.13.1.tgz", "integrity": "sha512-UAUVG2PEfwd/FfudsZtYnidJ9eSCpS+LW9cQiesePQLz41NAcddKxBak6eP2GErqyFagSlnVXe/w2E9h2m2ttg==", "requires": { "@algolia/cache-common": "4.13.1" } }, "@algolia/cache-common": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.13.1.tgz", "integrity": "sha512-7Vaf6IM4L0Jkl3sYXbwK+2beQOgVJ0mKFbz/4qSxKd1iy2Sp77uTAazcX+Dlexekg1fqGUOSO7HS4Sx47ZJmjA==" }, "@algolia/cache-in-memory": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.13.1.tgz", "integrity": "sha512-pZzybCDGApfA/nutsFK1P0Sbsq6fYJU3DwIvyKg4pURerlJM4qZbB9bfLRef0FkzfQu7W11E4cVLCIOWmyZeuQ==", "requires": { "@algolia/cache-common": "4.13.1" } }, "@algolia/client-account": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.13.1.tgz", "integrity": "sha512-TFLiZ1KqMiir3FNHU+h3b0MArmyaHG+eT8Iojio6TdpeFcAQ1Aiy+2gb3SZk3+pgRJa/BxGmDkRUwE5E/lv3QQ==", "requires": { "@algolia/client-common": "4.13.1", "@algolia/client-search": "4.13.1", "@algolia/transporter": "4.13.1" } }, "@algolia/client-analytics": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.13.1.tgz", "integrity": "sha512-iOS1JBqh7xaL5x00M5zyluZ9+9Uy9GqtYHv/2SMuzNW1qP7/0doz1lbcsP3S7KBbZANJTFHUOfuqyRLPk91iFA==", "requires": { 
"@algolia/client-common": "4.13.1", "@algolia/client-search": "4.13.1", "@algolia/requester-common": "4.13.1", "@algolia/transporter": "4.13.1" } }, "@algolia/client-common": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.13.1.tgz", "integrity": "sha512-LcDoUE0Zz3YwfXJL6lJ2OMY2soClbjrrAKB6auYVMNJcoKZZ2cbhQoFR24AYoxnGUYBER/8B+9sTBj5bj/Gqbg==", "requires": { "@algolia/requester-common": "4.13.1", "@algolia/transporter": "4.13.1" } }, "@algolia/client-personalization": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.13.1.tgz", "integrity": "sha512-1CqrOW1ypVrB4Lssh02hP//YxluoIYXAQCpg03L+/RiXJlCs+uIqlzC0ctpQPmxSlTK6h07kr50JQoYH/TIM9w==", "requires": { "@algolia/client-common": "4.13.1", "@algolia/requester-common": "4.13.1", "@algolia/transporter": "4.13.1" } }, "@algolia/client-search": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.13.1.tgz", "integrity": "sha512-YQKYA83MNRz3FgTNM+4eRYbSmHi0WWpo019s5SeYcL3HUan/i5R09VO9dk3evELDFJYciiydSjbsmhBzbpPP2A==", "requires": { "@algolia/client-common": "4.13.1", "@algolia/requester-common": "4.13.1", "@algolia/transporter": "4.13.1" } }, "@algolia/events": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@algolia/events/-/events-4.0.1.tgz", "integrity": "sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ==" }, "@algolia/logger-common": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.13.1.tgz", "integrity": "sha512-L6slbL/OyZaAXNtS/1A8SAbOJeEXD5JcZeDCPYDqSTYScfHu+2ePRTDMgUTY4gQ7HsYZ39N1LujOd8WBTmM2Aw==" }, "@algolia/logger-console": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.13.1.tgz", "integrity": 
"sha512-7jQOTftfeeLlnb3YqF8bNgA2GZht7rdKkJ31OCeSH2/61haO0tWPoNRjZq9XLlgMQZH276pPo0NdiArcYPHjCA==", "requires": { "@algolia/logger-common": "4.13.1" } }, "@algolia/requester-browser-xhr": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.13.1.tgz", "integrity": "sha512-oa0CKr1iH6Nc7CmU6RE7TnXMjHnlyp7S80pP/LvZVABeJHX3p/BcSCKovNYWWltgTxUg0U1o+2uuy8BpMKljwA==", "requires": { "@algolia/requester-common": "4.13.1" } }, "@algolia/requester-common": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.13.1.tgz", "integrity": "sha512-eGVf0ID84apfFEuXsaoSgIxbU3oFsIbz4XiotU3VS8qGCJAaLVUC5BUJEkiFENZIhon7hIB4d0RI13HY4RSA+w==" }, "@algolia/requester-node-http": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.13.1.tgz", "integrity": "sha512-7C0skwtLdCz5heKTVe/vjvrqgL/eJxmiEjHqXdtypcE5GCQCYI15cb+wC4ytYioZDMiuDGeVYmCYImPoEgUGPw==", "requires": { "@algolia/requester-common": "4.13.1" } }, "@algolia/transporter": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.13.1.tgz", "integrity": "sha512-pICnNQN7TtrcYJqqPEXByV8rJ8ZRU2hCiIKLTLRyNpghtQG3VAFk6fVtdzlNfdUGZcehSKGarPIZEHlQXnKjgw==", "requires": { "@algolia/cache-common": "4.13.1", "@algolia/logger-common": "4.13.1", "@algolia/requester-common": "4.13.1" } }, "@ampproject/remapping": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==", "requires": { "@jridgewell/gen-mapping": "^0.1.0", "@jridgewell/trace-mapping": "^0.3.9" } }, "@babel/code-frame": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", "integrity": 
"sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", "requires": { "@babel/highlight": "^7.18.6" } }, "@babel/compat-data": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.18.8.tgz", "integrity": "sha512-HSmX4WZPPK3FUxYp7g2T6EyO8j96HlZJlxmKPSh6KAcqwyDrfx7hKjXpAW/0FhFfTJsR0Yt4lAjLI2coMptIHQ==" }, "@babel/core": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.18.6.tgz", "integrity": "sha512-cQbWBpxcbbs/IUredIPkHiAGULLV8iwgNRMFzvbhEXISp4f3rUUXE5+TIw6KwUWUR3DwyI6gmBRnmAtYaWehwQ==", "requires": { "@ampproject/remapping": "^2.1.0", "@babel/code-frame": "^7.18.6", "@babel/generator": "^7.18.6", "@babel/helper-compilation-targets": "^7.18.6", "@babel/helper-module-transforms": "^7.18.6", "@babel/helpers": "^7.18.6", "@babel/parser": "^7.18.6", "@babel/template": "^7.18.6", "@babel/traverse": "^7.18.6", "@babel/types": "^7.18.6", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.1", "semver": "^6.3.0" }, "dependencies": { "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "@babel/generator": { "version": "7.18.7", "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.18.7.tgz", "integrity": "sha512-shck+7VLlY72a2w9c3zYWuE1pwOKEiQHV7GTUbSnhyl5eu3i04t30tBY82ZRWrDfo3gkakCFtevExnxbkf2a3A==", "requires": { "@babel/types": "^7.18.7", "@jridgewell/gen-mapping": "^0.3.2", "jsesc": "^2.5.1" }, "dependencies": { "@jridgewell/gen-mapping": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", "requires": { "@jridgewell/set-array": "^1.0.1", 
"@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.9" } } } }, "@babel/helper-annotate-as-pure": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz", "integrity": "sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==", "requires": { "@babel/types": "^7.18.6" } }, "@babel/helper-builder-binary-assignment-operator-visitor": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.6.tgz", "integrity": "sha512-KT10c1oWEpmrIRYnthbzHgoOf6B+Xd6a5yhdbNtdhtG7aO1or5HViuf1TQR36xY/QprXA5nvxO6nAjhJ4y38jw==", "requires": { "@babel/helper-explode-assignable-expression": "^7.18.6", "@babel/types": "^7.18.6" } }, "@babel/helper-compilation-targets": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.18.6.tgz", "integrity": "sha512-vFjbfhNCzqdeAtZflUFrG5YIFqGTqsctrtkZ1D/NB0mDW9TwW3GmmUepYY4G9wCET5rY5ugz4OGTcLd614IzQg==", "requires": { "@babel/compat-data": "^7.18.6", "@babel/helper-validator-option": "^7.18.6", "browserslist": "^4.20.2", "semver": "^6.3.0" }, "dependencies": { "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "@babel/helper-create-class-features-plugin": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.18.6.tgz", "integrity": "sha512-YfDzdnoxHGV8CzqHGyCbFvXg5QESPFkXlHtvdCkesLjjVMT2Adxe4FGUR5ChIb3DxSaXO12iIOCWoXdsUVwnqw==", "requires": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-function-name": 
"^7.18.6", "@babel/helper-member-expression-to-functions": "^7.18.6", "@babel/helper-optimise-call-expression": "^7.18.6", "@babel/helper-replace-supers": "^7.18.6", "@babel/helper-split-export-declaration": "^7.18.6" } }, "@babel/helper-create-regexp-features-plugin": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.18.6.tgz", "integrity": "sha512-7LcpH1wnQLGrI+4v+nPp+zUvIkF9x0ddv1Hkdue10tg3gmRnLy97DXh4STiOf1qeIInyD69Qv5kKSZzKD8B/7A==", "requires": { "@babel/helper-annotate-as-pure": "^7.18.6", "regexpu-core": "^5.1.0" } }, "@babel/helper-define-polyfill-provider": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz", "integrity": "sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA==", "requires": { "@babel/helper-compilation-targets": "^7.13.0", "@babel/helper-module-imports": "^7.12.13", "@babel/helper-plugin-utils": "^7.13.0", "@babel/traverse": "^7.13.0", "debug": "^4.1.1", "lodash.debounce": "^4.0.8", "resolve": "^1.14.2", "semver": "^6.1.2" }, "dependencies": { "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "@babel/helper-environment-visitor": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.6.tgz", "integrity": "sha512-8n6gSfn2baOY+qlp+VSzsosjCVGFqWKmDF0cCWOybh52Dw3SEyoWR1KrhMJASjLwIEkkAufZ0xvr+SxLHSpy2Q==" }, "@babel/helper-explode-assignable-expression": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz", "integrity": 
"sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg==", "requires": { "@babel/types": "^7.18.6" } }, "@babel/helper-function-name": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.18.6.tgz", "integrity": "sha512-0mWMxV1aC97dhjCah5U5Ua7668r5ZmSC2DLfH2EZnf9c3/dHZKiFa5pRLMH5tjSl471tY6496ZWk/kjNONBxhw==", "requires": { "@babel/template": "^7.18.6", "@babel/types": "^7.18.6" } }, "@babel/helper-hoist-variables": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", "requires": { "@babel/types": "^7.18.6" } }, "@babel/helper-member-expression-to-functions": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.6.tgz", "integrity": "sha512-CeHxqwwipekotzPDUuJOfIMtcIHBuc7WAzLmTYWctVigqS5RktNMQ5bEwQSuGewzYnCtTWa3BARXeiLxDTv+Ng==", "requires": { "@babel/types": "^7.18.6" } }, "@babel/helper-module-imports": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz", "integrity": "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==", "requires": { "@babel/types": "^7.18.6" } }, "@babel/helper-module-transforms": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.18.8.tgz", "integrity": "sha512-che3jvZwIcZxrwh63VfnFTUzcAM9v/lznYkkRxIBGMPt1SudOKHAEec0SIRCfiuIzTcF7VGj/CaTT6gY4eWxvA==", "requires": { "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-module-imports": "^7.18.6", "@babel/helper-simple-access": "^7.18.6", "@babel/helper-split-export-declaration": "^7.18.6", 
"@babel/helper-validator-identifier": "^7.18.6", "@babel/template": "^7.18.6", "@babel/traverse": "^7.18.8", "@babel/types": "^7.18.8" } }, "@babel/helper-optimise-call-expression": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz", "integrity": "sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==", "requires": { "@babel/types": "^7.18.6" } }, "@babel/helper-plugin-utils": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.18.6.tgz", "integrity": "sha512-gvZnm1YAAxh13eJdkb9EWHBnF3eAub3XTLCZEehHT2kWxiKVRL64+ae5Y6Ivne0mVHmMYKT+xWgZO+gQhuLUBg==" }, "@babel/helper-remap-async-to-generator": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.6.tgz", "integrity": "sha512-z5wbmV55TveUPZlCLZvxWHtrjuJd+8inFhk7DG0WW87/oJuGDcjDiu7HIvGcpf5464L6xKCg3vNkmlVVz9hwyQ==", "requires": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-wrap-function": "^7.18.6", "@babel/types": "^7.18.6" } }, "@babel/helper-replace-supers": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.18.6.tgz", "integrity": "sha512-fTf7zoXnUGl9gF25fXCWE26t7Tvtyn6H4hkLSYhATwJvw2uYxd3aoXplMSe0g9XbwK7bmxNes7+FGO0rB/xC0g==", "requires": { "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-member-expression-to-functions": "^7.18.6", "@babel/helper-optimise-call-expression": "^7.18.6", "@babel/traverse": "^7.18.6", "@babel/types": "^7.18.6" } }, "@babel/helper-simple-access": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz", "integrity": 
"sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==", "requires": { "@babel/types": "^7.18.6" } }, "@babel/helper-skip-transparent-expression-wrappers": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.18.6.tgz", "integrity": "sha512-4KoLhwGS9vGethZpAhYnMejWkX64wsnHPDwvOsKWU6Fg4+AlK2Jz3TyjQLMEPvz+1zemi/WBdkYxCD0bAfIkiw==", "requires": { "@babel/types": "^7.18.6" } }, "@babel/helper-split-export-declaration": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", "requires": { "@babel/types": "^7.18.6" } }, "@babel/helper-validator-identifier": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.18.6.tgz", "integrity": "sha512-MmetCkz9ej86nJQV+sFCxoGGrUbU3q02kgLciwkrt9QqEB7cP39oKEY0PakknEO0Gu20SskMRi+AYZ3b1TpN9g==" }, "@babel/helper-validator-option": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz", "integrity": "sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==" }, "@babel/helper-wrap-function": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.18.6.tgz", "integrity": "sha512-I5/LZfozwMNbwr/b1vhhuYD+J/mU+gfGAj5td7l5Rv9WYmH6i3Om69WGKNmlIpsVW/mF6O5bvTKbvDQZVgjqOw==", "requires": { "@babel/helper-function-name": "^7.18.6", "@babel/template": "^7.18.6", "@babel/traverse": "^7.18.6", "@babel/types": "^7.18.6" } }, "@babel/helpers": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.18.6.tgz", "integrity": 
"sha512-vzSiiqbQOghPngUYt/zWGvK3LAsPhz55vc9XNN0xAl2gV4ieShI2OQli5duxWHD+72PZPTKAcfcZDE1Cwc5zsQ==", "requires": { "@babel/template": "^7.18.6", "@babel/traverse": "^7.18.6", "@babel/types": "^7.18.6" } }, "@babel/highlight": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", "requires": { "@babel/helper-validator-identifier": "^7.18.6", "chalk": "^2.0.0", "js-tokens": "^4.0.0" }, "dependencies": { "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "requires": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" } } } }, "@babel/parser": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.18.8.tgz", "integrity": "sha512-RSKRfYX20dyH+elbJK2uqAkVyucL+xXzhqlMD5/ZXx+dAAwpyB7HsvnHe/ZUGOF+xLr5Wx9/JoXVTj6BQE2/oA==" }, "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz", "integrity": "sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.6.tgz", "integrity": "sha512-Udgu8ZRgrBrttVz6A0EVL0SJ1z+RLbIeqsu632SA1hf0awEppD6TvdznoH+orIF8wtFFAV/Enmw9Y+9oV8TQcw==", "requires": { 
"@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-skip-transparent-expression-wrappers": "^7.18.6", "@babel/plugin-proposal-optional-chaining": "^7.18.6" } }, "@babel/plugin-proposal-async-generator-functions": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.18.6.tgz", "integrity": "sha512-WAz4R9bvozx4qwf74M+sfqPMKfSqwM0phxPTR6iJIi8robgzXwkEgmeJG1gEKhm6sDqT/U9aV3lfcqybIpev8w==", "requires": { "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-remap-async-to-generator": "^7.18.6", "@babel/plugin-syntax-async-generators": "^7.8.4" } }, "@babel/plugin-proposal-class-properties": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz", "integrity": "sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==", "requires": { "@babel/helper-create-class-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-proposal-class-static-block": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz", "integrity": "sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw==", "requires": { "@babel/helper-create-class-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-class-static-block": "^7.14.5" } }, "@babel/plugin-proposal-dynamic-import": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz", "integrity": "sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", 
"@babel/plugin-syntax-dynamic-import": "^7.8.3" } }, "@babel/plugin-proposal-export-namespace-from": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.6.tgz", "integrity": "sha512-zr/QcUlUo7GPo6+X1wC98NJADqmy5QTFWWhqeQWiki4XHafJtLl/YMGkmRB2szDD2IYJCCdBTd4ElwhId9T7Xw==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-export-namespace-from": "^7.8.3" } }, "@babel/plugin-proposal-json-strings": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz", "integrity": "sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-json-strings": "^7.8.3" } }, "@babel/plugin-proposal-logical-assignment-operators": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.6.tgz", "integrity": "sha512-zMo66azZth/0tVd7gmkxOkOjs2rpHyhpcFo565PUP37hSp6hSd9uUKIfTDFMz58BwqgQKhJ9YxtM5XddjXVn+Q==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" } }, "@babel/plugin-proposal-nullish-coalescing-operator": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz", "integrity": "sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" } }, "@babel/plugin-proposal-numeric-separator": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz", "integrity": "sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-numeric-separator": "^7.10.4" } }, "@babel/plugin-proposal-object-rest-spread": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.6.tgz", "integrity": "sha512-9yuM6wr4rIsKa1wlUAbZEazkCrgw2sMPEXCr4Rnwetu7cEW1NydkCWytLuYletbf8vFxdJxFhwEZqMpOx2eZyw==", "requires": { "@babel/compat-data": "^7.18.6", "@babel/helper-compilation-targets": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", "@babel/plugin-transform-parameters": "^7.18.6" } }, "@babel/plugin-proposal-optional-catch-binding": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz", "integrity": "sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" } }, "@babel/plugin-proposal-optional-chaining": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.6.tgz", "integrity": "sha512-PatI6elL5eMzoypFAiYDpYQyMtXTn+iMhuxxQt5mAXD4fEmKorpSI3PHd+i3JXBJN3xyA6MvJv7at23HffFHwA==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-skip-transparent-expression-wrappers": "^7.18.6", "@babel/plugin-syntax-optional-chaining": "^7.8.3" } }, "@babel/plugin-proposal-private-methods": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz", "integrity": "sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==", "requires": { "@babel/helper-create-class-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-proposal-private-property-in-object": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz", "integrity": "sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw==", "requires": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-create-class-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-private-property-in-object": "^7.14.5" } }, "@babel/plugin-proposal-unicode-property-regex": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz", "integrity": "sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w==", "requires": { "@babel/helper-create-regexp-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-syntax-async-generators": { "version": "7.8.4", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-class-properties": { "version": "7.12.13", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", 
"requires": { "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-syntax-class-static-block": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", "requires": { "@babel/helper-plugin-utils": "^7.14.5" } }, "@babel/plugin-syntax-dynamic-import": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-export-namespace-from": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", "requires": { "@babel/helper-plugin-utils": "^7.8.3" } }, "@babel/plugin-syntax-import-assertions": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.18.6.tgz", "integrity": "sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-syntax-json-strings": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-jsx": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz", "integrity": "sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-syntax-logical-assignment-operators": { "version": "7.10.4", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", "requires": { "@babel/helper-plugin-utils": "^7.10.4" } }, "@babel/plugin-syntax-nullish-coalescing-operator": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-numeric-separator": { "version": "7.10.4", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", "requires": { "@babel/helper-plugin-utils": "^7.10.4" } }, "@babel/plugin-syntax-object-rest-spread": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-optional-catch-binding": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", "integrity": 
"sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-optional-chaining": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-private-property-in-object": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", "requires": { "@babel/helper-plugin-utils": "^7.14.5" } }, "@babel/plugin-syntax-top-level-await": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", "requires": { "@babel/helper-plugin-utils": "^7.14.5" } }, "@babel/plugin-syntax-typescript": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz", "integrity": "sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-arrow-functions": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz", "integrity": "sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, 
"@babel/plugin-transform-async-to-generator": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz", "integrity": "sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag==", "requires": { "@babel/helper-module-imports": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-remap-async-to-generator": "^7.18.6" } }, "@babel/plugin-transform-block-scoped-functions": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz", "integrity": "sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-block-scoping": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.18.6.tgz", "integrity": "sha512-pRqwb91C42vs1ahSAWJkxOxU1RHWDn16XAa6ggQ72wjLlWyYeAcLvTtE0aM8ph3KNydy9CQF2nLYcjq1WysgxQ==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-classes": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.18.8.tgz", "integrity": "sha512-RySDoXdF6hgHSHuAW4aLGyVQdmvEX/iJtjVre52k0pxRq4hzqze+rAVP++NmNv596brBpYmaiKgTZby7ziBnVg==", "requires": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-function-name": "^7.18.6", "@babel/helper-optimise-call-expression": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-replace-supers": "^7.18.6", "@babel/helper-split-export-declaration": "^7.18.6", "globals": "^11.1.0" } }, "@babel/plugin-transform-computed-properties": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.6.tgz", "integrity": "sha512-9repI4BhNrR0KenoR9vm3/cIc1tSBIo+u1WVjKCAynahj25O8zfbiE6JtAtHPGQSs4yZ+bA8mRasRP+qc+2R5A==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-destructuring": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.18.6.tgz", "integrity": "sha512-tgy3u6lRp17ilY8r1kP4i2+HDUwxlVqq3RTc943eAWSzGgpU1qhiKpqZ5CMyHReIYPHdo3Kg8v8edKtDqSVEyQ==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-dotall-regex": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz", "integrity": "sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg==", "requires": { "@babel/helper-create-regexp-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-duplicate-keys": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.6.tgz", "integrity": "sha512-NJU26U/208+sxYszf82nmGYqVF9QN8py2HFTblPT9hbawi8+1C5a9JubODLTGFuT0qlkqVinmkwOD13s0sZktg==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-exponentiation-operator": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz", "integrity": "sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw==", "requires": { "@babel/helper-builder-binary-assignment-operator-visitor": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-for-of": { "version": "7.18.8", "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz", "integrity": "sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-function-name": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.6.tgz", "integrity": "sha512-kJha/Gbs5RjzIu0CxZwf5e3aTTSlhZnHMT8zPWnJMjNpLOUgqevg+PN5oMH68nMCXnfiMo4Bhgxqj59KHTlAnA==", "requires": { "@babel/helper-compilation-targets": "^7.18.6", "@babel/helper-function-name": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-literals": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.6.tgz", "integrity": "sha512-x3HEw0cJZVDoENXOp20HlypIHfl0zMIhMVZEBVTfmqbObIpsMxMbmU5nOEO8R7LYT+z5RORKPlTI5Hj4OsO9/Q==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-member-expression-literals": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz", "integrity": "sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-modules-amd": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.6.tgz", "integrity": "sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg==", "requires": { "@babel/helper-module-transforms": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "babel-plugin-dynamic-import-node": "^2.3.3" } }, "@babel/plugin-transform-modules-commonjs": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.6.tgz", "integrity": "sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q==", "requires": { "@babel/helper-module-transforms": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-simple-access": "^7.18.6", "babel-plugin-dynamic-import-node": "^2.3.3" } }, "@babel/plugin-transform-modules-systemjs": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.18.6.tgz", "integrity": "sha512-UbPYpXxLjTw6w6yXX2BYNxF3p6QY225wcTkfQCy3OMnSlS/C3xGtwUjEzGkldb/sy6PWLiCQ3NbYfjWUTI3t4g==", "requires": { "@babel/helper-hoist-variables": "^7.18.6", "@babel/helper-module-transforms": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-validator-identifier": "^7.18.6", "babel-plugin-dynamic-import-node": "^2.3.3" } }, "@babel/plugin-transform-modules-umd": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz", "integrity": "sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ==", "requires": { "@babel/helper-module-transforms": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-named-capturing-groups-regex": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.18.6.tgz", "integrity": "sha512-UmEOGF8XgaIqD74bC8g7iV3RYj8lMf0Bw7NJzvnS9qQhM4mg+1WHKotUIdjxgD2RGrgFLZZPCFPFj3P/kVDYhg==", "requires": { "@babel/helper-create-regexp-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-new-target": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz", "integrity": "sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-object-super": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz", "integrity": "sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-replace-supers": "^7.18.6" } }, "@babel/plugin-transform-parameters": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.18.8.tgz", "integrity": "sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-property-literals": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz", "integrity": "sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-react-constant-elements": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.14.5.tgz", "integrity": "sha512-NBqLEx1GxllIOXJInJAQbrnwwYJsV3WaMHIcOwD8rhYS0AabTWn7kHdHgPgu5RmHLU0q4DMxhAMu8ue/KampgQ==", "requires": { "@babel/helper-plugin-utils": "^7.14.5" } }, "@babel/plugin-transform-react-display-name": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz", 
"integrity": "sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-react-jsx": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.18.6.tgz", "integrity": "sha512-Mz7xMPxoy9kPS/JScj6fJs03TZ/fZ1dJPlMjRAgTaxaS0fUBk8FV/A2rRgfPsVCZqALNwMexD+0Uaf5zlcKPpw==", "requires": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-module-imports": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-jsx": "^7.18.6", "@babel/types": "^7.18.6" } }, "@babel/plugin-transform-react-jsx-development": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz", "integrity": "sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA==", "requires": { "@babel/plugin-transform-react-jsx": "^7.18.6" } }, "@babel/plugin-transform-react-pure-annotations": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz", "integrity": "sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ==", "requires": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-regenerator": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz", "integrity": "sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", "regenerator-transform": "^0.15.0" } }, "@babel/plugin-transform-reserved-words": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz", "integrity": "sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-runtime": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.18.6.tgz", "integrity": "sha512-8uRHk9ZmRSnWqUgyae249EJZ94b0yAGLBIqzZzl+0iEdbno55Pmlt/32JZsHwXD9k/uZj18Aqqk35wBX4CBTXA==", "requires": { "@babel/helper-module-imports": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "babel-plugin-polyfill-corejs2": "^0.3.1", "babel-plugin-polyfill-corejs3": "^0.5.2", "babel-plugin-polyfill-regenerator": "^0.3.1", "semver": "^6.3.0" }, "dependencies": { "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "@babel/plugin-transform-shorthand-properties": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz", "integrity": "sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-spread": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.18.6.tgz", "integrity": "sha512-ayT53rT/ENF8WWexIRg9AiV9h0aIteyWn5ptfZTZQrjk/+f3WdrJGCY4c9wcgl2+MKkKPhzbYp97FTsquZpDCw==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-skip-transparent-expression-wrappers": "^7.18.6" } }, "@babel/plugin-transform-sticky-regex": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz", "integrity": "sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-template-literals": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.6.tgz", "integrity": "sha512-UuqlRrQmT2SWRvahW46cGSany0uTlcj8NYOS5sRGYi8FxPYPoLd5DDmMd32ZXEj2Jq+06uGVQKHxa/hJx2EzKw==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-typeof-symbol": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.6.tgz", "integrity": "sha512-7m71iS/QhsPk85xSjFPovHPcH3H9qeyzsujhTc+vcdnsXavoWYJ74zx0lP5RhpC5+iDnVLO+PPMHzC11qels1g==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-typescript": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.18.8.tgz", "integrity": "sha512-p2xM8HI83UObjsZGofMV/EdYjamsDm6MoN3hXPYIT0+gxIoopE+B7rPYKAxfrz9K9PK7JafTTjqYC6qipLExYA==", "requires": { "@babel/helper-create-class-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-typescript": "^7.18.6" } }, "@babel/plugin-transform-unicode-escapes": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.6.tgz", "integrity": "sha512-XNRwQUXYMP7VLuy54cr/KS/WeL3AZeORhrmeZ7iewgu+X2eBqmpaLI/hzqr9ZxCeUoq0ASK4GUzSM0BDhZkLFw==", "requires": { "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-unicode-regex": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz", "integrity": "sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA==", "requires": { "@babel/helper-create-regexp-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/preset-env": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.18.6.tgz", "integrity": "sha512-WrthhuIIYKrEFAwttYzgRNQ5hULGmwTj+D6l7Zdfsv5M7IWV/OZbUfbeL++Qrzx1nVJwWROIFhCHRYQV4xbPNw==", "requires": { "@babel/compat-data": "^7.18.6", "@babel/helper-compilation-targets": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-validator-option": "^7.18.6", "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.18.6", "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.18.6", "@babel/plugin-proposal-async-generator-functions": "^7.18.6", "@babel/plugin-proposal-class-properties": "^7.18.6", "@babel/plugin-proposal-class-static-block": "^7.18.6", "@babel/plugin-proposal-dynamic-import": "^7.18.6", "@babel/plugin-proposal-export-namespace-from": "^7.18.6", "@babel/plugin-proposal-json-strings": "^7.18.6", "@babel/plugin-proposal-logical-assignment-operators": "^7.18.6", "@babel/plugin-proposal-nullish-coalescing-operator": "^7.18.6", "@babel/plugin-proposal-numeric-separator": "^7.18.6", "@babel/plugin-proposal-object-rest-spread": "^7.18.6", "@babel/plugin-proposal-optional-catch-binding": "^7.18.6", "@babel/plugin-proposal-optional-chaining": "^7.18.6", "@babel/plugin-proposal-private-methods": "^7.18.6", "@babel/plugin-proposal-private-property-in-object": "^7.18.6", "@babel/plugin-proposal-unicode-property-regex": "^7.18.6", "@babel/plugin-syntax-async-generators": "^7.8.4", "@babel/plugin-syntax-class-properties": "^7.12.13", "@babel/plugin-syntax-class-static-block": "^7.14.5", "@babel/plugin-syntax-dynamic-import": 
"^7.8.3", "@babel/plugin-syntax-export-namespace-from": "^7.8.3", "@babel/plugin-syntax-import-assertions": "^7.18.6", "@babel/plugin-syntax-json-strings": "^7.8.3", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", "@babel/plugin-syntax-numeric-separator": "^7.10.4", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", "@babel/plugin-syntax-optional-chaining": "^7.8.3", "@babel/plugin-syntax-private-property-in-object": "^7.14.5", "@babel/plugin-syntax-top-level-await": "^7.14.5", "@babel/plugin-transform-arrow-functions": "^7.18.6", "@babel/plugin-transform-async-to-generator": "^7.18.6", "@babel/plugin-transform-block-scoped-functions": "^7.18.6", "@babel/plugin-transform-block-scoping": "^7.18.6", "@babel/plugin-transform-classes": "^7.18.6", "@babel/plugin-transform-computed-properties": "^7.18.6", "@babel/plugin-transform-destructuring": "^7.18.6", "@babel/plugin-transform-dotall-regex": "^7.18.6", "@babel/plugin-transform-duplicate-keys": "^7.18.6", "@babel/plugin-transform-exponentiation-operator": "^7.18.6", "@babel/plugin-transform-for-of": "^7.18.6", "@babel/plugin-transform-function-name": "^7.18.6", "@babel/plugin-transform-literals": "^7.18.6", "@babel/plugin-transform-member-expression-literals": "^7.18.6", "@babel/plugin-transform-modules-amd": "^7.18.6", "@babel/plugin-transform-modules-commonjs": "^7.18.6", "@babel/plugin-transform-modules-systemjs": "^7.18.6", "@babel/plugin-transform-modules-umd": "^7.18.6", "@babel/plugin-transform-named-capturing-groups-regex": "^7.18.6", "@babel/plugin-transform-new-target": "^7.18.6", "@babel/plugin-transform-object-super": "^7.18.6", "@babel/plugin-transform-parameters": "^7.18.6", "@babel/plugin-transform-property-literals": "^7.18.6", "@babel/plugin-transform-regenerator": "^7.18.6", "@babel/plugin-transform-reserved-words": "^7.18.6", 
"@babel/plugin-transform-shorthand-properties": "^7.18.6", "@babel/plugin-transform-spread": "^7.18.6", "@babel/plugin-transform-sticky-regex": "^7.18.6", "@babel/plugin-transform-template-literals": "^7.18.6", "@babel/plugin-transform-typeof-symbol": "^7.18.6", "@babel/plugin-transform-unicode-escapes": "^7.18.6", "@babel/plugin-transform-unicode-regex": "^7.18.6", "@babel/preset-modules": "^0.1.5", "@babel/types": "^7.18.6", "babel-plugin-polyfill-corejs2": "^0.3.1", "babel-plugin-polyfill-corejs3": "^0.5.2", "babel-plugin-polyfill-regenerator": "^0.3.1", "core-js-compat": "^3.22.1", "semver": "^6.3.0" }, "dependencies": { "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "@babel/preset-modules": { "version": "0.1.5", "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz", "integrity": "sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==", "requires": { "@babel/helper-plugin-utils": "^7.0.0", "@babel/plugin-proposal-unicode-property-regex": "^7.4.4", "@babel/plugin-transform-dotall-regex": "^7.4.4", "@babel/types": "^7.4.4", "esutils": "^2.0.2" } }, "@babel/preset-react": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.18.6.tgz", "integrity": "sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-validator-option": "^7.18.6", "@babel/plugin-transform-react-display-name": "^7.18.6", "@babel/plugin-transform-react-jsx": "^7.18.6", "@babel/plugin-transform-react-jsx-development": "^7.18.6", "@babel/plugin-transform-react-pure-annotations": "^7.18.6" } }, "@babel/preset-typescript": { "version": "7.18.6", "resolved": 
"https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.18.6.tgz", "integrity": "sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ==", "requires": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-validator-option": "^7.18.6", "@babel/plugin-transform-typescript": "^7.18.6" } }, "@babel/runtime": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.18.6.tgz", "integrity": "sha512-t9wi7/AW6XtKahAe20Yw0/mMljKq0B1r2fPdvaAdV/KPDZewFXdaaa6K7lxmZBZ8FBNpCiAT6iHPmd6QO9bKfQ==", "requires": { "regenerator-runtime": "^0.13.4" } }, "@babel/runtime-corejs3": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.18.6.tgz", "integrity": "sha512-cOu5wH2JFBgMjje+a+fz2JNIWU4GzYpl05oSob3UDvBEh6EuIn+TXFHMmBbhSb+k/4HMzgKCQfEEDArAWNF9Cw==", "requires": { "core-js-pure": "^3.20.2", "regenerator-runtime": "^0.13.4" } }, "@babel/template": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.18.6.tgz", "integrity": "sha512-JoDWzPe+wgBsTTgdnIma3iHNFC7YVJoPssVBDjiHfNlyt4YcunDtcDOUmfVDfCK5MfdsaIoX9PkijPhjH3nYUw==", "requires": { "@babel/code-frame": "^7.18.6", "@babel/parser": "^7.18.6", "@babel/types": "^7.18.6" } }, "@babel/traverse": { "version": "7.18.8", "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.18.8.tgz", "integrity": "sha512-UNg/AcSySJYR/+mIcJQDCv00T+AqRO7j/ZEJLzpaYtgM48rMg5MnkJgyNqkzo88+p4tfRvZJCEiwwfG6h4jkRg==", "requires": { "@babel/code-frame": "^7.18.6", "@babel/generator": "^7.18.7", "@babel/helper-environment-visitor": "^7.18.6", "@babel/helper-function-name": "^7.18.6", "@babel/helper-hoist-variables": "^7.18.6", "@babel/helper-split-export-declaration": "^7.18.6", "@babel/parser": "^7.18.8", "@babel/types": "^7.18.8", "debug": "^4.1.0", "globals": "^11.1.0" } }, "@babel/types": { "version": "7.18.8", "resolved": 
"https://registry.npmjs.org/@babel/types/-/types-7.18.8.tgz", "integrity": "sha512-qwpdsmraq0aJ3osLJRApsc2ouSJCdnMeZwB0DhbtHAtRpZNZCdlbRnHIgcRKzdE1g0iOGg644fzjOBcdOz9cPw==", "requires": { "@babel/helper-validator-identifier": "^7.18.6", "to-fast-properties": "^2.0.0" } }, "@colors/colors": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", "optional": true }, "@docsearch/css": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.1.1.tgz", "integrity": "sha512-utLgg7E1agqQeqCJn05DWC7XXMk4tMUUnL7MZupcknRu2OzGN13qwey2qA/0NAKkVBGugiWtON0+rlU0QIPojg==" }, "@docsearch/react": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.1.1.tgz", "integrity": "sha512-cfoql4qvtsVRqBMYxhlGNpvyy/KlCoPqjIsJSZYqYf9AplZncKjLBTcwBu6RXFMVCe30cIFljniI4OjqAU67pQ==", "requires": { "@algolia/autocomplete-core": "1.7.1", "@algolia/autocomplete-preset-algolia": "1.7.1", "@docsearch/css": "3.1.1", "algoliasearch": "^4.0.0" } }, "@docusaurus/core": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-2.0.0-rc.1.tgz", "integrity": "sha512-b9FX0Z+EddfQ6wAiNh+Wx4fysKfcvEcWJrZ5USROn3C+EVU5P4luaa8mwWK//O+hTwD9ur7/A44IZ/tWCTAoLQ==", "requires": { "@babel/core": "^7.18.6", "@babel/generator": "^7.18.7", "@babel/plugin-syntax-dynamic-import": "^7.8.3", "@babel/plugin-transform-runtime": "^7.18.6", "@babel/preset-env": "^7.18.6", "@babel/preset-react": "^7.18.6", "@babel/preset-typescript": "^7.18.6", "@babel/runtime": "^7.18.6", "@babel/runtime-corejs3": "^7.18.6", "@babel/traverse": "^7.18.8", "@docusaurus/cssnano-preset": "2.0.0-rc.1", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/mdx-loader": "2.0.0-rc.1", "@docusaurus/react-loadable": "5.5.2", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-common": "2.0.0-rc.1", 
"@docusaurus/utils-validation": "2.0.0-rc.1", "@slorber/static-site-generator-webpack-plugin": "^4.0.7", "@svgr/webpack": "^6.2.1", "autoprefixer": "^10.4.7", "babel-loader": "^8.2.5", "babel-plugin-dynamic-import-node": "^2.3.3", "boxen": "^6.2.1", "chalk": "^4.1.2", "chokidar": "^3.5.3", "clean-css": "^5.3.0", "cli-table3": "^0.6.2", "combine-promises": "^1.1.0", "commander": "^5.1.0", "copy-webpack-plugin": "^11.0.0", "core-js": "^3.23.3", "css-loader": "^6.7.1", "css-minimizer-webpack-plugin": "^4.0.0", "cssnano": "^5.1.12", "del": "^6.1.1", "detect-port": "^1.3.0", "escape-html": "^1.0.3", "eta": "^1.12.3", "file-loader": "^6.2.0", "fs-extra": "^10.1.0", "html-minifier-terser": "^6.1.0", "html-tags": "^3.2.0", "html-webpack-plugin": "^5.5.0", "import-fresh": "^3.3.0", "leven": "^3.1.0", "lodash": "^4.17.21", "mini-css-extract-plugin": "^2.6.1", "postcss": "^8.4.14", "postcss-loader": "^7.0.0", "prompts": "^2.4.2", "react-dev-utils": "^12.0.1", "react-helmet-async": "^1.3.0", "react-loadable": "npm:@docusaurus/react-loadable@5.5.2", "react-loadable-ssr-addon-v5-slorber": "^1.0.1", "react-router": "^5.3.3", "react-router-config": "^5.1.1", "react-router-dom": "^5.3.3", "rtl-detect": "^1.0.4", "semver": "^7.3.7", "serve-handler": "^6.1.3", "shelljs": "^0.8.5", "terser-webpack-plugin": "^5.3.3", "tslib": "^2.4.0", "update-notifier": "^5.1.0", "url-loader": "^4.1.1", "wait-on": "^6.0.1", "webpack": "^5.73.0", "webpack-bundle-analyzer": "^4.5.0", "webpack-dev-server": "^4.9.3", "webpack-merge": "^5.8.0", "webpackbar": "^5.0.2" }, "dependencies": { "@svgr/babel-plugin-add-jsx-attribute": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.0.0.tgz", "integrity": "sha512-MdPdhdWLtQsjd29Wa4pABdhWbaRMACdM1h31BY+c6FghTZqNGT7pEYdBoaGeKtdTOBC/XNFQaKVj+r/Ei2ryWA==", "requires": {} }, "@svgr/babel-plugin-remove-jsx-attribute": { "version": "6.0.0", "resolved": 
"https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-6.0.0.tgz", "integrity": "sha512-aVdtfx9jlaaxc3unA6l+M9YRnKIZjOhQPthLKqmTXC8UVkBLDRGwPKo+r8n3VZN8B34+yVajzPTZ+ptTSuZZCw==", "requires": {} }, "@svgr/babel-plugin-remove-jsx-empty-expression": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-6.0.0.tgz", "integrity": "sha512-Ccj42ApsePD451AZJJf1QzTD1B/BOU392URJTeXFxSK709i0KUsGtbwyiqsKu7vsYxpTM0IA5clAKDyf9RCZyA==", "requires": {} }, "@svgr/babel-plugin-replace-jsx-attribute-value": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-6.0.0.tgz", "integrity": "sha512-88V26WGyt1Sfd1emBYmBJRWMmgarrExpKNVmI9vVozha4kqs6FzQJ/Kp5+EYli1apgX44518/0+t9+NU36lThQ==", "requires": {} }, "@svgr/babel-plugin-svg-dynamic-title": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-6.0.0.tgz", "integrity": "sha512-F7YXNLfGze+xv0KMQxrl2vkNbI9kzT9oDK55/kUuymh1ACyXkMV+VZWX1zEhSTfEKh7VkHVZGmVtHg8eTZ6PRg==", "requires": {} }, "@svgr/babel-plugin-svg-em-dimensions": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-6.0.0.tgz", "integrity": "sha512-+rghFXxdIqJNLQK08kwPBD3Z22/0b2tEZ9lKiL/yTfuyj1wW8HUXu4bo/XkogATIYuXSghVQOOCwURXzHGKyZA==", "requires": {} }, "@svgr/babel-plugin-transform-react-native-svg": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-6.0.0.tgz", "integrity": "sha512-VaphyHZ+xIKv5v0K0HCzyfAaLhPGJXSk2HkpYfXIOKb7DjLBv0soHDxNv6X0vr2titsxE7klb++u7iOf7TSrFQ==", "requires": {} }, "@svgr/babel-plugin-transform-svg-component": { "version": "6.2.0", "resolved": 
"https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-6.2.0.tgz", "integrity": "sha512-bhYIpsORb++wpsp91fymbFkf09Z/YEKR0DnFjxvN+8JHeCUD2unnh18jIMKnDJTWtvpTaGYPXELVe4OOzFI0xg==", "requires": {} }, "@svgr/babel-preset": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-6.2.0.tgz", "integrity": "sha512-4WQNY0J71JIaL03DRn0vLiz87JXx0b9dYm2aA8XHlQJQoixMl4r/soYHm8dsaJZ3jWtkCiOYy48dp9izvXhDkQ==", "requires": { "@svgr/babel-plugin-add-jsx-attribute": "^6.0.0", "@svgr/babel-plugin-remove-jsx-attribute": "^6.0.0", "@svgr/babel-plugin-remove-jsx-empty-expression": "^6.0.0", "@svgr/babel-plugin-replace-jsx-attribute-value": "^6.0.0", "@svgr/babel-plugin-svg-dynamic-title": "^6.0.0", "@svgr/babel-plugin-svg-em-dimensions": "^6.0.0", "@svgr/babel-plugin-transform-react-native-svg": "^6.0.0", "@svgr/babel-plugin-transform-svg-component": "^6.2.0" } }, "@svgr/core": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/core/-/core-6.2.1.tgz", "integrity": "sha512-NWufjGI2WUyrg46mKuySfviEJ6IxHUOm/8a3Ph38VCWSp+83HBraCQrpEM3F3dB6LBs5x8OElS8h3C0oOJaJAA==", "requires": { "@svgr/plugin-jsx": "^6.2.1", "camelcase": "^6.2.0", "cosmiconfig": "^7.0.1" } }, "@svgr/hast-util-to-babel-ast": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-6.2.1.tgz", "integrity": "sha512-pt7MMkQFDlWJVy9ULJ1h+hZBDGFfSCwlBNW1HkLnVi7jUhyEXUaGYWi1x6bM2IXuAR9l265khBT4Av4lPmaNLQ==", "requires": { "@babel/types": "^7.15.6", "entities": "^3.0.1" } }, "@svgr/plugin-jsx": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-6.2.1.tgz", "integrity": "sha512-u+MpjTsLaKo6r3pHeeSVsh9hmGRag2L7VzApWIaS8imNguqoUwDq/u6U/NDmYs/KAsrmtBjOEaAAPbwNGXXp1g==", "requires": { "@babel/core": "^7.15.5", "@svgr/babel-preset": "^6.2.0", "@svgr/hast-util-to-babel-ast": "^6.2.1", "svg-parser": "^2.0.2" } }, 
"@svgr/plugin-svgo": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-6.2.0.tgz", "integrity": "sha512-oDdMQONKOJEbuKwuy4Np6VdV6qoaLLvoY86hjvQEgU82Vx1MSWRyYms6Sl0f+NtqxLI/rDVufATbP/ev996k3Q==", "requires": { "cosmiconfig": "^7.0.1", "deepmerge": "^4.2.2", "svgo": "^2.5.0" } }, "@svgr/webpack": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/webpack/-/webpack-6.2.1.tgz", "integrity": "sha512-h09ngMNd13hnePwgXa+Y5CgOjzlCvfWLHg+MBnydEedAnuLRzUHUJmGS3o2OsrhxTOOqEsPOFt5v/f6C5Qulcw==", "requires": { "@babel/core": "^7.15.5", "@babel/plugin-transform-react-constant-elements": "^7.14.5", "@babel/preset-env": "^7.15.6", "@babel/preset-react": "^7.14.5", "@babel/preset-typescript": "^7.15.0", "@svgr/core": "^6.2.1", "@svgr/plugin-jsx": "^6.2.1", "@svgr/plugin-svgo": "^6.2.0" } }, "ansi-styles": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.1.0.tgz", "integrity": "sha512-VbqNsoz55SYGczauuup0MFUyXNQviSpFTj1RQtFzmQLk18qbVSpTFFGMT293rmDaQuKCT6InmbuEyUne4mTuxQ==" }, "boxen": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/boxen/-/boxen-6.2.1.tgz", "integrity": "sha512-H4PEsJXfFI/Pt8sjDWbHlQPx4zL/bvSQjcilJmaulGt5mLDorHOHpmdXAJcBcmru7PhYSp/cDMWRko4ZUMFkSw==", "requires": { "ansi-align": "^3.0.1", "camelcase": "^6.2.0", "chalk": "^4.1.2", "cli-boxes": "^3.0.0", "string-width": "^5.0.1", "type-fest": "^2.5.0", "widest-line": "^4.0.1", "wrap-ansi": "^8.0.1" } }, "cli-boxes": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz", "integrity": "sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==" }, "entities": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/entities/-/entities-3.0.1.tgz", "integrity": "sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==" }, "react-loadable": { "version": 
"npm:@docusaurus/react-loadable@5.5.2", "resolved": "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz", "integrity": "sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ==", "requires": { "@types/react": "*", "prop-types": "^15.6.2" } }, "semver": { "version": "7.3.7", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", "requires": { "lru-cache": "^6.0.0" } }, "string-width": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", "requires": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "strip-ansi": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", "requires": { "ansi-regex": "^6.0.1" } }, "type-fest": { "version": "2.12.2", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.12.2.tgz", "integrity": "sha512-qt6ylCGpLjZ7AaODxbpyBZSs9fCI9SkL3Z9q2oxMBQhs/uyY+VD8jHA8ULCGmWQJlBgqvO3EJeAngOHD8zQCrQ==" }, "widest-line": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz", "integrity": "sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==", "requires": { "string-width": "^5.0.1" } }, "wrap-ansi": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.0.1.tgz", "integrity": "sha512-QFF+ufAqhoYHvoHdajT/Po7KoXVBPXS2bgjIam5isfWJPfIOnQZ50JtUiVvCv/sjgacf3yRrt2ZKUZ/V4itN4g==", "requires": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } } } }, "@docusaurus/cssnano-preset": 
{ "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-2.0.0-rc.1.tgz", "integrity": "sha512-9/KmQvF+eTlMqUqG6UcXbRgxbGed/8bQInXuKEs+95/jI6jO/3xSzuRwuHHHP0naUvSVWjnNI9jngPrQerXE5w==", "requires": { "cssnano-preset-advanced": "^5.3.8", "postcss": "^8.4.14", "postcss-sort-media-queries": "^4.2.1", "tslib": "^2.4.0" } }, "@docusaurus/logger": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-2.0.0-rc.1.tgz", "integrity": "sha512-daa3g+SXuO9K60PVMiSUmDEK9Vro+Ed7i7uF8CH6QQJLcNZy/zJc0Xz62eH7ip1x77fmeb6Rg4Us1TqTFc9AbQ==", "requires": { "chalk": "^4.1.2", "tslib": "^2.4.0" } }, "@docusaurus/mdx-loader": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-2.0.0-rc.1.tgz", "integrity": "sha512-8Fg0c/ceu39knmr7w0dutm7gq3YxKYCqWVS2cB/cPATzChCCNH/AGLfBT6sz/Z4tjVXE+NyREq2pfOFvkhjVXg==", "requires": { "@babel/parser": "^7.18.8", "@babel/traverse": "^7.18.8", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@mdx-js/mdx": "^1.6.22", "escape-html": "^1.0.3", "file-loader": "^6.2.0", "fs-extra": "^10.1.0", "image-size": "^1.0.1", "mdast-util-to-string": "^2.0.0", "remark-emoji": "^2.2.0", "stringify-object": "^3.3.0", "tslib": "^2.4.0", "unified": "^9.2.2", "unist-util-visit": "^2.0.3", "url-loader": "^4.1.1", "webpack": "^5.73.0" } }, "@docusaurus/module-type-aliases": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-2.0.0-rc.1.tgz", "integrity": "sha512-la7D8ggFP8I5nOp/Epl6NqTeDWcbofPVMOaVisRxQbx5iuF9Al+AITbaDgm4CXpFLJACsqhsXD5W4BnKX8ZxfA==", "requires": { "@docusaurus/react-loadable": "5.5.2", "@docusaurus/types": "2.0.0-rc.1", "@types/history": "^4.7.11", "@types/react": "*", "@types/react-router-config": "*", "@types/react-router-dom": "*", "react-helmet-async": "*", "react-loadable": "npm:@docusaurus/react-loadable@5.5.2" }, 
"dependencies": { "react-loadable": { "version": "npm:@docusaurus/react-loadable@5.5.2", "resolved": "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz", "integrity": "sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ==", "requires": { "@types/react": "*", "prop-types": "^15.6.2" } } } }, "@docusaurus/plugin-client-redirects": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-client-redirects/-/plugin-client-redirects-2.0.0-rc.1.tgz", "integrity": "sha512-uGiK7kzQeJ+gChzIgazKMlHEonOwlmK6NEJvr44aWS6DbauVXOs/aolXZCHx8ZdKPETOpZEzSRYjU5e+QIN+HA==", "requires": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-common": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "eta": "^1.12.3", "fs-extra": "^10.1.0", "lodash": "^4.17.21", "tslib": "^2.4.0" } }, "@docusaurus/plugin-content-blog": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.0.0-rc.1.tgz", "integrity": "sha512-BVVrAGZujpjS/0rarY2o24rlylRRh2NZuM65kg0JNkkViF79SeEHsepog7IuHyoqGWPm1N/I7LpEp7k+gowZzQ==", "requires": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/mdx-loader": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-common": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "cheerio": "^1.0.0-rc.12", "feed": "^4.2.2", "fs-extra": "^10.1.0", "lodash": "^4.17.21", "reading-time": "^1.5.0", "tslib": "^2.4.0", "unist-util-visit": "^2.0.3", "utility-types": "^3.10.0", "webpack": "^5.73.0" } }, "@docusaurus/plugin-content-docs": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.0.0-rc.1.tgz", "integrity": 
"sha512-Yk5Hu6uaw3tRplzJnbDygwRhmZ3PCzEXD4SJpBA6cPC73ylfqOEh6qhiU+BWhMTtDXNhY+athk5Kycfk3DW1aQ==", "requires": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/mdx-loader": "2.0.0-rc.1", "@docusaurus/module-type-aliases": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "@types/react-router-config": "^5.0.6", "combine-promises": "^1.1.0", "fs-extra": "^10.1.0", "import-fresh": "^3.3.0", "js-yaml": "^4.1.0", "lodash": "^4.17.21", "tslib": "^2.4.0", "utility-types": "^3.10.0", "webpack": "^5.73.0" }, "dependencies": { "argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, "js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "requires": { "argparse": "^2.0.1" } } } }, "@docusaurus/plugin-content-pages": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.0.0-rc.1.tgz", "integrity": "sha512-FdO79WC5hfWDQu3/CTFLRQzTNc0e5n+HNzavm2MNkSzGV08BFJ6RAkbPbtra5CWef+6iXZav6D/tzv2jDPvLzA==", "requires": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/mdx-loader": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "fs-extra": "^10.1.0", "tslib": "^2.4.0", "webpack": "^5.73.0" } }, "@docusaurus/plugin-debug": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-2.0.0-rc.1.tgz", "integrity": "sha512-aOsyYrPMbnsyqHwsVZ+0frrMRtnYqm4eaJpG4sC/6LYAJ07IDRQ9j3GOku2dKr5GsFK1Vx7VlE6ZLwe0MaGstg==", "requires": { "@docusaurus/core": "2.0.0-rc.1", 
"@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "fs-extra": "^10.1.0", "react-json-view": "^1.21.3", "tslib": "^2.4.0" } }, "@docusaurus/plugin-google-analytics": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.0.0-rc.1.tgz", "integrity": "sha512-f+G8z5OJWfg5QqWDLIdcN2SDoK5J5Gg8HMrqCI6Pfl+rxPb5I1niA+/UkAM+kMCpnekvhSt5AWz2fgkRenkPLA==", "requires": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "tslib": "^2.4.0" } }, "@docusaurus/plugin-google-gtag": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.0.0-rc.1.tgz", "integrity": "sha512-yE1Et9hhhX9qMRnMJzpNq0854qIYiSEc2dZaXNk537HN7Q0rKkr/YONUHz2iqNYwPX2hGOY4LdpTxlMP88uVhA==", "requires": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "tslib": "^2.4.0" } }, "@docusaurus/plugin-sitemap": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.0.0-rc.1.tgz", "integrity": "sha512-5JmbNpssUF03odFM4ArvIsrO9bv7HnAJ0VtefXhh0WBpaFs8NgI3rTkCTFimvtRQjDR9U2bh23fXz2vjQQz6oA==", "requires": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-common": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "fs-extra": "^10.1.0", "sitemap": "^7.1.1", "tslib": "^2.4.0" } }, "@docusaurus/preset-classic": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-2.0.0-rc.1.tgz", "integrity": "sha512-5jjTVZkhArjyoNHwCI9x4PSG0zPmBJILjZLVrxPcHpm/K0ltkYcp6J3GxYpf5EbMuOh5+yCWM63cSshGcNOo3Q==", "requires": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/plugin-content-blog": "2.0.0-rc.1", 
"@docusaurus/plugin-content-docs": "2.0.0-rc.1", "@docusaurus/plugin-content-pages": "2.0.0-rc.1", "@docusaurus/plugin-debug": "2.0.0-rc.1", "@docusaurus/plugin-google-analytics": "2.0.0-rc.1", "@docusaurus/plugin-google-gtag": "2.0.0-rc.1", "@docusaurus/plugin-sitemap": "2.0.0-rc.1", "@docusaurus/theme-classic": "2.0.0-rc.1", "@docusaurus/theme-common": "2.0.0-rc.1", "@docusaurus/theme-search-algolia": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1" } }, "@docusaurus/react-loadable": { "version": "5.5.2", "resolved": "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz", "integrity": "sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ==", "requires": { "@types/react": "*", "prop-types": "^15.6.2" } }, "@docusaurus/theme-classic": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-2.0.0-rc.1.tgz", "integrity": "sha512-qNiz7ieeq3AC+V8TbW6S63pWLJph1CbzWDDPTqxDLHgA8VQaNaSmJM8S92pH+yKALRb9u14ogjjYYc75Nj2JmQ==", "requires": { "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/mdx-loader": "2.0.0-rc.1", "@docusaurus/module-type-aliases": "2.0.0-rc.1", "@docusaurus/plugin-content-blog": "2.0.0-rc.1", "@docusaurus/plugin-content-docs": "2.0.0-rc.1", "@docusaurus/plugin-content-pages": "2.0.0-rc.1", "@docusaurus/theme-common": "2.0.0-rc.1", "@docusaurus/theme-translations": "2.0.0-rc.1", "@docusaurus/types": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-common": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "@mdx-js/react": "^1.6.22", "clsx": "^1.2.1", "copy-text-to-clipboard": "^3.0.1", "infima": "0.2.0-alpha.42", "lodash": "^4.17.21", "nprogress": "^0.2.0", "postcss": "^8.4.14", "prism-react-renderer": "^1.3.5", "prismjs": "^1.28.0", "react-router-dom": "^5.3.3", "rtlcss": "^3.5.0", "tslib": "^2.4.0", "utility-types": "^3.10.0" } }, "@docusaurus/theme-common": { "version": "2.0.0-rc.1", "resolved": 
"https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-2.0.0-rc.1.tgz", "integrity": "sha512-1r9ZLKD9SeoCYVzWzcdR79Dia4ANlrlRjNl6uzETOEybjK6FF7yEa9Yra8EJcOCbi3coyYz5xFh/r1YHFTFHug==", "requires": { "@docusaurus/mdx-loader": "2.0.0-rc.1", "@docusaurus/module-type-aliases": "2.0.0-rc.1", "@docusaurus/plugin-content-blog": "2.0.0-rc.1", "@docusaurus/plugin-content-docs": "2.0.0-rc.1", "@docusaurus/plugin-content-pages": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@types/history": "^4.7.11", "@types/react": "*", "@types/react-router-config": "*", "clsx": "^1.2.1", "parse-numeric-range": "^1.3.0", "prism-react-renderer": "^1.3.5", "tslib": "^2.4.0", "utility-types": "^3.10.0" } }, "@docusaurus/theme-search-algolia": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.0.0-rc.1.tgz", "integrity": "sha512-H5yq6V/B4qo6GZrDKMbeSpk3T9e9K2MliDzLonRu0w3QHW9orVGe0c/lZvRbGlDZjnsOo7XGddhXXIDWGwnpaA==", "requires": { "@docsearch/react": "^3.1.1", "@docusaurus/core": "2.0.0-rc.1", "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/plugin-content-docs": "2.0.0-rc.1", "@docusaurus/theme-common": "2.0.0-rc.1", "@docusaurus/theme-translations": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "@docusaurus/utils-validation": "2.0.0-rc.1", "algoliasearch": "^4.13.1", "algoliasearch-helper": "^3.10.0", "clsx": "^1.2.1", "eta": "^1.12.3", "fs-extra": "^10.1.0", "lodash": "^4.17.21", "tslib": "^2.4.0", "utility-types": "^3.10.0" } }, "@docusaurus/theme-translations": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-2.0.0-rc.1.tgz", "integrity": "sha512-JLhNdlnbQhxVQzOnLyiCaTzKFa1lpVrM3nCrkGQKscoG2rY6ARGYMgMN2DkoH6hm7TflQ8+PE1S5MzzASeLs4Q==", "requires": { "fs-extra": "^10.1.0", "tslib": "^2.4.0" } }, "@docusaurus/types": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-2.0.0-rc.1.tgz", 
"integrity": "sha512-wX25FOZa/aKnCGA5ljWPaDpMW3TuTbs0BtjQ8WTC557p8zDvuz4r+g2/FPHsgWE0TKwUMf4usQU1m3XpJLPN+g==", "requires": { "@types/history": "^4.7.11", "@types/react": "*", "commander": "^5.1.0", "joi": "^17.6.0", "react-helmet-async": "^1.3.0", "utility-types": "^3.10.0", "webpack": "^5.73.0", "webpack-merge": "^5.8.0" } }, "@docusaurus/utils": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-2.0.0-rc.1.tgz", "integrity": "sha512-ym9I1OwIYbKs1LGaUajaA/vDG8VweJj/6YoZjHp+eDQHhTRIrHXiYoGDqorafRhftKwnA1EnyomuXpNd9bq8Gg==", "requires": { "@docusaurus/logger": "2.0.0-rc.1", "@svgr/webpack": "^6.2.1", "file-loader": "^6.2.0", "fs-extra": "^10.1.0", "github-slugger": "^1.4.0", "globby": "^11.1.0", "gray-matter": "^4.0.3", "js-yaml": "^4.1.0", "lodash": "^4.17.21", "micromatch": "^4.0.5", "resolve-pathname": "^3.0.0", "shelljs": "^0.8.5", "tslib": "^2.4.0", "url-loader": "^4.1.1", "webpack": "^5.73.0" }, "dependencies": { "@svgr/babel-plugin-add-jsx-attribute": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.0.0.tgz", "integrity": "sha512-MdPdhdWLtQsjd29Wa4pABdhWbaRMACdM1h31BY+c6FghTZqNGT7pEYdBoaGeKtdTOBC/XNFQaKVj+r/Ei2ryWA==", "requires": {} }, "@svgr/babel-plugin-remove-jsx-attribute": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-6.0.0.tgz", "integrity": "sha512-aVdtfx9jlaaxc3unA6l+M9YRnKIZjOhQPthLKqmTXC8UVkBLDRGwPKo+r8n3VZN8B34+yVajzPTZ+ptTSuZZCw==", "requires": {} }, "@svgr/babel-plugin-remove-jsx-empty-expression": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-6.0.0.tgz", "integrity": "sha512-Ccj42ApsePD451AZJJf1QzTD1B/BOU392URJTeXFxSK709i0KUsGtbwyiqsKu7vsYxpTM0IA5clAKDyf9RCZyA==", "requires": {} }, 
"@svgr/babel-plugin-replace-jsx-attribute-value": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-6.0.0.tgz", "integrity": "sha512-88V26WGyt1Sfd1emBYmBJRWMmgarrExpKNVmI9vVozha4kqs6FzQJ/Kp5+EYli1apgX44518/0+t9+NU36lThQ==", "requires": {} }, "@svgr/babel-plugin-svg-dynamic-title": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-6.0.0.tgz", "integrity": "sha512-F7YXNLfGze+xv0KMQxrl2vkNbI9kzT9oDK55/kUuymh1ACyXkMV+VZWX1zEhSTfEKh7VkHVZGmVtHg8eTZ6PRg==", "requires": {} }, "@svgr/babel-plugin-svg-em-dimensions": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-6.0.0.tgz", "integrity": "sha512-+rghFXxdIqJNLQK08kwPBD3Z22/0b2tEZ9lKiL/yTfuyj1wW8HUXu4bo/XkogATIYuXSghVQOOCwURXzHGKyZA==", "requires": {} }, "@svgr/babel-plugin-transform-react-native-svg": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-6.0.0.tgz", "integrity": "sha512-VaphyHZ+xIKv5v0K0HCzyfAaLhPGJXSk2HkpYfXIOKb7DjLBv0soHDxNv6X0vr2titsxE7klb++u7iOf7TSrFQ==", "requires": {} }, "@svgr/babel-plugin-transform-svg-component": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-6.2.0.tgz", "integrity": "sha512-bhYIpsORb++wpsp91fymbFkf09Z/YEKR0DnFjxvN+8JHeCUD2unnh18jIMKnDJTWtvpTaGYPXELVe4OOzFI0xg==", "requires": {} }, "@svgr/babel-preset": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-6.2.0.tgz", "integrity": "sha512-4WQNY0J71JIaL03DRn0vLiz87JXx0b9dYm2aA8XHlQJQoixMl4r/soYHm8dsaJZ3jWtkCiOYy48dp9izvXhDkQ==", "requires": { "@svgr/babel-plugin-add-jsx-attribute": "^6.0.0", "@svgr/babel-plugin-remove-jsx-attribute": 
"^6.0.0", "@svgr/babel-plugin-remove-jsx-empty-expression": "^6.0.0", "@svgr/babel-plugin-replace-jsx-attribute-value": "^6.0.0", "@svgr/babel-plugin-svg-dynamic-title": "^6.0.0", "@svgr/babel-plugin-svg-em-dimensions": "^6.0.0", "@svgr/babel-plugin-transform-react-native-svg": "^6.0.0", "@svgr/babel-plugin-transform-svg-component": "^6.2.0" } }, "@svgr/core": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/core/-/core-6.2.1.tgz", "integrity": "sha512-NWufjGI2WUyrg46mKuySfviEJ6IxHUOm/8a3Ph38VCWSp+83HBraCQrpEM3F3dB6LBs5x8OElS8h3C0oOJaJAA==", "requires": { "@svgr/plugin-jsx": "^6.2.1", "camelcase": "^6.2.0", "cosmiconfig": "^7.0.1" } }, "@svgr/hast-util-to-babel-ast": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-6.2.1.tgz", "integrity": "sha512-pt7MMkQFDlWJVy9ULJ1h+hZBDGFfSCwlBNW1HkLnVi7jUhyEXUaGYWi1x6bM2IXuAR9l265khBT4Av4lPmaNLQ==", "requires": { "@babel/types": "^7.15.6", "entities": "^3.0.1" } }, "@svgr/plugin-jsx": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-6.2.1.tgz", "integrity": "sha512-u+MpjTsLaKo6r3pHeeSVsh9hmGRag2L7VzApWIaS8imNguqoUwDq/u6U/NDmYs/KAsrmtBjOEaAAPbwNGXXp1g==", "requires": { "@babel/core": "^7.15.5", "@svgr/babel-preset": "^6.2.0", "@svgr/hast-util-to-babel-ast": "^6.2.1", "svg-parser": "^2.0.2" } }, "@svgr/plugin-svgo": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-6.2.0.tgz", "integrity": "sha512-oDdMQONKOJEbuKwuy4Np6VdV6qoaLLvoY86hjvQEgU82Vx1MSWRyYms6Sl0f+NtqxLI/rDVufATbP/ev996k3Q==", "requires": { "cosmiconfig": "^7.0.1", "deepmerge": "^4.2.2", "svgo": "^2.5.0" } }, "@svgr/webpack": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@svgr/webpack/-/webpack-6.2.1.tgz", "integrity": "sha512-h09ngMNd13hnePwgXa+Y5CgOjzlCvfWLHg+MBnydEedAnuLRzUHUJmGS3o2OsrhxTOOqEsPOFt5v/f6C5Qulcw==", "requires": { "@babel/core": "^7.15.5", 
"@babel/plugin-transform-react-constant-elements": "^7.14.5", "@babel/preset-env": "^7.15.6", "@babel/preset-react": "^7.14.5", "@babel/preset-typescript": "^7.15.0", "@svgr/core": "^6.2.1", "@svgr/plugin-jsx": "^6.2.1", "@svgr/plugin-svgo": "^6.2.0" } }, "argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, "entities": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/entities/-/entities-3.0.1.tgz", "integrity": "sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==" }, "js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "requires": { "argparse": "^2.0.1" } } } }, "@docusaurus/utils-common": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-2.0.0-rc.1.tgz", "integrity": "sha512-+iZICpeFPZJ9oGJXuG92WTWee6WRnVx5BdzlcfuKf/f5KQX8PvwXR2tDME78FGGhShB8zr+vjuNEXuLvXT7j2A==", "requires": { "tslib": "^2.4.0" } }, "@docusaurus/utils-validation": { "version": "2.0.0-rc.1", "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-2.0.0-rc.1.tgz", "integrity": "sha512-lj36gm9Ksu4tt/EUeLDWoMbXe3sfBxeIPIUUdqYcBYkF/rpQkh+uL/dncjNGiw6uvBOqXhOfsFVP045HtgShVw==", "requires": { "@docusaurus/logger": "2.0.0-rc.1", "@docusaurus/utils": "2.0.0-rc.1", "joi": "^17.6.0", "js-yaml": "^4.1.0", "tslib": "^2.4.0" }, "dependencies": { "argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, "js-yaml": { "version": "4.1.0", "resolved": 
"https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "requires": { "argparse": "^2.0.1" } } } }, "@hapi/hoek": { "version": "9.3.0", "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz", "integrity": "sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==" }, "@hapi/topo": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz", "integrity": "sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==", "requires": { "@hapi/hoek": "^9.0.0" } }, "@jridgewell/gen-mapping": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz", "integrity": "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==", "requires": { "@jridgewell/set-array": "^1.0.0", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "@jridgewell/resolve-uri": { "version": "3.0.6", "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.0.6.tgz", "integrity": "sha512-R7xHtBSNm+9SyvpJkdQl+qrM3Hm2fea3Ef197M3mUug+v+yR+Rhfbs7PBtcBUVnIWJ4JcAdjvij+c8hXS9p5aw==" }, "@jridgewell/set-array": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.0.tgz", "integrity": "sha512-SfJxIxNVYLTsKwzB3MoOQ1yxf4w/E6MdkvTgrgAt1bfxjSrLUoHMKrDOykwN14q65waezZIdqDneUIPh4/sKxg==" }, "@jridgewell/sourcemap-codec": { "version": "1.4.11", "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.11.tgz", "integrity": "sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg==" }, "@jridgewell/trace-mapping": { "version": "0.3.9", "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", "integrity": 
"sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", "requires": { "@jridgewell/resolve-uri": "^3.0.3", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "@leichtgewicht/ip-codec": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz", "integrity": "sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A==" }, "@mdx-js/mdx": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-1.6.22.tgz", "integrity": "sha512-AMxuLxPz2j5/6TpF/XSdKpQP1NlG0z11dFOlq+2IP/lSgl11GY8ji6S/rgsViN/L0BDvHvUMruRb7ub+24LUYA==", "requires": { "@babel/core": "7.12.9", "@babel/plugin-syntax-jsx": "7.12.1", "@babel/plugin-syntax-object-rest-spread": "7.8.3", "@mdx-js/util": "1.6.22", "babel-plugin-apply-mdx-type-prop": "1.6.22", "babel-plugin-extract-import-names": "1.6.22", "camelcase-css": "2.0.1", "detab": "2.0.4", "hast-util-raw": "6.0.1", "lodash.uniq": "4.5.0", "mdast-util-to-hast": "10.0.1", "remark-footnotes": "2.0.0", "remark-mdx": "1.6.22", "remark-parse": "8.0.3", "remark-squeeze-paragraphs": "4.0.0", "style-to-object": "0.3.0", "unified": "9.2.0", "unist-builder": "2.0.3", "unist-util-visit": "2.0.3" }, "dependencies": { "@babel/core": { "version": "7.12.9", "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz", "integrity": "sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ==", "requires": { "@babel/code-frame": "^7.10.4", "@babel/generator": "^7.12.5", "@babel/helper-module-transforms": "^7.12.1", "@babel/helpers": "^7.12.5", "@babel/parser": "^7.12.7", "@babel/template": "^7.12.7", "@babel/traverse": "^7.12.9", "@babel/types": "^7.12.7", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.1", "json5": "^2.1.2", "lodash": "^4.17.19", "resolve": "^1.3.2", "semver": "^5.4.1", "source-map": "^0.5.0" } }, "@babel/plugin-syntax-jsx": { "version": 
"7.12.1", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz", "integrity": "sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg==", "requires": { "@babel/helper-plugin-utils": "^7.10.4" } }, "semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" }, "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==" }, "unified": { "version": "9.2.0", "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz", "integrity": "sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg==", "requires": { "bail": "^1.0.0", "extend": "^3.0.0", "is-buffer": "^2.0.0", "is-plain-obj": "^2.0.0", "trough": "^1.0.0", "vfile": "^4.0.0" } } } }, "@mdx-js/react": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-1.6.22.tgz", "integrity": "sha512-TDoPum4SHdfPiGSAaRBw7ECyI8VaHpK8GJugbJIJuqyh6kzw9ZLJZW3HGL3NNrJGxcAixUvqROm+YuQOo5eXtg==", "requires": {} }, "@mdx-js/util": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/@mdx-js/util/-/util-1.6.22.tgz", "integrity": "sha512-H1rQc1ZOHANWBvPcW+JpGwr+juXSxM8Q8YCkm3GhZd8REu1fHR3z99CErO1p9pkcfcxZnMdIZdIsXkOHY0NilA==" }, "@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", "requires": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "@nodelib/fs.stat": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", 
"integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==" }, "@nodelib/fs.walk": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", "requires": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "@polka/url": { "version": "1.0.0-next.21", "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.21.tgz", "integrity": "sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==" }, "@sideway/address": { "version": "4.1.4", "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz", "integrity": "sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw==", "requires": { "@hapi/hoek": "^9.0.0" } }, "@sideway/formula": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.0.tgz", "integrity": "sha512-vHe7wZ4NOXVfkoRb8T5otiENVlT7a3IAiw7H5M2+GO+9CDgcVUUsX1zalAztCmwyOr2RUTGJdgB+ZvSVqmdHmg==" }, "@sideway/pinpoint": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz", "integrity": "sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==" }, "@sindresorhus/is": { "version": "0.14.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz", "integrity": "sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==" }, "@slorber/static-site-generator-webpack-plugin": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/@slorber/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.7.tgz", "integrity": "sha512-Ug7x6z5lwrz0WqdnNFOMYrDQNTPAprvHLSh6+/fmml3qUiz6l5eq+2MzLKWtn/q5K5NpSiFsZTP/fck/3vjSxA==", "requires": { "eval": "^0.1.8", "p-map": 
"^4.0.0", "webpack-sources": "^3.2.2" } }, "@svgr/babel-plugin-add-jsx-attribute": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz", "integrity": "sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg==" }, "@svgr/babel-plugin-remove-jsx-attribute": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz", "integrity": "sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg==" }, "@svgr/babel-plugin-remove-jsx-empty-expression": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz", "integrity": "sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA==" }, "@svgr/babel-plugin-replace-jsx-attribute-value": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz", "integrity": "sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ==" }, "@svgr/babel-plugin-svg-dynamic-title": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz", "integrity": "sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg==" }, "@svgr/babel-plugin-svg-em-dimensions": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz", "integrity": "sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw==" }, "@svgr/babel-plugin-transform-react-native-svg": { "version": "5.4.0", "resolved": 
"https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz", "integrity": "sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q==" }, "@svgr/babel-plugin-transform-svg-component": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz", "integrity": "sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ==" }, "@svgr/babel-preset": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-5.5.0.tgz", "integrity": "sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig==", "requires": { "@svgr/babel-plugin-add-jsx-attribute": "^5.4.0", "@svgr/babel-plugin-remove-jsx-attribute": "^5.4.0", "@svgr/babel-plugin-remove-jsx-empty-expression": "^5.0.1", "@svgr/babel-plugin-replace-jsx-attribute-value": "^5.0.1", "@svgr/babel-plugin-svg-dynamic-title": "^5.4.0", "@svgr/babel-plugin-svg-em-dimensions": "^5.4.0", "@svgr/babel-plugin-transform-react-native-svg": "^5.4.0", "@svgr/babel-plugin-transform-svg-component": "^5.5.0" } }, "@svgr/core": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/core/-/core-5.5.0.tgz", "integrity": "sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ==", "requires": { "@svgr/plugin-jsx": "^5.5.0", "camelcase": "^6.2.0", "cosmiconfig": "^7.0.0" } }, "@svgr/hast-util-to-babel-ast": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz", "integrity": "sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ==", "requires": { "@babel/types": "^7.12.6" } }, "@svgr/plugin-jsx": { "version": "5.5.0", "resolved": 
"https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-5.5.0.tgz", "integrity": "sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA==", "requires": { "@babel/core": "^7.12.3", "@svgr/babel-preset": "^5.5.0", "@svgr/hast-util-to-babel-ast": "^5.5.0", "svg-parser": "^2.0.2" } }, "@svgr/plugin-svgo": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-5.5.0.tgz", "integrity": "sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ==", "requires": { "cosmiconfig": "^7.0.0", "deepmerge": "^4.2.2", "svgo": "^1.2.2" }, "dependencies": { "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "requires": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" } }, "css-select": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/css-select/-/css-select-2.1.0.tgz", "integrity": "sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ==", "requires": { "boolbase": "^1.0.0", "css-what": "^3.2.1", "domutils": "^1.7.0", "nth-check": "^1.0.2" } }, "css-tree": { "version": "1.0.0-alpha.37", "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0-alpha.37.tgz", "integrity": "sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg==", "requires": { "mdn-data": "2.0.4", "source-map": "^0.6.1" } }, "css-what": { "version": "3.4.2", "resolved": "https://registry.npmjs.org/css-what/-/css-what-3.4.2.tgz", "integrity": "sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ==" }, "dom-serializer": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.2.2.tgz", "integrity": 
"sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g==", "requires": { "domelementtype": "^2.0.1", "entities": "^2.0.0" }, "dependencies": { "domelementtype": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.2.0.tgz", "integrity": "sha512-DtBMo82pv1dFtUmHyr48beiuq792Sxohr+8Hm9zoxklYPfa6n0Z3Byjj2IV7bmr2IyqClnqEQhfgHJJ5QF0R5A==" } } }, "domutils": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.7.0.tgz", "integrity": "sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg==", "requires": { "dom-serializer": "0", "domelementtype": "1" } }, "mdn-data": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.4.tgz", "integrity": "sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA==" }, "svgo": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/svgo/-/svgo-1.3.2.tgz", "integrity": "sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw==", "requires": { "chalk": "^2.4.1", "coa": "^2.0.2", "css-select": "^2.0.0", "css-select-base-adapter": "^0.1.1", "css-tree": "1.0.0-alpha.37", "csso": "^4.0.2", "js-yaml": "^3.13.1", "mkdirp": "~0.5.1", "object.values": "^1.1.0", "sax": "~1.2.4", "stable": "^0.1.8", "unquote": "~1.1.1", "util.promisify": "~1.0.0" } } } }, "@svgr/webpack": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@svgr/webpack/-/webpack-5.5.0.tgz", "integrity": "sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g==", "requires": { "@babel/core": "^7.12.3", "@babel/plugin-transform-react-constant-elements": "^7.12.1", "@babel/preset-env": "^7.12.1", "@babel/preset-react": "^7.12.5", "@svgr/core": "^5.5.0", "@svgr/plugin-jsx": "^5.5.0", "@svgr/plugin-svgo": "^5.5.0", "loader-utils": "^2.0.0" } }, "@szmarczak/http-timer": { 
"version": "1.1.2", "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz", "integrity": "sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==", "requires": { "defer-to-connect": "^1.0.1" } }, "@trysound/sax": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz", "integrity": "sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==" }, "@types/body-parser": { "version": "1.19.2", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", "requires": { "@types/connect": "*", "@types/node": "*" } }, "@types/bonjour": { "version": "3.5.10", "resolved": "https://registry.npmjs.org/@types/bonjour/-/bonjour-3.5.10.tgz", "integrity": "sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw==", "requires": { "@types/node": "*" } }, "@types/connect": { "version": "3.4.35", "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", "requires": { "@types/node": "*" } }, "@types/connect-history-api-fallback": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz", "integrity": "sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw==", "requires": { "@types/express-serve-static-core": "*", "@types/node": "*" } }, "@types/eslint": { "version": "8.4.1", "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz", "integrity": "sha512-GE44+DNEyxxh2Kc6ro/VkIj+9ma0pO0bwv9+uHSyBrikYOHr8zYcdPvnBOp1aw8s+CjRvuSx7CyWqRrNFQ59mA==", "requires": { "@types/estree": "*", "@types/json-schema": "*" } }, 
"@types/eslint-scope": { "version": "3.7.3", "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.3.tgz", "integrity": "sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g==", "requires": { "@types/eslint": "*", "@types/estree": "*" } }, "@types/estree": { "version": "0.0.51", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz", "integrity": "sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==" }, "@types/express": { "version": "4.17.13", "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz", "integrity": "sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==", "requires": { "@types/body-parser": "*", "@types/express-serve-static-core": "^4.17.18", "@types/qs": "*", "@types/serve-static": "*" } }, "@types/express-serve-static-core": { "version": "4.17.29", "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.29.tgz", "integrity": "sha512-uMd++6dMKS32EOuw1Uli3e3BPgdLIXmezcfHv7N4c1s3gkhikBplORPpMq3fuWkxncZN1reb16d5n8yhQ80x7Q==", "requires": { "@types/node": "*", "@types/qs": "*", "@types/range-parser": "*" } }, "@types/hast": { "version": "2.3.4", "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.4.tgz", "integrity": "sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==", "requires": { "@types/unist": "*" } }, "@types/history": { "version": "4.7.11", "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.11.tgz", "integrity": "sha512-qjDJRrmvBMiTx+jyLxvLfJU7UznFuokDv4f3WRuriHKERccVpFU+8XMQUAbDzoiJCsmexxRExQeMwwCdamSKDA==" }, "@types/html-minifier-terser": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", "integrity": 
"sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg==" }, "@types/http-proxy": { "version": "1.17.9", "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.9.tgz", "integrity": "sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw==", "requires": { "@types/node": "*" } }, "@types/json-schema": { "version": "7.0.9", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz", "integrity": "sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==" }, "@types/mdast": { "version": "3.0.10", "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.10.tgz", "integrity": "sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==", "requires": { "@types/unist": "*" } }, "@types/mime": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==" }, "@types/node": { "version": "17.0.8", "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.8.tgz", "integrity": "sha512-YofkM6fGv4gDJq78g4j0mMuGMkZVxZDgtU0JRdx6FgiJDG+0fY0GKVolOV8WqVmEhLCXkQRjwDdKyPxJp/uucg==" }, "@types/parse-json": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" }, "@types/parse5": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-5.0.3.tgz", "integrity": "sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw==" }, "@types/prop-types": { "version": "15.7.4", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.4.tgz", "integrity": 
"sha512-rZ5drC/jWjrArrS8BR6SIr4cWpW09RNTYt9AMZo3Jwwif+iacXAqgVjm0B0Bv/S1jhDXKHqRVNCbACkJ89RAnQ==" }, "@types/q": { "version": "1.5.5", "resolved": "https://registry.npmjs.org/@types/q/-/q-1.5.5.tgz", "integrity": "sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ==" }, "@types/qs": { "version": "6.9.7", "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" }, "@types/range-parser": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" }, "@types/react": { "version": "17.0.38", "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.38.tgz", "integrity": "sha512-SI92X1IA+FMnP3qM5m4QReluXzhcmovhZnLNm3pyeQlooi02qI7sLiepEYqT678uNiyc25XfCqxREFpy3W7YhQ==", "requires": { "@types/prop-types": "*", "@types/scheduler": "*", "csstype": "^3.0.2" } }, "@types/react-router": { "version": "5.1.18", "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-5.1.18.tgz", "integrity": "sha512-YYknwy0D0iOwKQgz9v8nOzt2J6l4gouBmDnWqUUznltOTaon+r8US8ky8HvN0tXvc38U9m6z/t2RsVsnd1zM0g==", "requires": { "@types/history": "^4.7.11", "@types/react": "*" } }, "@types/react-router-config": { "version": "5.0.6", "resolved": "https://registry.npmjs.org/@types/react-router-config/-/react-router-config-5.0.6.tgz", "integrity": "sha512-db1mx37a1EJDf1XeX8jJN7R3PZABmJQXR8r28yUjVMFSjkmnQo6X6pOEEmNl+Tp2gYQOGPdYbFIipBtdElZ3Yg==", "requires": { "@types/history": "^4.7.11", "@types/react": "*", "@types/react-router": "*" } }, "@types/react-router-dom": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-5.3.3.tgz", "integrity": 
"sha512-kpqnYK4wcdm5UaWI3fLcELopqLrHgLqNsdpHauzlQktfkHL3npOSwtj1Uz9oKBAzs7lFtVkV8j83voAz2D8fhw==", "requires": { "@types/history": "^4.7.11", "@types/react": "*", "@types/react-router": "*" } }, "@types/retry": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz", "integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==" }, "@types/sax": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/@types/sax/-/sax-1.2.4.tgz", "integrity": "sha512-pSAff4IAxJjfAXUG6tFkO7dsSbTmf8CtUpfhhZ5VhkRpC4628tJhh3+V6H1E+/Gs9piSzYKT5yzHO5M4GG9jkw==", "requires": { "@types/node": "*" } }, "@types/scheduler": { "version": "0.16.2", "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==" }, "@types/serve-index": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.1.tgz", "integrity": "sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg==", "requires": { "@types/express": "*" } }, "@types/serve-static": { "version": "1.13.10", "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.10.tgz", "integrity": "sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==", "requires": { "@types/mime": "^1", "@types/node": "*" } }, "@types/sockjs": { "version": "0.3.33", "resolved": "https://registry.npmjs.org/@types/sockjs/-/sockjs-0.3.33.tgz", "integrity": "sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw==", "requires": { "@types/node": "*" } }, "@types/unist": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.6.tgz", "integrity": "sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==" }, 
"@types/ws": { "version": "8.5.3", "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.3.tgz", "integrity": "sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w==", "requires": { "@types/node": "*" } }, "@webassemblyjs/ast": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz", "integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==", "requires": { "@webassemblyjs/helper-numbers": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1" } }, "@webassemblyjs/floating-point-hex-parser": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz", "integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==" }, "@webassemblyjs/helper-api-error": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz", "integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==" }, "@webassemblyjs/helper-buffer": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz", "integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==" }, "@webassemblyjs/helper-numbers": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz", "integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==", "requires": { "@webassemblyjs/floating-point-hex-parser": "1.11.1", "@webassemblyjs/helper-api-error": "1.11.1", "@xtuc/long": "4.2.2" } }, "@webassemblyjs/helper-wasm-bytecode": { "version": "1.11.1", "resolved": 
"https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz", "integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==" }, "@webassemblyjs/helper-wasm-section": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz", "integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==", "requires": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-buffer": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1", "@webassemblyjs/wasm-gen": "1.11.1" } }, "@webassemblyjs/ieee754": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz", "integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==", "requires": { "@xtuc/ieee754": "^1.2.0" } }, "@webassemblyjs/leb128": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz", "integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==", "requires": { "@xtuc/long": "4.2.2" } }, "@webassemblyjs/utf8": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz", "integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==" }, "@webassemblyjs/wasm-edit": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz", "integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==", "requires": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-buffer": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1", "@webassemblyjs/helper-wasm-section": "1.11.1", "@webassemblyjs/wasm-gen": "1.11.1", "@webassemblyjs/wasm-opt": 
"1.11.1", "@webassemblyjs/wasm-parser": "1.11.1", "@webassemblyjs/wast-printer": "1.11.1" } }, "@webassemblyjs/wasm-gen": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz", "integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==", "requires": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1", "@webassemblyjs/ieee754": "1.11.1", "@webassemblyjs/leb128": "1.11.1", "@webassemblyjs/utf8": "1.11.1" } }, "@webassemblyjs/wasm-opt": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz", "integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==", "requires": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-buffer": "1.11.1", "@webassemblyjs/wasm-gen": "1.11.1", "@webassemblyjs/wasm-parser": "1.11.1" } }, "@webassemblyjs/wasm-parser": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz", "integrity": "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==", "requires": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-api-error": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1", "@webassemblyjs/ieee754": "1.11.1", "@webassemblyjs/leb128": "1.11.1", "@webassemblyjs/utf8": "1.11.1" } }, "@webassemblyjs/wast-printer": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz", "integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==", "requires": { "@webassemblyjs/ast": "1.11.1", "@xtuc/long": "4.2.2" } }, "@xtuc/ieee754": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", "integrity": 
"sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==" }, "@xtuc/long": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==" }, "accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", "requires": { "mime-types": "~2.1.34", "negotiator": "0.6.3" } }, "acorn": { "version": "8.7.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==" }, "acorn-import-assertions": { "version": "1.7.6", "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.7.6.tgz", "integrity": "sha512-FlVvVFA1TX6l3lp8VjDnYYq7R1nyW6x3svAt4nDgrWQ9SBaSh9CnbwgSUTasgfNfOG5HlM1ehugCvM+hjo56LA==", "requires": {} }, "acorn-walk": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==" }, "address": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/address/-/address-1.1.2.tgz", "integrity": "sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA==" }, "aggregate-error": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", "requires": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": 
"sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "requires": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "ajv-formats": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", "requires": { "ajv": "^8.0.0" }, "dependencies": { "ajv": { "version": "8.11.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2", "uri-js": "^4.2.2" } }, "json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" } } }, "ajv-keywords": { "version": "3.5.2", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", "requires": {} }, "algoliasearch": { "version": "4.13.1", "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.13.1.tgz", "integrity": "sha512-dtHUSE0caWTCE7liE1xaL+19AFf6kWEcyn76uhcitWpntqvicFHXKFoZe5JJcv9whQOTRM6+B8qJz6sFj+rDJA==", "requires": { "@algolia/cache-browser-local-storage": "4.13.1", "@algolia/cache-common": "4.13.1", "@algolia/cache-in-memory": "4.13.1", "@algolia/client-account": "4.13.1", "@algolia/client-analytics": "4.13.1", "@algolia/client-common": "4.13.1", "@algolia/client-personalization": "4.13.1", "@algolia/client-search": "4.13.1", "@algolia/logger-common": "4.13.1", "@algolia/logger-console": 
"4.13.1", "@algolia/requester-browser-xhr": "4.13.1", "@algolia/requester-common": "4.13.1", "@algolia/requester-node-http": "4.13.1", "@algolia/transporter": "4.13.1" } }, "algoliasearch-helper": { "version": "3.10.0", "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.10.0.tgz", "integrity": "sha512-4E4od8qWWDMVvQ3jaRX6Oks/k35ywD011wAA4LbYMMjOtaZV6VWaTjRr4iN2bdaXP2o1BP7SLFMBf3wvnHmd8Q==", "requires": { "@algolia/events": "^4.0.1" } }, "ansi-align": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", "integrity": "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==", "requires": { "string-width": "^4.1.0" } }, "ansi-html-community": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", "integrity": "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==" }, "ansi-regex": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==" }, "ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", "requires": { "color-convert": "^1.9.0" } }, "anymatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", "requires": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" } }, "arg": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==" }, "argparse": 
{ "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "requires": { "sprintf-js": "~1.0.2" } }, "array-flatten": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz", "integrity": "sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ==" }, "array-union": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==" }, "asap": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" }, "at-least-node": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==" }, "autoprefixer": { "version": "10.4.7", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.7.tgz", "integrity": "sha512-ypHju4Y2Oav95SipEcCcI5J7CGPuvz8oat7sUtYj3ClK44bldfvtvcxK6IEK++7rqB7YchDGzweZIBG+SD0ZAA==", "requires": { "browserslist": "^4.20.3", "caniuse-lite": "^1.0.30001335", "fraction.js": "^4.2.0", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", "postcss-value-parser": "^4.2.0" } }, "axios": { "version": "0.25.0", "resolved": "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz", "integrity": "sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==", "requires": { "follow-redirects": "^1.14.7" } }, "babel-loader": { "version": "8.2.5", "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.5.tgz", "integrity": 
"sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ==", "requires": { "find-cache-dir": "^3.3.1", "loader-utils": "^2.0.0", "make-dir": "^3.1.0", "schema-utils": "^2.6.5" }, "dependencies": { "schema-utils": { "version": "2.7.1", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz", "integrity": "sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==", "requires": { "@types/json-schema": "^7.0.5", "ajv": "^6.12.4", "ajv-keywords": "^3.5.2" } } } }, "babel-plugin-apply-mdx-type-prop": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/babel-plugin-apply-mdx-type-prop/-/babel-plugin-apply-mdx-type-prop-1.6.22.tgz", "integrity": "sha512-VefL+8o+F/DfK24lPZMtJctrCVOfgbqLAGZSkxwhazQv4VxPg3Za/i40fu22KR2m8eEda+IfSOlPLUSIiLcnCQ==", "requires": { "@babel/helper-plugin-utils": "7.10.4", "@mdx-js/util": "1.6.22" }, "dependencies": { "@babel/helper-plugin-utils": { "version": "7.10.4", "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" } } }, "babel-plugin-dynamic-import-node": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", "requires": { "object.assign": "^4.1.0" } }, "babel-plugin-extract-import-names": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/babel-plugin-extract-import-names/-/babel-plugin-extract-import-names-1.6.22.tgz", "integrity": "sha512-yJ9BsJaISua7d8zNT7oRG1ZLBJCIdZ4PZqmH8qa9N5AK01ifk3fnkc98AXhtzE7UkfCsEumvoQWgoYLhOnJ7jQ==", "requires": { "@babel/helper-plugin-utils": "7.10.4" }, "dependencies": { "@babel/helper-plugin-utils": { "version": "7.10.4", "resolved": 
"https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" } } }, "babel-plugin-polyfill-corejs2": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz", "integrity": "sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w==", "requires": { "@babel/compat-data": "^7.13.11", "@babel/helper-define-polyfill-provider": "^0.3.1", "semver": "^6.1.1" }, "dependencies": { "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "babel-plugin-polyfill-corejs3": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.2.tgz", "integrity": "sha512-G3uJih0XWiID451fpeFaYGVuxHEjzKTHtc9uGFEjR6hHrvNzeS/PX+LLLcetJcytsB5m4j+K3o/EpXJNb/5IEQ==", "requires": { "@babel/helper-define-polyfill-provider": "^0.3.1", "core-js-compat": "^3.21.0" } }, "babel-plugin-polyfill-regenerator": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz", "integrity": "sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A==", "requires": { "@babel/helper-define-polyfill-provider": "^0.3.1" } }, "bail": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/bail/-/bail-1.0.5.tgz", "integrity": "sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ==" }, "balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": 
"sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "base16": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz", "integrity": "sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ==" }, "batch": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz", "integrity": "sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==" }, "big.js": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==" }, "binary-extensions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==" }, "body-parser": { "version": "1.20.0", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.0.tgz", "integrity": "sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg==", "requires": { "bytes": "3.1.2", "content-type": "~1.0.4", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.10.3", "raw-body": "2.5.1", "type-is": "~1.6.18", "unpipe": "1.0.0" }, "dependencies": { "bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" }, "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "ms": { "version": "2.0.0", "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" } } }, "bonjour-service": { "version": "1.0.13", "resolved": "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.0.13.tgz", "integrity": "sha512-LWKRU/7EqDUC9CTAQtuZl5HzBALoCYwtLhffW3et7vZMwv3bWLpJf8bRYlMD5OCcDpTfnPgNCV4yo9ZIaJGMiA==", "requires": { "array-flatten": "^2.1.2", "dns-equal": "^1.0.0", "fast-deep-equal": "^3.1.3", "multicast-dns": "^7.2.5" } }, "boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", "integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=" }, "boxen": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/boxen/-/boxen-5.0.1.tgz", "integrity": "sha512-49VBlw+PrWEF51aCmy7QIteYPIFZxSpvqBdP/2itCPPlJ49kj9zg/XPRFrdkne2W+CfwXUls8exMvu1RysZpKA==", "requires": { "ansi-align": "^3.0.0", "camelcase": "^6.2.0", "chalk": "^4.1.0", "cli-boxes": "^2.2.1", "string-width": "^4.2.0", "type-fest": "^0.20.2", "widest-line": "^3.1.0", "wrap-ansi": "^7.0.0" } }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "braces": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", "requires": { "fill-range": "^7.0.1" } }, "browserslist": { "version": "4.20.3", "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.20.3.tgz", "integrity": "sha512-NBhymBQl1zM0Y5dQT/O+xiLP9/rzOIQdKM/eMJBAq7yBgaB6krIYLGejrwVYnSHZdqjscB1SPuAjHwxjvN6Wdg==", "requires": { "caniuse-lite": "^1.0.30001332", "electron-to-chromium": "^1.4.118", "escalade": 
"^3.1.1", "node-releases": "^2.0.3", "picocolors": "^1.0.0" } }, "buffer-from": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" }, "bytes": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" }, "cacheable-request": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", "integrity": "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", "requires": { "clone-response": "^1.0.2", "get-stream": "^5.1.0", "http-cache-semantics": "^4.0.0", "keyv": "^3.0.0", "lowercase-keys": "^2.0.0", "normalize-url": "^4.1.0", "responselike": "^1.0.2" }, "dependencies": { "get-stream": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", "requires": { "pump": "^3.0.0" } }, "lowercase-keys": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==" }, "normalize-url": { "version": "4.5.1", "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz", "integrity": "sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==" } } }, "call-bind": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", "requires": { "function-bind": "^1.1.1", "get-intrinsic": "^1.0.2" } }, "callsites": { "version": "3.1.0", "resolved": 
"https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" }, "camel-case": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", "requires": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "camelcase": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==" }, "camelcase-css": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==" }, "caniuse-api": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", "integrity": "sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", "requires": { "browserslist": "^4.0.0", "caniuse-lite": "^1.0.0", "lodash.memoize": "^4.1.2", "lodash.uniq": "^4.5.0" } }, "caniuse-lite": { "version": "1.0.30001335", "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001335.tgz", "integrity": "sha512-ddP1Tgm7z2iIxu6QTtbZUv6HJxSaV/PZeSrWFZtbY4JZ69tOeNhBCl3HyRQgeNZKE5AOn1kpV7fhljigy0Ty3w==" }, "ccount": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/ccount/-/ccount-1.1.0.tgz", "integrity": "sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg==" }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "requires": { "ansi-styles": "^4.1.0", "supports-color": 
"^7.1.0" }, "dependencies": { "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "requires": { "color-convert": "^2.0.1" } }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "requires": { "color-name": "~1.1.4" } }, "color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "requires": { "has-flag": "^4.0.0" } } } }, "character-entities": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz", "integrity": "sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==" }, "character-entities-legacy": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", "integrity": "sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==" }, "character-reference-invalid": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz", "integrity": 
"sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==" }, "cheerio": { "version": "1.0.0-rc.12", "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz", "integrity": "sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==", "requires": { "cheerio-select": "^2.1.0", "dom-serializer": "^2.0.0", "domhandler": "^5.0.3", "domutils": "^3.0.1", "htmlparser2": "^8.0.1", "parse5": "^7.0.0", "parse5-htmlparser2-tree-adapter": "^7.0.0" }, "dependencies": { "dom-serializer": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", "requires": { "domelementtype": "^2.3.0", "domhandler": "^5.0.2", "entities": "^4.2.0" } }, "domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==" }, "domhandler": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", "requires": { "domelementtype": "^2.3.0" } }, "domutils": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.0.1.tgz", "integrity": "sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q==", "requires": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.1" } }, "entities": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/entities/-/entities-4.3.1.tgz", "integrity": "sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg==" }, "htmlparser2": { "version": "8.0.1", "resolved": 
"https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.1.tgz", "integrity": "sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA==", "requires": { "domelementtype": "^2.3.0", "domhandler": "^5.0.2", "domutils": "^3.0.1", "entities": "^4.3.0" } } } }, "cheerio-select": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz", "integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==", "requires": { "boolbase": "^1.0.0", "css-select": "^5.1.0", "css-what": "^6.1.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.0.1" }, "dependencies": { "css-select": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", "requires": { "boolbase": "^1.0.0", "css-what": "^6.1.0", "domhandler": "^5.0.2", "domutils": "^3.0.1", "nth-check": "^2.0.1" } }, "dom-serializer": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", "requires": { "domelementtype": "^2.3.0", "domhandler": "^5.0.2", "entities": "^4.2.0" } }, "domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==" }, "domhandler": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", "requires": { "domelementtype": "^2.3.0" } }, "domutils": { "version": "3.0.1", "resolved": 
"https://registry.npmjs.org/domutils/-/domutils-3.0.1.tgz", "integrity": "sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q==", "requires": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.1" } }, "entities": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/entities/-/entities-4.3.1.tgz", "integrity": "sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg==" }, "nth-check": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", "requires": { "boolbase": "^1.0.0" } } } }, "chokidar": { "version": "3.5.3", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", "requires": { "anymatch": "~3.1.2", "braces": "~3.0.2", "fsevents": "~2.3.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" } }, "chrome-trace-event": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz", "integrity": "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==" }, "clean-css": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-5.3.0.tgz", "integrity": "sha512-YYuuxv4H/iNb1Z/5IbMRoxgrzjWGhOEFfd+groZ5dMCVkpENiMZmwspdrzBo9286JjM1gZJPAyL7ZIdzuvu2AQ==", "requires": { "source-map": "~0.6.0" } }, "clean-stack": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==" }, "cli-boxes": { "version": "2.2.1", "resolved": 
"https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz", "integrity": "sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==" }, "cli-table3": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.2.tgz", "integrity": "sha512-QyavHCaIC80cMivimWu4aWHilIpiDpfm3hGmqAmXVL1UsnbLuBSMd21hTX6VY4ZSDSM73ESLeF8TOYId3rBTbw==", "requires": { "@colors/colors": "1.5.0", "string-width": "^4.2.0" } }, "clone-deep": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", "requires": { "is-plain-object": "^2.0.4", "kind-of": "^6.0.2", "shallow-clone": "^3.0.0" } }, "clone-response": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", "integrity": "sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws=", "requires": { "mimic-response": "^1.0.0" } }, "clsx": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==" }, "coa": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/coa/-/coa-2.0.2.tgz", "integrity": "sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA==", "requires": { "@types/q": "^1.5.1", "chalk": "^2.4.1", "q": "^1.1.2" }, "dependencies": { "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "requires": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" } } } }, "collapse-white-space": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-1.0.6.tgz", "integrity": 
"sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ==" }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", "requires": { "color-name": "1.1.3" } }, "color-name": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" }, "colord": { "version": "2.9.2", "resolved": "https://registry.npmjs.org/colord/-/colord-2.9.2.tgz", "integrity": "sha512-Uqbg+J445nc1TKn4FoDPS6ZZqAvEDnwrH42yo8B40JSOgSLxMZ/gt3h4nmCtPLQeXhjJJkqBx7SCY35WnIixaQ==" }, "colorette": { "version": "2.0.19", "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.19.tgz", "integrity": "sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==" }, "combine-promises": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/combine-promises/-/combine-promises-1.1.0.tgz", "integrity": "sha512-ZI9jvcLDxqwaXEixOhArm3r7ReIivsXkpbyEWyeOhzz1QS0iSgBPnWvEqvIQtYyamGCYA88gFhmUrs9hrrQ0pg==" }, "comma-separated-tokens": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz", "integrity": "sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==" }, "commander": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==" }, "commondir": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=" }, "compressible": { "version": "2.0.18", "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", 
"integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", "requires": { "mime-db": ">= 1.43.0 < 2" } }, "compression": { "version": "1.7.4", "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", "requires": { "accepts": "~1.3.5", "bytes": "3.0.0", "compressible": "~2.0.16", "debug": "2.6.9", "on-headers": "~1.0.2", "safe-buffer": "5.1.2", "vary": "~1.1.2" }, "dependencies": { "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" } } }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "configstore": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz", "integrity": "sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==", "requires": { "dot-prop": "^5.2.0", "graceful-fs": "^4.1.2", "make-dir": "^3.0.0", "unique-string": "^2.0.0", "write-file-atomic": "^3.0.0", "xdg-basedir": "^4.0.0" } }, "connect-history-api-fallback": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz", "integrity": "sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==" }, "consola": { "version": "2.15.3", "resolved": "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz", "integrity": 
"sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw==" }, "content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", "requires": { "safe-buffer": "5.2.1" }, "dependencies": { "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" } } }, "content-type": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" }, "convert-source-map": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz", "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==", "requires": { "safe-buffer": "~5.1.1" } }, "cookie": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==" }, "cookie-signature": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" }, "copy-text-to-clipboard": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/copy-text-to-clipboard/-/copy-text-to-clipboard-3.0.1.tgz", "integrity": "sha512-rvVsHrpFcL4F2P8ihsoLdFHmd404+CMg71S756oRSeQgqk51U3kicGdnvfkrxva0xXH92SjGS62B0XIJsbh+9Q==" }, "copy-webpack-plugin": { "version": "11.0.0", "resolved": 
"https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz", "integrity": "sha512-fX2MWpamkW0hZxMEg0+mYnA40LTosOSa5TqZ9GYIBzyJa9C3QUaMPSE2xAi/buNr8u89SfD9wHSQVBzrRa/SOQ==", "requires": { "fast-glob": "^3.2.11", "glob-parent": "^6.0.1", "globby": "^13.1.1", "normalize-path": "^3.0.0", "schema-utils": "^4.0.0", "serialize-javascript": "^6.0.0" }, "dependencies": { "ajv": { "version": "8.11.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2", "uri-js": "^4.2.2" } }, "ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "requires": { "fast-deep-equal": "^3.1.3" } }, "glob-parent": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "requires": { "is-glob": "^4.0.3" } }, "globby": { "version": "13.1.2", "resolved": "https://registry.npmjs.org/globby/-/globby-13.1.2.tgz", "integrity": "sha512-LKSDZXToac40u8Q1PQtZihbNdTYSNMuWe+K5l+oa6KgDzSvVrHXlJy40hUP522RjAIoNLJYBJi7ow+rbFpIhHQ==", "requires": { "dir-glob": "^3.0.1", "fast-glob": "^3.2.11", "ignore": "^5.2.0", "merge2": "^1.4.1", "slash": "^4.0.0" } }, "json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "schema-utils": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", "integrity": 
"sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "requires": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", "ajv-formats": "^2.1.1", "ajv-keywords": "^5.0.0" } }, "slash": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==" } } }, "core-js": { "version": "3.23.5", "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.23.5.tgz", "integrity": "sha512-7Vh11tujtAZy82da4duVreQysIoO2EvVrur7y6IzZkH1IHPSekuDi8Vuw1+YKjkbfWLRD7Nc9ICQ/sIUDutcyg==" }, "core-js-compat": { "version": "3.22.3", "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.22.3.tgz", "integrity": "sha512-wliMbvPI2idgFWpFe7UEyHMvu6HWgW8WA+HnDRtgzoSDYvXFMpoGX1H3tPDDXrcfUSyXafCLDd7hOeMQHEZxGw==", "requires": { "browserslist": "^4.20.3", "semver": "7.0.0" }, "dependencies": { "semver": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==" } } }, "core-js-pure": { "version": "3.23.5", "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.23.5.tgz", "integrity": "sha512-8t78LdpKSuCq4pJYCYk8hl7XEkAX+BP16yRIwL3AanTksxuEf7CM83vRyctmiEL8NDZ3jpUcv56fk9/zG3aIuw==" }, "core-util-is": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" }, "cosmiconfig": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.1.tgz", "integrity": "sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ==", "requires": { "@types/parse-json": "^4.0.0", "import-fresh": "^3.2.1", "parse-json": "^5.0.0", "path-type": "^4.0.0", 
"yaml": "^1.10.0" } }, "cross-fetch": { "version": "3.1.5", "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.5.tgz", "integrity": "sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw==", "requires": { "node-fetch": "2.6.7" } }, "cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "requires": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "crypto-random-string": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==" }, "css-declaration-sorter": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.3.0.tgz", "integrity": "sha512-OGT677UGHJTAVMRhPO+HJ4oKln3wkBTwtDFH0ojbqm+MJm6xuDMHp2nkhh/ThaBqq20IbraBQSWKfSLNHQO9Og==", "requires": {} }, "css-loader": { "version": "6.7.1", "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.7.1.tgz", "integrity": "sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw==", "requires": { "icss-utils": "^5.1.0", "postcss": "^8.4.7", "postcss-modules-extract-imports": "^3.0.0", "postcss-modules-local-by-default": "^4.0.0", "postcss-modules-scope": "^3.0.0", "postcss-modules-values": "^4.0.0", "postcss-value-parser": "^4.2.0", "semver": "^7.3.5" } }, "css-minimizer-webpack-plugin": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-4.0.0.tgz", "integrity": "sha512-7ZXXRzRHvofv3Uac5Y+RkWRNo0ZMlcg8e9/OtrqUYmwDWJo+qs67GvdeFrXLsFb7czKNwjQhPkM0avlIYl+1nA==", "requires": { "cssnano": "^5.1.8", "jest-worker": "^27.5.1", "postcss": 
"^8.4.13", "schema-utils": "^4.0.0", "serialize-javascript": "^6.0.0", "source-map": "^0.6.1" }, "dependencies": { "ajv": { "version": "8.11.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2", "uri-js": "^4.2.2" } }, "ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "requires": { "fast-deep-equal": "^3.1.3" } }, "json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "schema-utils": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "requires": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", "ajv-formats": "^2.1.1", "ajv-keywords": "^5.0.0" } } } }, "css-select": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz", "integrity": "sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==", "requires": { "boolbase": "^1.0.0", "css-what": "^6.0.1", "domhandler": "^4.3.1", "domutils": "^2.8.0", "nth-check": "^2.0.1" }, "dependencies": { "nth-check": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz", "integrity": "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==", "requires": { "boolbase": "^1.0.0" } } } }, 
"css-select-base-adapter": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz", "integrity": "sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w==" }, "css-tree": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz", "integrity": "sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==", "requires": { "mdn-data": "2.0.14", "source-map": "^0.6.1" } }, "css-what": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==" }, "cssesc": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==" }, "cssnano": { "version": "5.1.12", "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-5.1.12.tgz", "integrity": "sha512-TgvArbEZu0lk/dvg2ja+B7kYoD7BBCmn3+k58xD0qjrGHsFzXY/wKTo9M5egcUCabPol05e/PVoIu79s2JN4WQ==", "requires": { "cssnano-preset-default": "^5.2.12", "lilconfig": "^2.0.3", "yaml": "^1.10.2" } }, "cssnano-preset-advanced": { "version": "5.3.8", "resolved": "https://registry.npmjs.org/cssnano-preset-advanced/-/cssnano-preset-advanced-5.3.8.tgz", "integrity": "sha512-xUlLLnEB1LjpEik+zgRNlk8Y/koBPPtONZjp7JKbXigeAmCrFvq9H0pXW5jJV45bQWAlmJ0sKy+IMr0XxLYQZg==", "requires": { "autoprefixer": "^10.3.7", "cssnano-preset-default": "^5.2.12", "postcss-discard-unused": "^5.1.0", "postcss-merge-idents": "^5.1.1", "postcss-reduce-idents": "^5.2.0", "postcss-zindex": "^5.1.0" } }, "cssnano-preset-default": { "version": "5.2.12", "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.2.12.tgz", "integrity": 
"sha512-OyCBTZi+PXgylz9HAA5kHyoYhfGcYdwFmyaJzWnzxuGRtnMw/kR6ilW9XzlzlRAtB6PLT/r+prYgkef7hngFew==", "requires": { "css-declaration-sorter": "^6.3.0", "cssnano-utils": "^3.1.0", "postcss-calc": "^8.2.3", "postcss-colormin": "^5.3.0", "postcss-convert-values": "^5.1.2", "postcss-discard-comments": "^5.1.2", "postcss-discard-duplicates": "^5.1.0", "postcss-discard-empty": "^5.1.1", "postcss-discard-overridden": "^5.1.0", "postcss-merge-longhand": "^5.1.6", "postcss-merge-rules": "^5.1.2", "postcss-minify-font-values": "^5.1.0", "postcss-minify-gradients": "^5.1.1", "postcss-minify-params": "^5.1.3", "postcss-minify-selectors": "^5.2.1", "postcss-normalize-charset": "^5.1.0", "postcss-normalize-display-values": "^5.1.0", "postcss-normalize-positions": "^5.1.1", "postcss-normalize-repeat-style": "^5.1.1", "postcss-normalize-string": "^5.1.0", "postcss-normalize-timing-functions": "^5.1.0", "postcss-normalize-unicode": "^5.1.0", "postcss-normalize-url": "^5.1.0", "postcss-normalize-whitespace": "^5.1.1", "postcss-ordered-values": "^5.1.3", "postcss-reduce-initial": "^5.1.0", "postcss-reduce-transforms": "^5.1.0", "postcss-svgo": "^5.1.0", "postcss-unique-selectors": "^5.1.1" } }, "cssnano-utils": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-3.1.0.tgz", "integrity": "sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA==", "requires": {} }, "csso": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz", "integrity": "sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==", "requires": { "css-tree": "^1.1.2" } }, "csstype": { "version": "3.0.10", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.0.10.tgz", "integrity": "sha512-2u44ZG2OcNUO9HDp/Jl8C07x6pU/eTR3ncV91SiK3dhG9TWvRVsCoJw14Ckx5DgWkzGA3waZWO3d7pgqpUI/XA==" }, "debug": { "version": "4.3.2", "resolved": 
"https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", "requires": { "ms": "2.1.2" } }, "decompress-response": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", "integrity": "sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M=", "requires": { "mimic-response": "^1.0.0" } }, "deep-extend": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==" }, "deepmerge": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==" }, "default-gateway": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz", "integrity": "sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg==", "requires": { "execa": "^5.0.0" } }, "defer-to-connect": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz", "integrity": "sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==" }, "define-lazy-prop": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==" }, "define-properties": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", "requires": { "object-keys": "^1.0.12" } }, "del": { "version": "6.1.1", "resolved": 
"https://registry.npmjs.org/del/-/del-6.1.1.tgz", "integrity": "sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==", "requires": { "globby": "^11.0.1", "graceful-fs": "^4.2.4", "is-glob": "^4.0.1", "is-path-cwd": "^2.2.0", "is-path-inside": "^3.0.2", "p-map": "^4.0.0", "rimraf": "^3.0.2", "slash": "^3.0.0" } }, "depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" }, "destroy": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" }, "detab": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/detab/-/detab-2.0.4.tgz", "integrity": "sha512-8zdsQA5bIkoRECvCrNKPla84lyoR7DSAyf7p0YgXzBO9PDJx8KntPUay7NS6yp+KdxdVtiE5SpHKtbp2ZQyA9g==", "requires": { "repeat-string": "^1.5.4" } }, "detect-node": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz", "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==" }, "detect-port": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/detect-port/-/detect-port-1.3.0.tgz", "integrity": "sha512-E+B1gzkl2gqxt1IhUzwjrxBKRqx1UzC3WLONHinn8S3T6lwV/agVCyitiFOsGJ/eYuEUBvD71MZHy3Pv1G9doQ==", "requires": { "address": "^1.0.1", "debug": "^2.6.0" }, "dependencies": { "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" } } }, "detect-port-alt": { "version": "1.1.6", 
"resolved": "https://registry.npmjs.org/detect-port-alt/-/detect-port-alt-1.1.6.tgz", "integrity": "sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q==", "requires": { "address": "^1.0.1", "debug": "^2.6.0" }, "dependencies": { "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" } } }, "dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", "requires": { "path-type": "^4.0.0" } }, "dns-equal": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", "integrity": "sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg==" }, "dns-packet": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.4.0.tgz", "integrity": "sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g==", "requires": { "@leichtgewicht/ip-codec": "^2.0.1" } }, "docusaurus-gtm-plugin": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/docusaurus-gtm-plugin/-/docusaurus-gtm-plugin-0.0.2.tgz", "integrity": "sha512-Xx/df0Ppd5SultlzUj9qlQk2lX9mNVfTb41juyBUPZ1Nc/5dNx+uN0VuLyF4JEObkDRrUY1EFo9fEUDo8I6QOQ==" }, "dom-converter": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", "requires": { "utila": 
"~0.4" } }, "dom-serializer": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz", "integrity": "sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==", "requires": { "domelementtype": "^2.0.1", "domhandler": "^4.2.0", "entities": "^2.0.0" }, "dependencies": { "domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==" } } }, "domelementtype": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.1.tgz", "integrity": "sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w==" }, "domhandler": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz", "integrity": "sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==", "requires": { "domelementtype": "^2.2.0" }, "dependencies": { "domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==" } } }, "domutils": { "version": "2.8.0", "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz", "integrity": "sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==", "requires": { "dom-serializer": "^1.0.1", "domelementtype": "^2.2.0", "domhandler": "^4.2.0" }, "dependencies": { "domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==" } } }, "dot-case": { "version": "3.0.4", "resolved": 
"https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", "requires": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "dot-prop": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", "requires": { "is-obj": "^2.0.0" } }, "duplexer": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==" }, "duplexer3": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz", "integrity": "sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=" }, "eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" }, "ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, "electron-to-chromium": { "version": "1.4.129", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.129.tgz", "integrity": "sha512-GgtN6bsDtHdtXJtlMYZWGB/uOyjZWjmRDumXTas7dGBaB9zUyCjzHet1DY2KhyHN8R0GLbzZWqm4efeddqqyRQ==" }, "emoji-regex": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" }, "emojis-list": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", "integrity": 
"sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==" }, "emoticon": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/emoticon/-/emoticon-3.2.0.tgz", "integrity": "sha512-SNujglcLTTg+lDAcApPNgEdudaqQFiAbJCqzjNxJkvN9vAwCGi0uu8IUVvx+f16h+V44KCY6Y2yboroc9pilHg==" }, "encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==" }, "end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "requires": { "once": "^1.4.0" } }, "enhanced-resolve": { "version": "5.9.3", "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.9.3.tgz", "integrity": "sha512-Bq9VSor+kjvW3f9/MiiR4eE3XYgOl7/rS8lnSxbRbF3kS0B2r+Y9w5krBWxZgDxASVZbdYrn5wT4j/Wb0J9qow==", "requires": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "entities": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==" }, "error-ex": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", "requires": { "is-arrayish": "^0.2.1" } }, "es-abstract": { "version": "1.18.5", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.5.tgz", "integrity": "sha512-DDggyJLoS91CkJjgauM5c0yZMjiD1uK3KcaCeAmffGwZ+ODWzOkPN4QwRbsK5DOFf06fywmyLci3ZD8jLGhVYA==", "requires": { "call-bind": "^1.0.2", "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", "get-intrinsic": "^1.1.1", "has": "^1.0.3", "has-symbols": "^1.0.2", 
"internal-slot": "^1.0.3", "is-callable": "^1.2.3", "is-negative-zero": "^2.0.1", "is-regex": "^1.1.3", "is-string": "^1.0.6", "object-inspect": "^1.11.0", "object-keys": "^1.1.1", "object.assign": "^4.1.2", "string.prototype.trimend": "^1.0.4", "string.prototype.trimstart": "^1.0.4", "unbox-primitive": "^1.0.1" } }, "es-module-lexer": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz", "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==" }, "es-to-primitive": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", "requires": { "is-callable": "^1.1.4", "is-date-object": "^1.0.1", "is-symbol": "^1.0.2" } }, "escalade": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" }, "escape-goat": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-2.1.1.tgz", "integrity": "sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==" }, "escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" }, "escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" }, "eslint-scope": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", "requires": { "esrecurse": "^4.3.0", "estraverse": "^4.1.1" } }, 
"esprima": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" }, "esrecurse": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "requires": { "estraverse": "^5.2.0" }, "dependencies": { "estraverse": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==" } } }, "estraverse": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==" }, "esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" }, "eta": { "version": "1.12.3", "resolved": "https://registry.npmjs.org/eta/-/eta-1.12.3.tgz", "integrity": "sha512-qHixwbDLtekO/d51Yr4glcaUJCIjGVJyTzuqV4GPlgZo1YpgOKG+avQynErZIYrfM6JIJdtiG2Kox8tbb+DoGg==" }, "etag": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==" }, "eval": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz", "integrity": "sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==", "requires": { "@types/node": "*", "require-like": ">= 0.1.1" } }, "eventemitter3": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", "integrity": 
"sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" }, "events": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" }, "execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "requires": { "cross-spawn": "^7.0.3", "get-stream": "^6.0.0", "human-signals": "^2.1.0", "is-stream": "^2.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^4.0.1", "onetime": "^5.1.2", "signal-exit": "^3.0.3", "strip-final-newline": "^2.0.0" }, "dependencies": { "get-stream": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==" } } }, "express": { "version": "4.18.1", "resolved": "https://registry.npmjs.org/express/-/express-4.18.1.tgz", "integrity": "sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q==", "requires": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.0", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.5.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.2.0", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.1", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "0.1.7", "proxy-addr": "~2.0.7", "qs": "6.10.3", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.18.0", "serve-static": "1.15.0", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" }, "dependencies": { 
"array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" }, "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" } } }, "extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, "extend-shallow": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", "requires": { "is-extendable": "^0.1.0" } }, "fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" }, "fast-glob": { "version": "3.2.11", "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", "requires": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", 
"micromatch": "^4.0.4" } }, "fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" }, "fast-url-parser": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz", "integrity": "sha1-9K8+qfNNiicc9YrSs3WfQx8LMY0=", "requires": { "punycode": "^1.3.2" }, "dependencies": { "punycode": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" } } }, "fastq": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", "requires": { "reusify": "^1.0.4" } }, "faye-websocket": { "version": "0.11.4", "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz", "integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==", "requires": { "websocket-driver": ">=0.5.1" } }, "fbemitter": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/fbemitter/-/fbemitter-3.0.0.tgz", "integrity": "sha512-KWKaceCwKQU0+HPoop6gn4eOHk50bBv/VxjJtGMfwmJt3D29JpN4H4eisCtIPA+a8GVBam+ldMMpMjJUvpDyHw==", "requires": { "fbjs": "^3.0.0" } }, "fbjs": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/fbjs/-/fbjs-3.0.4.tgz", "integrity": "sha512-ucV0tDODnGV3JCnnkmoszb5lf4bNpzjv80K41wd4k798Etq+UYD0y0TIfalLjZoKgjive6/adkRnszwapiDgBQ==", "requires": { "cross-fetch": "^3.1.5", "fbjs-css-vars": "^1.0.0", "loose-envify": "^1.0.0", "object-assign": "^4.1.0", "promise": "^7.1.1", "setimmediate": "^1.0.5", "ua-parser-js": "^0.7.30" } }, "fbjs-css-vars": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz", "integrity": "sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ==" }, "feed": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/feed/-/feed-4.2.2.tgz", "integrity": "sha512-u5/sxGfiMfZNtJ3OvQpXcvotFpYkL0n9u9mM2vkui2nGo8b4wvDkJ8gAkYqbA8QpGyFCv3RK0Z+Iv+9veCS9bQ==", "requires": { "xml-js": "^1.6.11" } }, "file-loader": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz", "integrity": "sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==", "requires": { "loader-utils": "^2.0.0", "schema-utils": "^3.0.0" } }, "filesize": { "version": "8.0.7", "resolved": "https://registry.npmjs.org/filesize/-/filesize-8.0.7.tgz", "integrity": "sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ==" }, "fill-range": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", "requires": { "to-regex-range": "^5.0.1" } }, "finalhandler": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", "requires": { "debug": "2.6.9", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", "statuses": "2.0.1", "unpipe": "~1.0.0" }, "dependencies": { "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": 
"sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" } } }, "find-cache-dir": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", "requires": { "commondir": "^1.0.1", "make-dir": "^3.0.2", "pkg-dir": "^4.1.0" } }, "find-up": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "requires": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" } }, "flux": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/flux/-/flux-4.0.3.tgz", "integrity": "sha512-yKAbrp7JhZhj6uiT1FTuVMlIAT1J4jqEyBpFApi1kxpGZCvacMVc/t1pMQyotqHhAgvoE3bNvAykhCo2CLjnYw==", "requires": { "fbemitter": "^3.0.0", "fbjs": "^3.0.1" } }, "follow-redirects": { "version": "1.14.9", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.9.tgz", "integrity": "sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w==" }, "fork-ts-checker-webpack-plugin": { "version": "6.5.2", "resolved": "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz", "integrity": "sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA==", "requires": { "@babel/code-frame": "^7.8.3", "@types/json-schema": "^7.0.5", "chalk": "^4.1.0", "chokidar": "^3.4.2", "cosmiconfig": "^6.0.0", "deepmerge": "^4.2.2", "fs-extra": "^9.0.0", "glob": "^7.1.6", "memfs": "^3.1.2", "minimatch": "^3.0.4", "schema-utils": "2.7.0", "semver": "^7.3.2", "tapable": "^1.0.0" }, "dependencies": { "cosmiconfig": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz", "integrity": 
"sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg==", "requires": { "@types/parse-json": "^4.0.0", "import-fresh": "^3.1.0", "parse-json": "^5.0.0", "path-type": "^4.0.0", "yaml": "^1.7.2" } }, "fs-extra": { "version": "9.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", "requires": { "at-least-node": "^1.0.0", "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, "schema-utils": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz", "integrity": "sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A==", "requires": { "@types/json-schema": "^7.0.4", "ajv": "^6.12.2", "ajv-keywords": "^3.4.1" } }, "tapable": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz", "integrity": "sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==" } } }, "forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==" }, "fraction.js": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz", "integrity": "sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==" }, "fresh": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==" }, "fs-extra": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", "integrity": 
"sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", "requires": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, "fs-monkey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.3.tgz", "integrity": "sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q==" }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, "fsevents": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", "optional": true }, "function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" }, "gensync": { "version": "1.0.0-beta.2", "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==" }, "get-intrinsic": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", "requires": { "function-bind": "^1.1.1", "has": "^1.0.3", "has-symbols": "^1.0.1" } }, "get-own-enumerable-property-symbols": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz", "integrity": "sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==" }, "get-stream": { "version": "4.1.0", "resolved": 
"https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", "requires": { "pump": "^3.0.0" } }, "github-slugger": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-1.4.0.tgz", "integrity": "sha512-w0dzqw/nt51xMVmlaV1+JRzN+oCa1KfcgGEWhxUG16wbdA+Xnt/yoFO8Z8x/V82ZcZ0wy6ln9QDup5avbhiDhQ==" }, "glob": { "version": "7.1.7", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.0.4", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "requires": { "is-glob": "^4.0.1" } }, "glob-to-regexp": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==" }, "global-dirs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz", "integrity": "sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==", "requires": { "ini": "2.0.0" }, "dependencies": { "ini": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==" } } }, "global-modules": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz", "integrity": 
"sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==", "requires": { "global-prefix": "^3.0.0" } }, "global-prefix": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-3.0.0.tgz", "integrity": "sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==", "requires": { "ini": "^1.3.5", "kind-of": "^6.0.2", "which": "^1.3.1" }, "dependencies": { "which": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", "requires": { "isexe": "^2.0.0" } } } }, "globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" }, "globby": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", "requires": { "array-union": "^2.1.0", "dir-glob": "^3.0.1", "fast-glob": "^3.2.9", "ignore": "^5.2.0", "merge2": "^1.4.1", "slash": "^3.0.0" } }, "got": { "version": "9.6.0", "resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz", "integrity": "sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==", "requires": { "@sindresorhus/is": "^0.14.0", "@szmarczak/http-timer": "^1.1.2", "cacheable-request": "^6.0.0", "decompress-response": "^3.3.0", "duplexer3": "^0.1.4", "get-stream": "^4.1.0", "lowercase-keys": "^1.0.1", "mimic-response": "^1.0.1", "p-cancelable": "^1.0.0", "to-readable-stream": "^1.0.0", "url-parse-lax": "^3.0.0" } }, "graceful-fs": { "version": "4.2.9", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", "integrity": 
"sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==" }, "gray-matter": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz", "integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==", "requires": { "js-yaml": "^3.13.1", "kind-of": "^6.0.2", "section-matter": "^1.0.0", "strip-bom-string": "^1.0.0" } }, "gzip-size": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz", "integrity": "sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==", "requires": { "duplexer": "^0.1.2" } }, "handle-thing": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz", "integrity": "sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==" }, "has": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", "requires": { "function-bind": "^1.1.1" } }, "has-bigints": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz", "integrity": "sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==" }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" }, "has-symbols": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==" }, "has-tostringtag": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", "integrity": 
"sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", "requires": { "has-symbols": "^1.0.2" } }, "has-yarn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz", "integrity": "sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==" }, "hast-to-hyperscript": { "version": "9.0.1", "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz", "integrity": "sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA==", "requires": { "@types/unist": "^2.0.3", "comma-separated-tokens": "^1.0.0", "property-information": "^5.3.0", "space-separated-tokens": "^1.0.0", "style-to-object": "^0.3.0", "unist-util-is": "^4.0.0", "web-namespaces": "^1.0.0" } }, "hast-util-from-parse5": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-6.0.1.tgz", "integrity": "sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA==", "requires": { "@types/parse5": "^5.0.0", "hastscript": "^6.0.0", "property-information": "^5.0.0", "vfile": "^4.0.0", "vfile-location": "^3.2.0", "web-namespaces": "^1.0.0" } }, "hast-util-parse-selector": { "version": "2.2.5", "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz", "integrity": "sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==" }, "hast-util-raw": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-6.0.1.tgz", "integrity": "sha512-ZMuiYA+UF7BXBtsTBNcLBF5HzXzkyE6MLzJnL605LKE8GJylNjGc4jjxazAHUtcwT5/CEt6afRKViYB4X66dig==", "requires": { "@types/hast": "^2.0.0", "hast-util-from-parse5": "^6.0.0", "hast-util-to-parse5": "^6.0.0", "html-void-elements": "^1.0.0", "parse5": "^6.0.0", "unist-util-position": "^3.0.0", "vfile": "^4.0.0", 
"web-namespaces": "^1.0.0", "xtend": "^4.0.0", "zwitch": "^1.0.0" }, "dependencies": { "parse5": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==" } } }, "hast-util-to-parse5": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz", "integrity": "sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ==", "requires": { "hast-to-hyperscript": "^9.0.0", "property-information": "^5.0.0", "web-namespaces": "^1.0.0", "xtend": "^4.0.0", "zwitch": "^1.0.0" } }, "hastscript": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz", "integrity": "sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==", "requires": { "@types/hast": "^2.0.0", "comma-separated-tokens": "^1.0.0", "hast-util-parse-selector": "^2.0.0", "property-information": "^5.0.0", "space-separated-tokens": "^1.0.0" } }, "he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==" }, "history": { "version": "4.10.1", "resolved": "https://registry.npmjs.org/history/-/history-4.10.1.tgz", "integrity": "sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==", "requires": { "@babel/runtime": "^7.1.2", "loose-envify": "^1.2.0", "resolve-pathname": "^3.0.0", "tiny-invariant": "^1.0.2", "tiny-warning": "^1.0.0", "value-equal": "^1.0.1" } }, "hoist-non-react-statics": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", "requires": { 
"react-is": "^16.7.0" } }, "hpack.js": { "version": "2.1.6", "resolved": "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz", "integrity": "sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==", "requires": { "inherits": "^2.0.1", "obuf": "^1.0.0", "readable-stream": "^2.0.1", "wbuf": "^1.1.0" }, "dependencies": { "readable-stream": { "version": "2.3.7", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "requires": { "safe-buffer": "~5.1.0" } } } }, "html-entities": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.3.3.tgz", "integrity": "sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA==" }, "html-minifier-terser": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", "integrity": "sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw==", "requires": { "camel-case": "^4.1.2", "clean-css": "^5.2.2", "commander": "^8.3.0", "he": "^1.2.0", "param-case": "^3.0.4", "relateurl": "^0.2.7", "terser": "^5.10.0" }, "dependencies": { "commander": { "version": "8.3.0", "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==" } } }, 
"html-tags": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.2.0.tgz", "integrity": "sha512-vy7ClnArOZwCnqZgvv+ddgHgJiAFXe3Ge9ML5/mBctVJoUoYPCdxVucOywjDARn6CVoh3dRSFdPHy2sX80L0Wg==" }, "html-void-elements": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-1.0.5.tgz", "integrity": "sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w==" }, "html-webpack-plugin": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz", "integrity": "sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw==", "requires": { "@types/html-minifier-terser": "^6.0.0", "html-minifier-terser": "^6.0.2", "lodash": "^4.17.21", "pretty-error": "^4.0.0", "tapable": "^2.0.0" } }, "htmlparser2": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz", "integrity": "sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==", "requires": { "domelementtype": "^2.0.1", "domhandler": "^4.0.0", "domutils": "^2.5.2", "entities": "^2.0.0" }, "dependencies": { "domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==" } } }, "http-cache-semantics": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz", "integrity": "sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==" }, "http-deceiver": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz", "integrity": "sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==" }, "http-errors": { 
"version": "2.0.0", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", "requires": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "http-parser-js": { "version": "0.5.8", "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz", "integrity": "sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==" }, "http-proxy": { "version": "1.18.1", "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz", "integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==", "requires": { "eventemitter3": "^4.0.0", "follow-redirects": "^1.0.0", "requires-port": "^1.0.0" } }, "http-proxy-middleware": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz", "integrity": "sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw==", "requires": { "@types/http-proxy": "^1.17.8", "http-proxy": "^1.18.1", "is-glob": "^4.0.1", "is-plain-obj": "^3.0.0", "micromatch": "^4.0.2" }, "dependencies": { "is-plain-obj": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", "integrity": "sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==" } } }, "human-signals": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==" }, "iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", "integrity": 
"sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", "requires": { "safer-buffer": ">= 2.1.2 < 3" } }, "icss-utils": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz", "integrity": "sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==", "requires": {} }, "ignore": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==" }, "image-size": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/image-size/-/image-size-1.0.2.tgz", "integrity": "sha512-xfOoWjceHntRb3qFCrh5ZFORYH8XCdYpASltMhZ/Q0KZiOwjdE/Yl2QCiWdwD+lygV5bMCvauzgu5PxBX/Yerg==", "requires": { "queue": "6.0.2" } }, "immer": { "version": "9.0.15", "resolved": "https://registry.npmjs.org/immer/-/immer-9.0.15.tgz", "integrity": "sha512-2eB/sswms9AEUSkOm4SbV5Y7Vmt/bKRwByd52jfLkW4OLYeaTP3EEiJ9agqU0O/tq6Dk62Zfj+TJSqfm1rLVGQ==" }, "import-fresh": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", "requires": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "import-lazy": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz", "integrity": "sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM=" }, "imurmurhash": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=" }, "indent-string": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==" }, "infima": { "version": "0.2.0-alpha.42", 
"resolved": "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.42.tgz", "integrity": "sha512-ift8OXNbQQwtbIt6z16KnSWP7uJ/SysSMFI4F87MNRTicypfl4Pv3E2OGVv6N3nSZFJvA8imYulCBS64iyHYww==" }, "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", "requires": { "once": "^1.3.0", "wrappy": "1" } }, "inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "ini": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" }, "inline-style-parser": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==" }, "internal-slot": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", "requires": { "get-intrinsic": "^1.1.0", "has": "^1.0.3", "side-channel": "^1.0.4" } }, "interpret": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==" }, "invariant": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", "requires": { "loose-envify": "^1.0.0" } }, "ipaddr.js": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.0.1.tgz", 
"integrity": "sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng==" }, "is-alphabetical": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", "integrity": "sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==" }, "is-alphanumerical": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz", "integrity": "sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==", "requires": { "is-alphabetical": "^1.0.0", "is-decimal": "^1.0.0" } }, "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" }, "is-bigint": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", "requires": { "has-bigints": "^1.0.1" } }, "is-binary-path": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", "requires": { "binary-extensions": "^2.0.0" } }, "is-boolean-object": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", "requires": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" } }, "is-buffer": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==" }, "is-callable": { "version": "1.2.4", "resolved": 
"https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==" }, "is-ci": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", "requires": { "ci-info": "^2.0.0" }, "dependencies": { "ci-info": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==" } } }, "is-core-module": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.6.0.tgz", "integrity": "sha512-wShG8vs60jKfPWpF2KZRaAtvt3a20OAn7+IJ6hLPECpSABLcKtFKTTI4ZtH5QcBruBHlq+WsdHWyz0BCZW7svQ==", "requires": { "has": "^1.0.3" } }, "is-date-object": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", "requires": { "has-tostringtag": "^1.0.0" } }, "is-decimal": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==" }, "is-docker": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==" }, "is-extendable": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" }, "is-extglob": { "version": "2.1.1", "resolved": 
"https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==" }, "is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "requires": { "is-extglob": "^2.1.1" } }, "is-hexadecimal": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz", "integrity": "sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==" }, "is-installed-globally": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", "requires": { "global-dirs": "^3.0.0", "is-path-inside": "^3.0.2" } }, "is-negative-zero": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz", "integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==" }, "is-npm": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-5.0.0.tgz", "integrity": "sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA==" }, "is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" }, "is-number-object": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.6.tgz", "integrity": "sha512-bEVOqiRcvo3zO1+G2lVMy+gkkEm9Yh7cDMRusKKu5ZJKPUYSJwICTKZrNKHA2EbSP0Tu0+6B/emsYNHZyn6K8g==", "requires": { "has-tostringtag": "^1.0.0" } }, "is-obj": { "version": 
"2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" }, "is-path-cwd": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz", "integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==" }, "is-path-inside": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==" }, "is-plain-obj": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==" }, "is-plain-object": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", "requires": { "isobject": "^3.0.1" } }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", "requires": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" } }, "is-regexp": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz", "integrity": "sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==" }, "is-root": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz", "integrity": "sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg==" }, "is-stream": { "version": "2.0.1", "resolved": 
"https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==" }, "is-string": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", "requires": { "has-tostringtag": "^1.0.0" } }, "is-symbol": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", "requires": { "has-symbols": "^1.0.2" } }, "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" }, "is-whitespace-character": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz", "integrity": "sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w==" }, "is-word-character": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.4.tgz", "integrity": "sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA==" }, "is-wsl": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", "requires": { "is-docker": "^2.0.0" } }, "is-yarn-global": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz", "integrity": "sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==" }, "isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": 
"sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" }, "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" }, "isobject": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=" }, "jest-worker": { "version": "27.5.1", "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", "requires": { "@types/node": "*", "merge-stream": "^2.0.0", "supports-color": "^8.0.0" }, "dependencies": { "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "requires": { "has-flag": "^4.0.0" } } } }, "joi": { "version": "17.6.0", "resolved": "https://registry.npmjs.org/joi/-/joi-17.6.0.tgz", "integrity": "sha512-OX5dG6DTbcr/kbMFj0KGYxuew69HPcAE3K/sZpEV2nP6e/j/C0HV+HNiBPCASxdx5T7DMoa0s8UeHWMnb6n2zw==", "requires": { "@hapi/hoek": "^9.0.0", "@hapi/topo": "^5.0.0", "@sideway/address": "^4.1.3", "@sideway/formula": "^3.0.0", "@sideway/pinpoint": "^2.0.0" } }, "js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" }, "js-yaml": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", 
"integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", "requires": { "argparse": "^1.0.7", "esprima": "^4.0.0" } }, "jsesc": { "version": "2.5.2", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==" }, "json-buffer": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", "integrity": "sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=" }, "json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" }, "json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" }, "json5": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz", "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==" }, "jsonfile": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", "requires": { "graceful-fs": "^4.1.6", "universalify": "^2.0.0" } }, "keyv": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz", "integrity": "sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==", "requires": { "json-buffer": "3.0.0" } }, "kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", "integrity": 
"sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" }, "kleur": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==" }, "klona": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/klona/-/klona-2.0.5.tgz", "integrity": "sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ==" }, "latest-version": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz", "integrity": "sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==", "requires": { "package-json": "^6.3.0" } }, "leven": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==" }, "lilconfig": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.6.tgz", "integrity": "sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg==" }, "lines-and-columns": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=" }, "loader-runner": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.2.0.tgz", "integrity": "sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw==" }, "loader-utils": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.0.tgz", "integrity": "sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ==", "requires": { "big.js": "^5.2.2", "emojis-list": "^3.0.0", "json5": "^2.1.2" } }, "locate-path": { "version": 
"5.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "requires": { "p-locate": "^4.1.0" } }, "lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, "lodash.curry": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/lodash.curry/-/lodash.curry-4.1.1.tgz", "integrity": "sha512-/u14pXGviLaweY5JI0IUzgzF2J6Ne8INyzAZjImcryjgkZ+ebruBxy2/JaOOkTqScddcYtakjhSaeemV8lR0tA==" }, "lodash.debounce": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=" }, "lodash.flow": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/lodash.flow/-/lodash.flow-3.5.0.tgz", "integrity": "sha512-ff3BX/tSioo+XojX4MOsOMhJw0nZoUEF011LX8g8d3gvjVbxd89cCio4BCXronjxcTUIJUoqKEUA+n4CqvvRPw==" }, "lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==" }, "lodash.uniq": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==" }, "loose-envify": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", "requires": { "js-tokens": "^3.0.0 || ^4.0.0" } }, "lower-case": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", "integrity": 
"sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", "requires": { "tslib": "^2.0.3" } }, "lowercase-keys": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==" }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "requires": { "yallist": "^4.0.0" } }, "make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "requires": { "semver": "^6.0.0" }, "dependencies": { "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "markdown-escapes": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.4.tgz", "integrity": "sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg==" }, "mdast-squeeze-paragraphs": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/mdast-squeeze-paragraphs/-/mdast-squeeze-paragraphs-4.0.0.tgz", "integrity": "sha512-zxdPn69hkQ1rm4J+2Cs2j6wDEv7O17TfXTJ33tl/+JPIoEmtV9t2ZzBM5LPHE8QlHsmVD8t3vPKCyY3oH+H8MQ==", "requires": { "unist-util-remove": "^2.0.0" } }, "mdast-util-definitions": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz", "integrity": "sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ==", "requires": { "unist-util-visit": "^2.0.0" } }, 
"mdast-util-to-hast": { "version": "10.0.1", "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-10.0.1.tgz", "integrity": "sha512-BW3LM9SEMnjf4HXXVApZMt8gLQWVNXc3jryK0nJu/rOXPOnlkUjmdkDlmxMirpbU9ILncGFIwLH/ubnWBbcdgA==", "requires": { "@types/mdast": "^3.0.0", "@types/unist": "^2.0.0", "mdast-util-definitions": "^4.0.0", "mdurl": "^1.0.0", "unist-builder": "^2.0.0", "unist-util-generated": "^1.0.0", "unist-util-position": "^3.0.0", "unist-util-visit": "^2.0.0" } }, "mdast-util-to-string": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz", "integrity": "sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w==" }, "mdn-data": { "version": "2.0.14", "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" }, "mdurl": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==" }, "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==" }, "memfs": { "version": "3.4.7", "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.4.7.tgz", "integrity": "sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw==", "requires": { "fs-monkey": "^1.0.3" } }, "merge-descriptors": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" }, "merge-stream": { "version": "2.0.0", "resolved": 
"https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" }, "merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==" }, "methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" }, "micromatch": { "version": "4.0.5", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", "requires": { "braces": "^3.0.2", "picomatch": "^2.3.1" } }, "mime": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" }, "mime-db": { "version": "1.51.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==" }, "mime-types": { "version": "2.1.34", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", "requires": { "mime-db": "1.51.0" } }, "mimic-fn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==" }, "mimic-response": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", "integrity": 
"sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==" }, "mini-create-react-context": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/mini-create-react-context/-/mini-create-react-context-0.4.1.tgz", "integrity": "sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ==", "requires": { "@babel/runtime": "^7.12.1", "tiny-warning": "^1.0.3" } }, "mini-css-extract-plugin": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.1.tgz", "integrity": "sha512-wd+SD57/K6DiV7jIR34P+s3uckTRuQvx0tKPcvjFlrEylk6P4mQ2KSWk1hblj1Kxaqok7LogKOieygXqBczNlg==", "requires": { "schema-utils": "^4.0.0" }, "dependencies": { "ajv": { "version": "8.11.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2", "uri-js": "^4.2.2" } }, "ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "requires": { "fast-deep-equal": "^3.1.3" } }, "json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "schema-utils": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "requires": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", "ajv-formats": "^2.1.1", "ajv-keywords": "^5.0.0" } } } }, 
"minimalistic-assert": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" }, "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "requires": { "brace-expansion": "^1.1.7" } }, "minimist": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" }, "mkdirp": { "version": "0.5.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", "requires": { "minimist": "^1.2.5" } }, "mrmime": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-1.0.0.tgz", "integrity": "sha512-a70zx7zFfVO7XpnQ2IX1Myh9yY4UYvfld/dikWRnsXxbyvMcfz+u6UfgNAtH+k2QqtJuzVpv6eLTx1G2+WKZbQ==" }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "multicast-dns": { "version": "7.2.5", "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz", "integrity": "sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==", "requires": { "dns-packet": "^5.2.2", "thunky": "^1.0.2" } }, "nanoid": { "version": "3.3.4", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz", "integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==" }, "negotiator": { "version": "0.6.3", "resolved": 
"https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==" }, "neo-async": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==" }, "no-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", "requires": { "lower-case": "^2.0.2", "tslib": "^2.0.3" } }, "node-emoji": { "version": "1.11.0", "resolved": "https://registry.npmjs.org/node-emoji/-/node-emoji-1.11.0.tgz", "integrity": "sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A==", "requires": { "lodash": "^4.17.21" } }, "node-fetch": { "version": "2.6.7", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", "requires": { "whatwg-url": "^5.0.0" } }, "node-forge": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==" }, "node-releases": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.4.tgz", "integrity": "sha512-gbMzqQtTtDz/00jQzZ21PQzdI9PyLYqUSvD0p3naOhX4odFji0ZxYdnVwPTxmSwkmxhcFImpozceidSG+AgoPQ==" }, "normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" }, "normalize-range": { "version": "0.1.2", "resolved": 
"https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==" }, "normalize-url": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==" }, "npm-run-path": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", "requires": { "path-key": "^3.0.0" } }, "nprogress": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/nprogress/-/nprogress-0.2.0.tgz", "integrity": "sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA==" }, "nth-check": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz", "integrity": "sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg==", "requires": { "boolbase": "~1.0.0" } }, "object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" }, "object-inspect": { "version": "1.11.0", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==" }, "object-keys": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" }, "object.assign": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz", "integrity": 
"sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==", "requires": { "call-bind": "^1.0.0", "define-properties": "^1.1.3", "has-symbols": "^1.0.1", "object-keys": "^1.1.1" } }, "object.getownpropertydescriptors": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.2.tgz", "integrity": "sha512-WtxeKSzfBjlzL+F9b7M7hewDzMwy+C8NRssHd1YrNlzHzIDrXcXiNOMrezdAEM4UXixgV+vvnyBeN7Rygl2ttQ==", "requires": { "call-bind": "^1.0.2", "define-properties": "^1.1.3", "es-abstract": "^1.18.0-next.2" } }, "object.values": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.4.tgz", "integrity": "sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg==", "requires": { "call-bind": "^1.0.2", "define-properties": "^1.1.3", "es-abstract": "^1.18.2" } }, "obuf": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==" }, "on-finished": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", "requires": { "ee-first": "1.1.1" } }, "on-headers": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "requires": { "wrappy": "1" } }, "onetime": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", "integrity": 
"sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", "requires": { "mimic-fn": "^2.1.0" } }, "open": { "version": "8.4.0", "resolved": "https://registry.npmjs.org/open/-/open-8.4.0.tgz", "integrity": "sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q==", "requires": { "define-lazy-prop": "^2.0.0", "is-docker": "^2.1.1", "is-wsl": "^2.2.0" } }, "opener": { "version": "1.5.2", "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz", "integrity": "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==" }, "p-cancelable": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz", "integrity": "sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==" }, "p-limit": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "requires": { "p-try": "^2.0.0" } }, "p-locate": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "requires": { "p-limit": "^2.2.0" } }, "p-map": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", "requires": { "aggregate-error": "^3.0.0" } }, "p-retry": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz", "integrity": "sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==", "requires": { "@types/retry": "0.12.0", "retry": "^0.13.1" } }, "p-try": { "version": "2.2.0", "resolved": 
"https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" }, "package-json": { "version": "6.5.0", "resolved": "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz", "integrity": "sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==", "requires": { "got": "^9.6.0", "registry-auth-token": "^4.0.0", "registry-url": "^5.0.0", "semver": "^6.2.0" }, "dependencies": { "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "param-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz", "integrity": "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==", "requires": { "dot-case": "^3.0.4", "tslib": "^2.0.3" } }, "parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "requires": { "callsites": "^3.0.0" } }, "parse-entities": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", "integrity": "sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==", "requires": { "character-entities": "^1.0.0", "character-entities-legacy": "^1.0.0", "character-reference-invalid": "^1.0.0", "is-alphanumerical": "^1.0.0", "is-decimal": "^1.0.0", "is-hexadecimal": "^1.0.0" } }, "parse-json": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", "requires": { 
"@babel/code-frame": "^7.0.0", "error-ex": "^1.3.1", "json-parse-even-better-errors": "^2.3.0", "lines-and-columns": "^1.1.6" } }, "parse-numeric-range": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz", "integrity": "sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ==" }, "parse5": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.0.0.tgz", "integrity": "sha512-y/t8IXSPWTuRZqXc0ajH/UwDj4mnqLEbSttNbThcFhGrZuOyoyvNBO85PBp2jQa55wY9d07PBNjsK8ZP3K5U6g==", "requires": { "entities": "^4.3.0" }, "dependencies": { "entities": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/entities/-/entities-4.3.1.tgz", "integrity": "sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg==" } } }, "parse5-htmlparser2-tree-adapter": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz", "integrity": "sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==", "requires": { "domhandler": "^5.0.2", "parse5": "^7.0.0" }, "dependencies": { "domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==" }, "domhandler": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", "requires": { "domelementtype": "^2.3.0" } } } }, "parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" }, "pascal-case": { 
"version": "3.1.2", "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", "requires": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==" }, "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, "path-is-inside": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=" }, "path-key": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" }, "path-parse": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" }, "path-to-regexp": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, "path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" }, "picocolors": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" }, 
"picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==" }, "pkg-dir": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", "requires": { "find-up": "^4.0.0" } }, "pkg-up": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz", "integrity": "sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==", "requires": { "find-up": "^3.0.0" }, "dependencies": { "find-up": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", "requires": { "locate-path": "^3.0.0" } }, "locate-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", "requires": { "p-locate": "^3.0.0", "path-exists": "^3.0.0" } }, "p-locate": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", "requires": { "p-limit": "^2.0.0" } }, "path-exists": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==" } } }, "postcss": { "version": "8.4.14", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.14.tgz", "integrity": 
"sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==", "requires": { "nanoid": "^3.3.4", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } }, "postcss-calc": { "version": "8.2.4", "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-8.2.4.tgz", "integrity": "sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q==", "requires": { "postcss-selector-parser": "^6.0.9", "postcss-value-parser": "^4.2.0" } }, "postcss-colormin": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-5.3.0.tgz", "integrity": "sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg==", "requires": { "browserslist": "^4.16.6", "caniuse-api": "^3.0.0", "colord": "^2.9.1", "postcss-value-parser": "^4.2.0" } }, "postcss-convert-values": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-5.1.2.tgz", "integrity": "sha512-c6Hzc4GAv95B7suy4udszX9Zy4ETyMCgFPUDtWjdFTKH1SE9eFY/jEpHSwTH1QPuwxHpWslhckUQWbNRM4ho5g==", "requires": { "browserslist": "^4.20.3", "postcss-value-parser": "^4.2.0" } }, "postcss-discard-comments": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz", "integrity": "sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ==", "requires": {} }, "postcss-discard-duplicates": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz", "integrity": "sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw==", "requires": {} }, "postcss-discard-empty": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz", "integrity": 
"sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A==", "requires": {} }, "postcss-discard-overridden": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz", "integrity": "sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw==", "requires": {} }, "postcss-discard-unused": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-discard-unused/-/postcss-discard-unused-5.1.0.tgz", "integrity": "sha512-KwLWymI9hbwXmJa0dkrzpRbSJEh0vVUd7r8t0yOGPcfKzyJJxFM8kLyC5Ev9avji6nY95pOp1W6HqIrfT+0VGw==", "requires": { "postcss-selector-parser": "^6.0.5" } }, "postcss-loader": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/postcss-loader/-/postcss-loader-7.0.1.tgz", "integrity": "sha512-VRviFEyYlLjctSM93gAZtcJJ/iSkPZ79zWbN/1fSH+NisBByEiVLqpdVDrPLVSi8DX0oJo12kL/GppTBdKVXiQ==", "requires": { "cosmiconfig": "^7.0.0", "klona": "^2.0.5", "semver": "^7.3.7" }, "dependencies": { "semver": { "version": "7.3.7", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", "requires": { "lru-cache": "^6.0.0" } } } }, "postcss-merge-idents": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-merge-idents/-/postcss-merge-idents-5.1.1.tgz", "integrity": "sha512-pCijL1TREiCoog5nQp7wUe+TUonA2tC2sQ54UGeMmryK3UFGIYKqDyjnqd6RcuI4znFn9hWSLNN8xKE/vWcUQw==", "requires": { "cssnano-utils": "^3.1.0", "postcss-value-parser": "^4.2.0" } }, "postcss-merge-longhand": { "version": "5.1.6", "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.1.6.tgz", "integrity": "sha512-6C/UGF/3T5OE2CEbOuX7iNO63dnvqhGZeUnKkDeifebY0XqkkvrctYSZurpNE902LDf2yKwwPFgotnfSoPhQiw==", "requires": { "postcss-value-parser": "^4.2.0", "stylehacks": "^5.1.0" } }, 
"postcss-merge-rules": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.1.2.tgz", "integrity": "sha512-zKMUlnw+zYCWoPN6yhPjtcEdlJaMUZ0WyVcxTAmw3lkkN/NDMRkOkiuctQEoWAOvH7twaxUUdvBWl0d4+hifRQ==", "requires": { "browserslist": "^4.16.6", "caniuse-api": "^3.0.0", "cssnano-utils": "^3.1.0", "postcss-selector-parser": "^6.0.5" } }, "postcss-minify-font-values": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz", "integrity": "sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-minify-gradients": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz", "integrity": "sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw==", "requires": { "colord": "^2.9.1", "cssnano-utils": "^3.1.0", "postcss-value-parser": "^4.2.0" } }, "postcss-minify-params": { "version": "5.1.3", "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.1.3.tgz", "integrity": "sha512-bkzpWcjykkqIujNL+EVEPOlLYi/eZ050oImVtHU7b4lFS82jPnsCb44gvC6pxaNt38Els3jWYDHTjHKf0koTgg==", "requires": { "browserslist": "^4.16.6", "cssnano-utils": "^3.1.0", "postcss-value-parser": "^4.2.0" } }, "postcss-minify-selectors": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz", "integrity": "sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg==", "requires": { "postcss-selector-parser": "^6.0.5" } }, "postcss-modules-extract-imports": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz", "integrity": 
"sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw==", "requires": {} }, "postcss-modules-local-by-default": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz", "integrity": "sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ==", "requires": { "icss-utils": "^5.0.0", "postcss-selector-parser": "^6.0.2", "postcss-value-parser": "^4.1.0" } }, "postcss-modules-scope": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz", "integrity": "sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg==", "requires": { "postcss-selector-parser": "^6.0.4" } }, "postcss-modules-values": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz", "integrity": "sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==", "requires": { "icss-utils": "^5.0.0" } }, "postcss-normalize-charset": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz", "integrity": "sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg==", "requires": {} }, "postcss-normalize-display-values": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz", "integrity": "sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-positions": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz", "integrity": 
"sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-repeat-style": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz", "integrity": "sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-string": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz", "integrity": "sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-timing-functions": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz", "integrity": "sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-unicode": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz", "integrity": "sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ==", "requires": { "browserslist": "^4.16.6", "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-url": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz", "integrity": "sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew==", "requires": { "normalize-url": "^6.0.1", "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-whitespace": { "version": "5.1.1", "resolved": 
"https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz", "integrity": "sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-ordered-values": { "version": "5.1.3", "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz", "integrity": "sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ==", "requires": { "cssnano-utils": "^3.1.0", "postcss-value-parser": "^4.2.0" } }, "postcss-reduce-idents": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/postcss-reduce-idents/-/postcss-reduce-idents-5.2.0.tgz", "integrity": "sha512-BTrLjICoSB6gxbc58D5mdBK8OhXRDqud/zodYfdSi52qvDHdMwk+9kB9xsM8yJThH/sZU5A6QVSmMmaN001gIg==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-reduce-initial": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz", "integrity": "sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw==", "requires": { "browserslist": "^4.16.6", "caniuse-api": "^3.0.0" } }, "postcss-reduce-transforms": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz", "integrity": "sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-selector-parser": { "version": "6.0.10", "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", "requires": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "postcss-sort-media-queries": { "version": "4.2.1", "resolved": 
"https://registry.npmjs.org/postcss-sort-media-queries/-/postcss-sort-media-queries-4.2.1.tgz", "integrity": "sha512-9VYekQalFZ3sdgcTjXMa0dDjsfBVHXlraYJEMiOJ/2iMmI2JGCMavP16z3kWOaRu8NSaJCTgVpB/IVpH5yT9YQ==", "requires": { "sort-css-media-queries": "2.0.4" } }, "postcss-svgo": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-5.1.0.tgz", "integrity": "sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA==", "requires": { "postcss-value-parser": "^4.2.0", "svgo": "^2.7.0" } }, "postcss-unique-selectors": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz", "integrity": "sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA==", "requires": { "postcss-selector-parser": "^6.0.5" } }, "postcss-value-parser": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==" }, "postcss-zindex": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/postcss-zindex/-/postcss-zindex-5.1.0.tgz", "integrity": "sha512-fgFMf0OtVSBR1va1JNHYgMxYk73yhn/qb4uQDq1DLGYolz8gHCyr/sesEuGUaYs58E3ZJRcpoGuPVoB7Meiq9A==", "requires": {} }, "prepend-http": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=" }, "pretty-error": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/pretty-error/-/pretty-error-4.0.0.tgz", "integrity": "sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw==", "requires": { "lodash": "^4.17.20", "renderkid": "^3.0.0" } }, "pretty-time": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/pretty-time/-/pretty-time-1.1.0.tgz", "integrity": 
"sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA==" }, "prism-react-renderer": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/prism-react-renderer/-/prism-react-renderer-1.3.5.tgz", "integrity": "sha512-IJ+MSwBWKG+SM3b2SUfdrhC+gu01QkV2KmRQgREThBfSQRoufqRfxfHUxpG1WcaFjP+kojcFyO9Qqtpgt3qLCg==", "requires": {} }, "prismjs": { "version": "1.28.0", "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.28.0.tgz", "integrity": "sha512-8aaXdYvl1F7iC7Xm1spqSaY/OJBpYW3v+KJ+F17iYxvdc8sfjW194COK5wVhMZX45tGteiBQgdvD/nhxcRwylw==" }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "promise": { "version": "7.3.1", "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", "requires": { "asap": "~2.0.3" } }, "prompts": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", "requires": { "kleur": "^3.0.3", "sisteransi": "^1.0.5" } }, "prop-types": { "version": "15.7.2", "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", "requires": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", "react-is": "^16.8.1" } }, "property-information": { "version": "5.6.0", "resolved": "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz", "integrity": "sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==", "requires": { "xtend": "^4.0.0" } }, "proxy-addr": 
{ "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", "requires": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" }, "dependencies": { "ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" } } }, "pump": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", "requires": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "punycode": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" }, "pupa": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/pupa/-/pupa-2.1.1.tgz", "integrity": "sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A==", "requires": { "escape-goat": "^2.0.0" } }, "pure-color": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/pure-color/-/pure-color-1.3.0.tgz", "integrity": "sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA==" }, "q": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", "integrity": "sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=" }, "qs": { "version": "6.10.3", "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", "requires": { "side-channel": "^1.0.4" } }, "querystringify": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", "integrity": 
"sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==" }, "queue": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/queue/-/queue-6.0.2.tgz", "integrity": "sha512-iHZWu+q3IdFZFX36ro/lKBkSvfkztY5Y7HMiPlOUjhupPcG2JMfst2KKEpu5XndviX/3UhFbRngUPNKtgvtZiA==", "requires": { "inherits": "~2.0.3" } }, "queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==" }, "randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", "requires": { "safe-buffer": "^5.1.0" } }, "range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, "raw-body": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", "requires": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" }, "dependencies": { "bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" } } }, "rc": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", "requires": { "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" } }, "react": { "version": "17.0.2", 
"resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz", "integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==", "requires": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "react-base16-styling": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/react-base16-styling/-/react-base16-styling-0.6.0.tgz", "integrity": "sha512-yvh/7CArceR/jNATXOKDlvTnPKPmGZz7zsenQ3jUwLzHkNUR0CvY3yGYJbWJ/nnxsL8Sgmt5cO3/SILVuPO6TQ==", "requires": { "base16": "^1.0.0", "lodash.curry": "^4.0.1", "lodash.flow": "^3.3.0", "pure-color": "^1.2.0" } }, "react-dev-utils": { "version": "12.0.1", "resolved": "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-12.0.1.tgz", "integrity": "sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ==", "requires": { "@babel/code-frame": "^7.16.0", "address": "^1.1.2", "browserslist": "^4.18.1", "chalk": "^4.1.2", "cross-spawn": "^7.0.3", "detect-port-alt": "^1.1.6", "escape-string-regexp": "^4.0.0", "filesize": "^8.0.6", "find-up": "^5.0.0", "fork-ts-checker-webpack-plugin": "^6.5.0", "global-modules": "^2.0.0", "globby": "^11.0.4", "gzip-size": "^6.0.0", "immer": "^9.0.7", "is-root": "^2.1.0", "loader-utils": "^3.2.0", "open": "^8.4.0", "pkg-up": "^3.1.0", "prompts": "^2.4.2", "react-error-overlay": "^6.0.11", "recursive-readdir": "^2.2.2", "shell-quote": "^1.7.3", "strip-ansi": "^6.0.1", "text-table": "^0.2.0" }, "dependencies": { "escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==" }, "find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "requires": { 
"locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "loader-utils": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.0.tgz", "integrity": "sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ==" }, "locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "requires": { "p-locate": "^5.0.0" } }, "p-limit": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "requires": { "yocto-queue": "^0.1.0" } }, "p-locate": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "requires": { "p-limit": "^3.0.2" } } } }, "react-dom": { "version": "17.0.2", "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz", "integrity": "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==", "requires": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "scheduler": "^0.20.2" } }, "react-error-overlay": { "version": "6.0.11", "resolved": "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-6.0.11.tgz", "integrity": "sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg==" }, "react-fast-compare": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-3.2.0.tgz", "integrity": "sha512-rtGImPZ0YyLrscKI9xTpV8psd6I8VAtjKCzQDlzyDvqJA8XOW78TXYQwNRNd8g8JZnDu8q9Fu/1v4HPAVwVdHA==" }, "react-helmet-async": { "version": "1.3.0", "resolved": 
"https://registry.npmjs.org/react-helmet-async/-/react-helmet-async-1.3.0.tgz", "integrity": "sha512-9jZ57/dAn9t3q6hneQS0wukqC2ENOBgMNVEhb/ZG9ZSxUetzVIw4iAmEU38IaVg3QGYauQPhSeUTuIUtFglWpg==", "requires": { "@babel/runtime": "^7.12.5", "invariant": "^2.2.4", "prop-types": "^15.7.2", "react-fast-compare": "^3.2.0", "shallowequal": "^1.1.0" } }, "react-is": { "version": "16.13.1", "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "react-json-view": { "version": "1.21.3", "resolved": "https://registry.npmjs.org/react-json-view/-/react-json-view-1.21.3.tgz", "integrity": "sha512-13p8IREj9/x/Ye4WI/JpjhoIwuzEgUAtgJZNBJckfzJt1qyh24BdTm6UQNGnyTq9dapQdrqvquZTo3dz1X6Cjw==", "requires": { "flux": "^4.0.1", "react-base16-styling": "^0.6.0", "react-lifecycles-compat": "^3.0.4", "react-textarea-autosize": "^8.3.2" } }, "react-lifecycles-compat": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz", "integrity": "sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==" }, "react-loadable": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/react-loadable/-/react-loadable-5.5.0.tgz", "integrity": "sha512-C8Aui0ZpMd4KokxRdVAm2bQtI03k2RMRNzOB+IipV3yxFTSVICv7WoUr5L9ALB5BmKO1iHgZtWM8EvYG83otdg==", "peer": true, "requires": { "prop-types": "^15.5.0" } }, "react-loadable-ssr-addon-v5-slorber": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/react-loadable-ssr-addon-v5-slorber/-/react-loadable-ssr-addon-v5-slorber-1.0.1.tgz", "integrity": "sha512-lq3Lyw1lGku8zUEJPDxsNm1AfYHBrO9Y1+olAYwpUJ2IGFBskM0DMKok97A6LWUpHm+o7IvQBOWu9MLenp9Z+A==", "requires": { "@babel/runtime": "^7.10.3" } }, "react-router": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/react-router/-/react-router-5.3.3.tgz", "integrity": 
"sha512-mzQGUvS3bM84TnbtMYR8ZjKnuPJ71IjSzR+DE6UkUqvN4czWIqEs17yLL8xkAycv4ev0AiN+IGrWu88vJs/p2w==", "requires": { "@babel/runtime": "^7.12.13", "history": "^4.9.0", "hoist-non-react-statics": "^3.1.0", "loose-envify": "^1.3.1", "mini-create-react-context": "^0.4.0", "path-to-regexp": "^1.7.0", "prop-types": "^15.6.2", "react-is": "^16.6.0", "tiny-invariant": "^1.0.2", "tiny-warning": "^1.0.0" }, "dependencies": { "isarray": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" }, "path-to-regexp": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", "requires": { "isarray": "0.0.1" } } } }, "react-router-config": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/react-router-config/-/react-router-config-5.1.1.tgz", "integrity": "sha512-DuanZjaD8mQp1ppHjgnnUnyOlqYXZVjnov/JzFhjLEwd3Z4dYjMSnqrEzzGThH47vpCOqPPwJM2FtthLeJ8Pbg==", "requires": { "@babel/runtime": "^7.1.2" } }, "react-router-dom": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.3.3.tgz", "integrity": "sha512-Ov0tGPMBgqmbu5CDmN++tv2HQ9HlWDuWIIqn4b88gjlAN5IHI+4ZUZRcpz9Hl0azFIwihbLDYw1OiHGRo7ZIng==", "requires": { "@babel/runtime": "^7.12.13", "history": "^4.9.0", "loose-envify": "^1.3.1", "prop-types": "^15.6.2", "react-router": "5.3.3", "tiny-invariant": "^1.0.2", "tiny-warning": "^1.0.0" } }, "react-textarea-autosize": { "version": "8.3.4", "resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.3.4.tgz", "integrity": "sha512-CdtmP8Dc19xL8/R6sWvtknD/eCXkQr30dtvC4VmGInhRsfF8X/ihXCq6+9l9qbxmKRiq407/7z5fxE7cVWQNgQ==", "requires": { "@babel/runtime": "^7.10.2", "use-composed-ref": "^1.3.0", "use-latest": "^1.2.1" }, "dependencies": { "use-composed-ref": { "version": 
"1.3.0", "resolved": "https://registry.npmjs.org/use-composed-ref/-/use-composed-ref-1.3.0.tgz", "integrity": "sha512-GLMG0Jc/jiKov/3Ulid1wbv3r54K9HlMW29IWcDFPEqFkSO2nS0MuefWgMJpeHQ9YJeXDL3ZUF+P3jdXlZX/cQ==", "requires": {} }, "use-latest": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/use-latest/-/use-latest-1.2.1.tgz", "integrity": "sha512-xA+AVm/Wlg3e2P/JiItTziwS7FK92LWrDB0p+hgXloIMuVCeJJ8v6f0eeHyPZaJrM+usM1FkFfbNCrJGs8A/zw==", "requires": { "use-isomorphic-layout-effect": "^1.1.1" }, "dependencies": { "use-isomorphic-layout-effect": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.1.2.tgz", "integrity": "sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA==", "requires": {} } } } } }, "readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", "requires": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "readdirp": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", "requires": { "picomatch": "^2.2.1" } }, "reading-time": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/reading-time/-/reading-time-1.5.0.tgz", "integrity": "sha512-onYyVhBNr4CmAxFsKS7bz+uTLRakypIe4R+5A824vBSkQy/hB3fZepoVEf8OVAxzLvK+H/jm9TzpI3ETSm64Kg==" }, "rechoir": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", "integrity": "sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=", "requires": { "resolve": "^1.1.6" } }, "recursive-readdir": { "version": "2.2.2", "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.2.tgz", "integrity": 
"sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg==", "requires": { "minimatch": "3.0.4" } }, "regenerate": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==" }, "regenerate-unicode-properties": { "version": "10.0.1", "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.0.1.tgz", "integrity": "sha512-vn5DU6yg6h8hP/2OkQo3K7uVILvY4iu0oI4t3HFa81UPkhGJwkRwM10JEc3upjdhHjs/k8GJY1sRBhk5sr69Bw==", "requires": { "regenerate": "^1.4.2" } }, "regenerator-runtime": { "version": "0.13.9", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==" }, "regenerator-transform": { "version": "0.15.0", "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.0.tgz", "integrity": "sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg==", "requires": { "@babel/runtime": "^7.8.4" } }, "regexpu-core": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.1.0.tgz", "integrity": "sha512-bb6hk+xWd2PEOkj5It46A16zFMs2mv86Iwpdu94la4S3sJ7C973h2dHpYKwIBGaWSO7cIRJ+UX0IeMaWcO4qwA==", "requires": { "regenerate": "^1.4.2", "regenerate-unicode-properties": "^10.0.1", "regjsgen": "^0.6.0", "regjsparser": "^0.8.2", "unicode-match-property-ecmascript": "^2.0.0", "unicode-match-property-value-ecmascript": "^2.0.0" } }, "registry-auth-token": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz", "integrity": "sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw==", "requires": { "rc": "^1.2.8" } }, 
"registry-url": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz", "integrity": "sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==", "requires": { "rc": "^1.2.8" } }, "regjsgen": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.6.0.tgz", "integrity": "sha512-ozE883Uigtqj3bx7OhL1KNbCzGyW2NQZPl6Hs09WTvCuZD5sTI4JY58bkbQWa/Y9hxIsvJ3M8Nbf7j54IqeZbA==" }, "regjsparser": { "version": "0.8.4", "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.8.4.tgz", "integrity": "sha512-J3LABycON/VNEu3abOviqGHuB/LOtOQj8SKmfP9anY5GfAVw/SPjwzSjxGjbZXIxbGfqTHtJw58C2Li/WkStmA==", "requires": { "jsesc": "~0.5.0" }, "dependencies": { "jsesc": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==" } } }, "relateurl": { "version": "0.2.7", "resolved": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz", "integrity": "sha1-VNvzd+UUQKypCkzSdGANP/LYiKk=" }, "remark-emoji": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/remark-emoji/-/remark-emoji-2.2.0.tgz", "integrity": "sha512-P3cj9s5ggsUvWw5fS2uzCHJMGuXYRb0NnZqYlNecewXt8QBU9n5vW3DUUKOhepS8F9CwdMx9B8a3i7pqFWAI5w==", "requires": { "emoticon": "^3.2.0", "node-emoji": "^1.10.0", "unist-util-visit": "^2.0.3" } }, "remark-footnotes": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/remark-footnotes/-/remark-footnotes-2.0.0.tgz", "integrity": "sha512-3Clt8ZMH75Ayjp9q4CorNeyjwIxHFcTkaektplKGl2A1jNGEUey8cKL0ZC5vJwfcD5GFGsNLImLG/NGzWIzoMQ==" }, "remark-mdx": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-1.6.22.tgz", "integrity": "sha512-phMHBJgeV76uyFkH4rvzCftLfKCr2RZuF+/gmVcaKrpsihyzmhXjA0BEMDaPTXG5y8qZOKPVo83NAOX01LPnOQ==", "requires": { "@babel/core": "7.12.9", 
"@babel/helper-plugin-utils": "7.10.4", "@babel/plugin-proposal-object-rest-spread": "7.12.1", "@babel/plugin-syntax-jsx": "7.12.1", "@mdx-js/util": "1.6.22", "is-alphabetical": "1.0.4", "remark-parse": "8.0.3", "unified": "9.2.0" }, "dependencies": { "@babel/core": { "version": "7.12.9", "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz", "integrity": "sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ==", "requires": { "@babel/code-frame": "^7.10.4", "@babel/generator": "^7.12.5", "@babel/helper-module-transforms": "^7.12.1", "@babel/helpers": "^7.12.5", "@babel/parser": "^7.12.7", "@babel/template": "^7.12.7", "@babel/traverse": "^7.12.9", "@babel/types": "^7.12.7", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.1", "json5": "^2.1.2", "lodash": "^4.17.19", "resolve": "^1.3.2", "semver": "^5.4.1", "source-map": "^0.5.0" } }, "@babel/helper-plugin-utils": { "version": "7.10.4", "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" }, "@babel/plugin-proposal-object-rest-spread": { "version": "7.12.1", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.12.1.tgz", "integrity": "sha512-s6SowJIjzlhx8o7lsFx5zmY4At6CTtDvgNQDdPzkBQucle58A6b/TTeEBYtyDgmcXjUTM+vE8YOGHZzzbc/ioA==", "requires": { "@babel/helper-plugin-utils": "^7.10.4", "@babel/plugin-syntax-object-rest-spread": "^7.8.0", "@babel/plugin-transform-parameters": "^7.12.1" } }, "@babel/plugin-syntax-jsx": { "version": "7.12.1", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz", "integrity": "sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg==", "requires": { "@babel/helper-plugin-utils": "^7.10.4" } }, 
"semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" }, "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==" }, "unified": { "version": "9.2.0", "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz", "integrity": "sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg==", "requires": { "bail": "^1.0.0", "extend": "^3.0.0", "is-buffer": "^2.0.0", "is-plain-obj": "^2.0.0", "trough": "^1.0.0", "vfile": "^4.0.0" } } } }, "remark-parse": { "version": "8.0.3", "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-8.0.3.tgz", "integrity": "sha512-E1K9+QLGgggHxCQtLt++uXltxEprmWzNfg+MxpfHsZlrddKzZ/hZyWHDbK3/Ap8HJQqYJRXP+jHczdL6q6i85Q==", "requires": { "ccount": "^1.0.0", "collapse-white-space": "^1.0.2", "is-alphabetical": "^1.0.0", "is-decimal": "^1.0.0", "is-whitespace-character": "^1.0.0", "is-word-character": "^1.0.0", "markdown-escapes": "^1.0.0", "parse-entities": "^2.0.0", "repeat-string": "^1.5.4", "state-toggle": "^1.0.0", "trim": "0.0.1", "trim-trailing-lines": "^1.0.0", "unherit": "^1.0.4", "unist-util-remove-position": "^2.0.0", "vfile-location": "^3.0.0", "xtend": "^4.0.1" } }, "remark-squeeze-paragraphs": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/remark-squeeze-paragraphs/-/remark-squeeze-paragraphs-4.0.0.tgz", "integrity": "sha512-8qRqmL9F4nuLPIgl92XUuxI3pFxize+F1H0e/W3llTk0UsjJaj01+RrirkMw7P21RKe4X6goQhYRSvNWX+70Rw==", "requires": { "mdast-squeeze-paragraphs": "^4.0.0" } }, "renderkid": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz", "integrity": 
"sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg==", "requires": { "css-select": "^4.1.3", "dom-converter": "^0.2.0", "htmlparser2": "^6.1.0", "lodash": "^4.17.21", "strip-ansi": "^6.0.1" } }, "repeat-string": { "version": "1.6.1", "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==" }, "require-from-string": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==" }, "require-like": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==" }, "requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=" }, "resolve": { "version": "1.20.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", "requires": { "is-core-module": "^2.2.0", "path-parse": "^1.0.6" } }, "resolve-from": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" }, "resolve-pathname": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz", "integrity": "sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==" }, "responselike": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", "integrity": "sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec=", "requires": { "lowercase-keys": "^1.0.0" } }, "retry": { "version": "0.13.1", "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==" }, "reusify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" }, "rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "requires": { "glob": "^7.1.3" } }, "rtl-detect": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/rtl-detect/-/rtl-detect-1.0.4.tgz", "integrity": "sha512-EBR4I2VDSSYr7PkBmFy04uhycIpDKp+21p/jARYXlCSjQksTBQcJ0HFUPOO79EPPH5JS6VAhiIQbycf0O3JAxQ==" }, "rtlcss": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/rtlcss/-/rtlcss-3.5.0.tgz", "integrity": "sha512-wzgMaMFHQTnyi9YOwsx9LjOxYXJPzS8sYnFaKm6R5ysvTkwzHiB0vxnbHwchHQT65PTdBjDG21/kQBWI7q9O7A==", "requires": { "find-up": "^5.0.0", "picocolors": "^1.0.0", "postcss": "^8.3.11", "strip-json-comments": "^3.1.1" }, "dependencies": { "find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "requires": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "requires": { "p-locate": "^5.0.0" } }, "p-limit": { "version": "3.1.0", 
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "requires": { "yocto-queue": "^0.1.0" } }, "p-locate": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "requires": { "p-limit": "^3.0.2" } }, "strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==" } } }, "run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", "requires": { "queue-microtask": "^1.2.2" } }, "rxjs": { "version": "7.5.5", "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.5.5.tgz", "integrity": "sha512-sy+H0pQofO95VDmFLzyaw9xNJU4KTRSwQIGM6+iG3SypAtCiLDzpeG8sJrNCWn2Up9km+KhkvTdbkrdy+yzZdw==", "requires": { "tslib": "^2.1.0" } }, "safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "sax": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" }, "scheduler": { "version": "0.20.2", "resolved": 
"https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz", "integrity": "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==", "requires": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "schema-utils": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", "requires": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", "ajv-keywords": "^3.5.2" } }, "section-matter": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz", "integrity": "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==", "requires": { "extend-shallow": "^2.0.1", "kind-of": "^6.0.0" } }, "select-hose": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", "integrity": "sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==" }, "selfsigned": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-2.0.1.tgz", "integrity": "sha512-LmME957M1zOsUhG+67rAjKfiWFox3SBxE/yymatMZsAx+oMrJ0YQ8AToOnyCm7xbeg2ep37IHLxdu0o2MavQOQ==", "requires": { "node-forge": "^1" } }, "semver": { "version": "7.3.5", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", "requires": { "lru-cache": "^6.0.0" } }, "semver-diff": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz", "integrity": "sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==", "requires": { "semver": "^6.3.0" }, "dependencies": { "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", 
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "send": { "version": "0.18.0", "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", "requires": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" }, "dependencies": { "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" }, "dependencies": { "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" } } }, "ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" } } }, "serialize-javascript": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", "requires": { "randombytes": "^2.1.0" } }, "serve-handler": { "version": "6.1.3", "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.3.tgz", "integrity": "sha512-FosMqFBNrLyeiIDvP1zgO6YoTzFYHxLDEIavhlmQ+knB2Z7l1t+kGLHkZIDN7UVWqQAmKI3D20A6F6jo3nDd4w==", "requires": { "bytes": "3.0.0", "content-disposition": "0.5.2", "fast-url-parser": "1.1.3", "mime-types": "2.1.18", "minimatch": "3.0.4", "path-is-inside": "1.0.2", 
"path-to-regexp": "2.2.1", "range-parser": "1.2.0" }, "dependencies": { "content-disposition": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", "integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ=" }, "mime-db": { "version": "1.33.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz", "integrity": "sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==" }, "mime-types": { "version": "2.1.18", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", "integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==", "requires": { "mime-db": "~1.33.0" } }, "path-to-regexp": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz", "integrity": "sha512-gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ==" }, "range-parser": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=" } } }, "serve-index": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz", "integrity": "sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw==", "requires": { "accepts": "~1.3.4", "batch": "0.6.1", "debug": "2.6.9", "escape-html": "~1.0.3", "http-errors": "~1.6.2", "mime-types": "~2.1.17", "parseurl": "~1.3.2" }, "dependencies": { "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "depd": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", "integrity": 
"sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==" }, "http-errors": { "version": "1.6.3", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", "integrity": "sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A==", "requires": { "depd": "~1.1.2", "inherits": "2.0.3", "setprototypeof": "1.1.0", "statuses": ">= 1.4.0 < 2" } }, "inherits": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "setprototypeof": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" }, "statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==" } } }, "serve-static": { "version": "1.15.0", "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", "requires": { "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.18.0" } }, "setimmediate": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==" }, "setprototypeof": { "version": "1.2.0", "resolved": 
"https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "shallow-clone": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", "requires": { "kind-of": "^6.0.2" } }, "shallowequal": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz", "integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==" }, "shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "requires": { "shebang-regex": "^3.0.0" } }, "shebang-regex": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" }, "shell-quote": { "version": "1.7.3", "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.3.tgz", "integrity": "sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw==" }, "shelljs": { "version": "0.8.5", "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", "requires": { "glob": "^7.0.0", "interpret": "^1.0.0", "rechoir": "^0.6.2" } }, "side-channel": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", "requires": { "call-bind": "^1.0.0", 
"get-intrinsic": "^1.0.2", "object-inspect": "^1.9.0" } }, "signal-exit": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==" }, "sirv": { "version": "1.0.19", "resolved": "https://registry.npmjs.org/sirv/-/sirv-1.0.19.tgz", "integrity": "sha512-JuLThK3TnZG1TAKDwNIqNq6QA2afLOCcm+iE8D1Kj3GA40pSPsxQjjJl0J8X3tsR7T+CP1GavpzLwYkgVLWrZQ==", "requires": { "@polka/url": "^1.0.0-next.20", "mrmime": "^1.0.0", "totalist": "^1.0.0" } }, "sisteransi": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==" }, "sitemap": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/sitemap/-/sitemap-7.1.1.tgz", "integrity": "sha512-mK3aFtjz4VdJN0igpIJrinf3EO8U8mxOPsTBzSsy06UtjZQJ3YY3o3Xa7zSc5nMqcMrRwlChHZ18Kxg0caiPBg==", "requires": { "@types/node": "^17.0.5", "@types/sax": "^1.2.1", "arg": "^5.0.0", "sax": "^1.2.4" } }, "slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==" }, "sockjs": { "version": "0.3.24", "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz", "integrity": "sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==", "requires": { "faye-websocket": "^0.11.3", "uuid": "^8.3.2", "websocket-driver": "^0.7.4" } }, "sort-css-media-queries": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/sort-css-media-queries/-/sort-css-media-queries-2.0.4.tgz", "integrity": "sha512-PAIsEK/XupCQwitjv7XxoMvYhT7EAfyzI3hsy/MyDgTvc+Ft55ctdkctJLOy6cQejaIC+zjpUL4djFVm2ivOOw==" }, "source-map": { "version": "0.6.1", "resolved": 
"https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, "source-map-js": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==" }, "source-map-support": { "version": "0.5.21", "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", "requires": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, "space-separated-tokens": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz", "integrity": "sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==" }, "spdy": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz", "integrity": "sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==", "requires": { "debug": "^4.1.0", "handle-thing": "^2.0.0", "http-deceiver": "^1.2.7", "select-hose": "^2.0.0", "spdy-transport": "^3.0.0" } }, "spdy-transport": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz", "integrity": "sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==", "requires": { "debug": "^4.1.0", "detect-node": "^2.0.4", "hpack.js": "^2.1.6", "obuf": "^1.1.2", "readable-stream": "^3.0.6", "wbuf": "^1.7.3" } }, "sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" }, "stable": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz", 
"integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==" }, "state-toggle": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/state-toggle/-/state-toggle-1.0.3.tgz", "integrity": "sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ==" }, "statuses": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==" }, "std-env": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.0.1.tgz", "integrity": "sha512-mC1Ps9l77/97qeOZc+HrOL7TIaOboHqMZ24dGVQrlxFcpPpfCHpH+qfUT7Dz+6mlG8+JPA1KfBQo19iC/+Ngcw==" }, "string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", "requires": { "safe-buffer": "~5.2.0" }, "dependencies": { "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" } } }, "string-width": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.0" }, "dependencies": { "emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" } } }, "string.prototype.trimend": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz", "integrity": "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==", "requires": { "call-bind": "^1.0.2", "define-properties": "^1.1.3" } }, "string.prototype.trimstart": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz", "integrity": "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==", "requires": { "call-bind": "^1.0.2", "define-properties": "^1.1.3" } }, "stringify-object": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz", "integrity": "sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==", "requires": { "get-own-enumerable-property-symbols": "^3.0.0", "is-obj": "^1.0.1", "is-regexp": "^1.0.0" }, "dependencies": { "is-obj": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", "integrity": "sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==" } } }, "strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "requires": { "ansi-regex": "^5.0.1" }, "dependencies": { "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" } } }, 
"strip-bom-string": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz", "integrity": "sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==" }, "strip-final-newline": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==" }, "strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=" }, "style-to-object": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz", "integrity": "sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==", "requires": { "inline-style-parser": "0.1.1" } }, "stylehacks": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-5.1.0.tgz", "integrity": "sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q==", "requires": { "browserslist": "^4.16.6", "postcss-selector-parser": "^6.0.4" } }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", "requires": { "has-flag": "^3.0.0" } }, "svg-parser": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz", "integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==" }, "svgo": { "version": "2.8.0", "resolved": "https://registry.npmjs.org/svgo/-/svgo-2.8.0.tgz", "integrity": "sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg==", 
"requires": { "@trysound/sax": "0.2.0", "commander": "^7.2.0", "css-select": "^4.1.3", "css-tree": "^1.1.3", "csso": "^4.2.0", "picocolors": "^1.0.0", "stable": "^0.1.8" }, "dependencies": { "commander": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==" } } }, "tapable": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.0.tgz", "integrity": "sha512-FBk4IesMV1rBxX2tfiK8RAmogtWn53puLOQlvO8XuwlgxcYbP4mVPS9Ph4aeamSyyVjOl24aYWAuc8U5kCVwMw==" }, "terser": { "version": "5.10.0", "resolved": "https://registry.npmjs.org/terser/-/terser-5.10.0.tgz", "integrity": "sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA==", "requires": { "commander": "^2.20.0", "source-map": "~0.7.2", "source-map-support": "~0.5.20" }, "dependencies": { "commander": { "version": "2.20.3", "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" }, "source-map": { "version": "0.7.3", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==" } } }, "terser-webpack-plugin": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.3.tgz", "integrity": "sha512-Fx60G5HNYknNTNQnzQ1VePRuu89ZVYWfjRAeT5rITuCY/1b08s49e5kSQwHDirKZWuoKOBRFS98EUUoZ9kLEwQ==", "requires": { "@jridgewell/trace-mapping": "^0.3.7", "jest-worker": "^27.4.5", "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.0", "terser": "^5.7.2" } }, "text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", "integrity": 
"sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==" }, "thunky": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" }, "tiny-invariant": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.1.0.tgz", "integrity": "sha512-ytxQvrb1cPc9WBEI/HSeYYoGD0kWnGEOR8RY6KomWLBVhqz0RgTwVO9dLrGz7dC+nN9llyI7OKAgRq8Vq4ZBSw==" }, "tiny-warning": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" }, "to-fast-properties": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=" }, "to-readable-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz", "integrity": "sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==" }, "to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "requires": { "is-number": "^7.0.0" } }, "toidentifier": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" }, "totalist": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/totalist/-/totalist-1.1.0.tgz", "integrity": "sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g==" }, "tr46": { "version": "0.0.3", "resolved": 
"https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, "trim": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz", "integrity": "sha512-YzQV+TZg4AxpKxaTHK3c3D+kRDCGVEE7LemdlQZoQXn0iennk10RsIoY6ikzAqJTc9Xjl9C1/waHom/J86ziAQ==" }, "trim-trailing-lines": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz", "integrity": "sha512-rjUWSqnfTNrjbB9NQWfPMH/xRK1deHeGsHoVfpxJ++XeYXE0d6B1En37AHfw3jtfTU7dzMzZL2jjpe8Qb5gLIQ==" }, "trough": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz", "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==" }, "tslib": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "type-fest": { "version": "0.20.2", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==" }, "type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", "requires": { "media-typer": "0.3.0", "mime-types": "~2.1.24" } }, "typedarray-to-buffer": { "version": "3.1.5", "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", "requires": { "is-typedarray": "^1.0.0" } }, "typescript": { "version": "4.7.4", "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz", "integrity": 
"sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==", "peer": true }, "ua-parser-js": { "version": "0.7.31", "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.31.tgz", "integrity": "sha512-qLK/Xe9E2uzmYI3qLeOmI0tEOt+TBBQyUIAh4aAgU05FVYzeZrKUdkAZfBNVGRaHVgV0TDkdEngJSw/SyQchkQ==" }, "unbox-primitive": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz", "integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==", "requires": { "function-bind": "^1.1.1", "has-bigints": "^1.0.1", "has-symbols": "^1.0.2", "which-boxed-primitive": "^1.0.2" } }, "unherit": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/unherit/-/unherit-1.1.3.tgz", "integrity": "sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ==", "requires": { "inherits": "^2.0.0", "xtend": "^4.0.0" } }, "unicode-canonical-property-names-ecmascript": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==" }, "unicode-match-property-ecmascript": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", "requires": { "unicode-canonical-property-names-ecmascript": "^2.0.0", "unicode-property-aliases-ecmascript": "^2.0.0" } }, "unicode-match-property-value-ecmascript": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz", "integrity": 
"sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==" }, "unicode-property-aliases-ecmascript": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz", "integrity": "sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ==" }, "unified": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.2.tgz", "integrity": "sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ==", "requires": { "bail": "^1.0.0", "extend": "^3.0.0", "is-buffer": "^2.0.0", "is-plain-obj": "^2.0.0", "trough": "^1.0.0", "vfile": "^4.0.0" } }, "unique-string": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", "requires": { "crypto-random-string": "^2.0.0" } }, "unist-builder": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/unist-builder/-/unist-builder-2.0.3.tgz", "integrity": "sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw==" }, "unist-util-generated": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-1.1.6.tgz", "integrity": "sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg==" }, "unist-util-is": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz", "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==" }, "unist-util-position": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.1.0.tgz", "integrity": 
"sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA==" }, "unist-util-remove": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.1.0.tgz", "integrity": "sha512-J8NYPyBm4baYLdCbjmf1bhPu45Cr1MWTm77qd9istEkzWpnN6O9tMsEbB2JhNnBCqGENRqEWomQ+He6au0B27Q==", "requires": { "unist-util-is": "^4.0.0" } }, "unist-util-remove-position": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-2.0.1.tgz", "integrity": "sha512-fDZsLYIe2uT+oGFnuZmy73K6ZxOPG/Qcm+w7jbEjaFcJgbQ6cqjs/eSPzXhsmGpAsWPkqZM9pYjww5QTn3LHMA==", "requires": { "unist-util-visit": "^2.0.0" } }, "unist-util-stringify-position": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz", "integrity": "sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g==", "requires": { "@types/unist": "^2.0.2" } }, "unist-util-visit": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz", "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==", "requires": { "@types/unist": "^2.0.0", "unist-util-is": "^4.0.0", "unist-util-visit-parents": "^3.0.0" } }, "unist-util-visit-parents": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz", "integrity": "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==", "requires": { "@types/unist": "^2.0.0", "unist-util-is": "^4.0.0" } }, "universalify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" }, "unpipe": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==" }, "unquote": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/unquote/-/unquote-1.1.1.tgz", "integrity": "sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ=" }, "update-notifier": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-5.1.0.tgz", "integrity": "sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw==", "requires": { "boxen": "^5.0.0", "chalk": "^4.1.0", "configstore": "^5.0.1", "has-yarn": "^2.1.0", "import-lazy": "^2.1.0", "is-ci": "^2.0.0", "is-installed-globally": "^0.4.0", "is-npm": "^5.0.0", "is-yarn-global": "^0.3.0", "latest-version": "^5.1.0", "pupa": "^2.1.1", "semver": "^7.3.4", "semver-diff": "^3.1.1", "xdg-basedir": "^4.0.0" } }, "uri-js": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", "requires": { "punycode": "^2.1.0" } }, "url-loader": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/url-loader/-/url-loader-4.1.1.tgz", "integrity": "sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==", "requires": { "loader-utils": "^2.0.0", "mime-types": "^2.1.27", "schema-utils": "^3.0.0" } }, "url-parse": { "version": "1.5.3", "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.3.tgz", "integrity": "sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ==", "requires": { "querystringify": "^2.1.1", "requires-port": "^1.0.0" } }, "url-parse-lax": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", "integrity": "sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=", "requires": { "prepend-http": "^2.0.0" } }, 
"util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" }, "util.promisify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/util.promisify/-/util.promisify-1.0.1.tgz", "integrity": "sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA==", "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.2", "has-symbols": "^1.0.1", "object.getownpropertydescriptors": "^2.1.0" } }, "utila": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz", "integrity": "sha1-ihagXURWV6Oupe7MWxKk+lN5dyw=" }, "utility-types": { "version": "3.10.0", "resolved": "https://registry.npmjs.org/utility-types/-/utility-types-3.10.0.tgz", "integrity": "sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg==" }, "utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==" }, "uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" }, "value-equal": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz", "integrity": "sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==" }, "vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==" }, "vfile": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/vfile/-/vfile-4.2.1.tgz", "integrity": 
"sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA==", "requires": { "@types/unist": "^2.0.0", "is-buffer": "^2.0.0", "unist-util-stringify-position": "^2.0.0", "vfile-message": "^2.0.0" } }, "vfile-location": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-3.2.0.tgz", "integrity": "sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA==" }, "vfile-message": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.4.tgz", "integrity": "sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ==", "requires": { "@types/unist": "^2.0.0", "unist-util-stringify-position": "^2.0.0" } }, "wait-on": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/wait-on/-/wait-on-6.0.1.tgz", "integrity": "sha512-zht+KASY3usTY5u2LgaNqn/Cd8MukxLGjdcZxT2ns5QzDmTFc4XoWBgC+C/na+sMRZTuVygQoMYwdcVjHnYIVw==", "requires": { "axios": "^0.25.0", "joi": "^17.6.0", "lodash": "^4.17.21", "minimist": "^1.2.5", "rxjs": "^7.5.4" } }, "watchpack": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.3.1.tgz", "integrity": "sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA==", "requires": { "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.1.2" } }, "wbuf": { "version": "1.7.3", "resolved": "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz", "integrity": "sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==", "requires": { "minimalistic-assert": "^1.0.0" } }, "web-namespaces": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-1.1.4.tgz", "integrity": "sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==" }, "webidl-conversions": { "version": "3.0.1", "resolved": 
"https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" }, "webpack": { "version": "5.73.0", "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.73.0.tgz", "integrity": "sha512-svjudQRPPa0YiOYa2lM/Gacw0r6PvxptHj4FuEKQ2kX05ZLkjbVc5MnPs6its5j7IZljnIqSVo/OsY2X0IpHGA==", "requires": { "@types/eslint-scope": "^3.7.3", "@types/estree": "^0.0.51", "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/wasm-edit": "1.11.1", "@webassemblyjs/wasm-parser": "1.11.1", "acorn": "^8.4.1", "acorn-import-assertions": "^1.7.6", "browserslist": "^4.14.5", "chrome-trace-event": "^1.0.2", "enhanced-resolve": "^5.9.3", "es-module-lexer": "^0.9.0", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.2.9", "json-parse-even-better-errors": "^2.3.1", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", "neo-async": "^2.6.2", "schema-utils": "^3.1.0", "tapable": "^2.1.1", "terser-webpack-plugin": "^5.1.3", "watchpack": "^2.3.1", "webpack-sources": "^3.2.3" } }, "webpack-bundle-analyzer": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.5.0.tgz", "integrity": "sha512-GUMZlM3SKwS8Z+CKeIFx7CVoHn3dXFcUAjT/dcZQQmfSZGvitPfMob2ipjai7ovFFqPvTqkEZ/leL4O0YOdAYQ==", "requires": { "acorn": "^8.0.4", "acorn-walk": "^8.0.0", "chalk": "^4.1.0", "commander": "^7.2.0", "gzip-size": "^6.0.0", "lodash": "^4.17.20", "opener": "^1.5.2", "sirv": "^1.0.7", "ws": "^7.3.1" }, "dependencies": { "commander": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==" } } }, "webpack-dev-middleware": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz", "integrity": 
"sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==", "requires": { "colorette": "^2.0.10", "memfs": "^3.4.3", "mime-types": "^2.1.31", "range-parser": "^1.2.1", "schema-utils": "^4.0.0" }, "dependencies": { "ajv": { "version": "8.11.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2", "uri-js": "^4.2.2" } }, "ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "requires": { "fast-deep-equal": "^3.1.3" } }, "json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "schema-utils": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "requires": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", "ajv-formats": "^2.1.1", "ajv-keywords": "^5.0.0" } } } }, "webpack-dev-server": { "version": "4.9.3", "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.9.3.tgz", "integrity": "sha512-3qp/eoboZG5/6QgiZ3llN8TUzkSpYg1Ko9khWX1h40MIEUNS2mDoIa8aXsPfskER+GbTvs/IJZ1QTBBhhuetSw==", "requires": { "@types/bonjour": "^3.5.9", "@types/connect-history-api-fallback": "^1.3.5", "@types/express": "^4.17.13", "@types/serve-index": "^1.9.1", "@types/serve-static": "^1.13.10", "@types/sockjs": "^0.3.33", "@types/ws": "^8.5.1", "ansi-html-community": "^0.0.8", 
"bonjour-service": "^1.0.11", "chokidar": "^3.5.3", "colorette": "^2.0.10", "compression": "^1.7.4", "connect-history-api-fallback": "^2.0.0", "default-gateway": "^6.0.3", "express": "^4.17.3", "graceful-fs": "^4.2.6", "html-entities": "^2.3.2", "http-proxy-middleware": "^2.0.3", "ipaddr.js": "^2.0.1", "open": "^8.0.9", "p-retry": "^4.5.0", "rimraf": "^3.0.2", "schema-utils": "^4.0.0", "selfsigned": "^2.0.1", "serve-index": "^1.9.1", "sockjs": "^0.3.24", "spdy": "^4.0.2", "webpack-dev-middleware": "^5.3.1", "ws": "^8.4.2" }, "dependencies": { "ajv": { "version": "8.11.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2", "uri-js": "^4.2.2" } }, "ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "requires": { "fast-deep-equal": "^3.1.3" } }, "json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "schema-utils": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "requires": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", "ajv-formats": "^2.1.1", "ajv-keywords": "^5.0.0" } }, "ws": { "version": "8.8.1", "resolved": "https://registry.npmjs.org/ws/-/ws-8.8.1.tgz", "integrity": "sha512-bGy2JzvzkPowEJV++hF07hAD6niYSr0JzBNo/J29WsB57A2r7Wlc1UFcTR9IzrPvuNVO4B8LGqF8qcpsVOhJCA==", "requires": {} } } }, 
"webpack-merge": { "version": "5.8.0", "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz", "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==", "requires": { "clone-deep": "^4.0.1", "wildcard": "^2.0.0" } }, "webpack-sources": { "version": "3.2.3", "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==" }, "webpackbar": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.2.tgz", "integrity": "sha512-BmFJo7veBDgQzfWXl/wwYXr/VFus0614qZ8i9znqcl9fnEdiVkdbi0TedLQ6xAK92HZHDJ0QmyQ0fmuZPAgCYQ==", "requires": { "chalk": "^4.1.0", "consola": "^2.15.3", "pretty-time": "^1.1.0", "std-env": "^3.0.1" } }, "websocket-driver": { "version": "0.7.4", "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz", "integrity": "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==", "requires": { "http-parser-js": ">=0.5.1", "safe-buffer": ">=5.1.0", "websocket-extensions": ">=0.1.1" } }, "websocket-extensions": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz", "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==" }, "whatwg-url": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", "requires": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "requires": { "isexe": 
"^2.0.0" } }, "which-boxed-primitive": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", "requires": { "is-bigint": "^1.0.1", "is-boolean-object": "^1.1.0", "is-number-object": "^1.0.4", "is-string": "^1.0.5", "is-symbol": "^1.0.3" } }, "widest-line": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", "requires": { "string-width": "^4.0.0" } }, "wildcard": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==" }, "wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "requires": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" }, "dependencies": { "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "requires": { "color-convert": "^2.0.1" } }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "requires": { "color-name": "~1.1.4" } }, "color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" } } }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" }, "write-file-atomic": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", "requires": { "imurmurhash": "^0.1.4", "is-typedarray": "^1.0.0", "signal-exit": "^3.0.2", "typedarray-to-buffer": "^3.1.5" } }, "ws": { "version": "7.5.7", "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.7.tgz", "integrity": "sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A==", "requires": {} }, "xdg-basedir": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==" }, "xml-js": { "version": "1.6.11", "resolved": "https://registry.npmjs.org/xml-js/-/xml-js-1.6.11.tgz", "integrity": "sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g==", "requires": { "sax": "^1.2.4" } }, "xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "yaml": { "version": "1.10.2", "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==" }, "yocto-queue": { "version": 
"0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==" }, "zwitch": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz", "integrity": "sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==" } } } atlas-0.7.2/doc/website/package.json000066400000000000000000000026531431455511600173340ustar00rootroot00000000000000{ "name": "website", "version": "0.0.0", "private": true, "scripts": { "docusaurus": "docusaurus", "start": "docusaurus start", "build": "docusaurus build", "swizzle": "docusaurus swizzle", "deploy": "docusaurus deploy", "clear": "docusaurus clear", "serve": "docusaurus serve", "write-translations": "docusaurus write-translations", "write-heading-ids": "docusaurus write-heading-ids", "upload": "aws s3 sync build/ s3://ariga-atlas-website/ --delete --exclude uploads/*", "invalidate-cdn": "aws cloudfront create-invalidation --distribution-id E3VOJYSV9YO33D --paths \"/*\"" }, "dependencies": { "@docusaurus/core": "^2.0.0-rc.1", "@docusaurus/plugin-client-redirects": "^2.0.0-rc.1", "@docusaurus/preset-classic": "^2.0.0-rc.1", "@mdx-js/react": "^1.6.21", "@svgr/webpack": "^5.5.0", "clsx": "^1.1.1", "docusaurus-gtm-plugin": "^0.0.2", "file-loader": "^6.2.0", "react": "^17.0.1", "react-dom": "^17.0.1", "url-loader": "^4.1.1", "url-parse": "^1.5.2" }, "resolutions": { "trim": "^0.0.3", "browserslist": "^4.16.5", "glob-parent": "^5.1.2", "css-what": "^5.0.1", "prismjs": "^1.24.0" }, "browserslist": { "production": [ ">0.5%", "not dead", "not op_mini all" ], "development": [ "last 1 chrome version", "last 1 firefox version", "last 1 safari version" ] } } atlas-0.7.2/doc/website/sidebars.js000066400000000000000000000112461431455511600171760ustar00rootroot00000000000000/** * Creating a sidebar enables you to: - create an ordered group of docs - render a 
sidebar for each doc of that group - provide next/previous navigation The sidebars can be generated from the filesystem, or explicitly defined here. Create as many sidebars as you want. */ module.exports = { documentation: [ { type: 'category', label: 'Getting Started', collapsed: false, items: [ 'getting-started/getting-started', ] }, { type: "category", label: "Declarative Workflows", collapsed: false, items: [ {type: 'doc', id: 'declarative/inspect', label: 'Schema Inspection'}, {type: 'doc', id: 'declarative/apply', label: 'Applying Changes'}, {type: 'doc', id: 'declarative/diff', label: 'Calculating Diffs'}, ] }, { type: "category", label: "Versioned Workflows", collapsed: false, items: [ {type: 'doc', id: 'versioned/diff', label: 'Migration Authoring'}, {type: 'doc', id: 'versioned/lint', label: 'Migration Linting'}, {type: 'doc', id: 'versioned/new', label: 'Manual Migrations'}, {type: 'doc', id: 'versioned/apply', label: 'Migration Applying'}, {type: 'doc', id: 'versioned/import', label: 'Migration Import'}, ] }, { type: 'category', label: 'Atlas Schemas', collapsed: false, items: [ {type: 'doc', id: 'atlas-schema/sql-resources', label: 'SQL Resources'}, {type: 'doc', id: 'atlas-schema/sql-types', label: 'SQL Column Types'}, {type: 'doc', id: 'atlas-schema/projects', label: 'Project Structure'}, {type: 'doc', id: 'atlas-schema/input-variables', label: 'Input Variables'}, ], }, { type: 'category', label: 'Concepts', collapsed: false, items: [ {type: 'doc', id: 'concepts/workflows', label: 'Declarative vs Versioned'}, {type: 'doc', id: 'concepts/concepts-url', label: 'URLs'}, {type: 'doc', id: 'concepts/dev-database', label: 'Dev Database'}, {type: 'doc', id: 'concepts/migration-directory-integrity', label: 'Directory Integrity'}, ], }, { type: 'category', label: 'Cloud', collapsed: false, items: [ {type: 'doc', id: 'cloud/getting-started', label: 'Getting Started'}, ], }, { type: 'category', label: 'Integrations', collapsed: false, items: [ {type: 'doc', id: 
'integrations/github-actions', label: 'GitHub Actions'}, {type: 'doc', id: 'integrations/terraform-provider', label: 'Terraform Provider'}, {type: 'doc', id: 'integrations/go-api', label: 'Go API'}, ] }, { type: 'doc', id: 'contributing', }, { type: 'doc', id: 'cli-reference' } ], guides: [ { type: 'doc', id: 'guides/guides' }, { type: 'category', label: 'MySQL', collapsed: false, items: [ { type: 'doc', id: 'guides/mysql/generated-columns', label: 'Generated Columns' }, ], }, { type: 'category', label: 'PostgreSQL', collapsed: false, items: [ { type: 'doc', id: 'guides/postgres/serial-columns', label: 'Serial Type Columns' }, { type: 'doc', id: 'guides/postgres/partial-indexes', label: 'Partial Indexes' }, ], }, { type: 'category', label: 'Migration tools', collapsed: false, items: [ { type: 'doc', id:'guides/migration-tools/golang-migrate', label: 'golang-migrate' } ] }, { type: 'doc', id: 'guides/ddl' }, ], about: [ { type: 'doc', label: 'About', id: 'about', } ] }; atlas-0.7.2/doc/website/src/000077500000000000000000000000001431455511600156275ustar00rootroot00000000000000atlas-0.7.2/doc/website/src/assets/000077500000000000000000000000001431455511600171315ustar00rootroot00000000000000atlas-0.7.2/doc/website/src/assets/icons/000077500000000000000000000000001431455511600202445ustar00rootroot00000000000000atlas-0.7.2/doc/website/src/assets/icons/discord-white.svg000066400000000000000000000037561431455511600235450ustar00rootroot00000000000000 atlas-0.7.2/doc/website/src/assets/icons/discord.svg000066400000000000000000000037521431455511600224230ustar00rootroot00000000000000 atlas-0.7.2/doc/website/src/assets/icons/github.svg000066400000000000000000000017021431455511600222470ustar00rootroot00000000000000 atlas-0.7.2/doc/website/src/assets/icons/twitter.svg000066400000000000000000000013721431455511600224720ustar00rootroot00000000000000 
atlas-0.7.2/doc/website/src/components/000077500000000000000000000000001431455511600200145ustar00rootroot00000000000000atlas-0.7.2/doc/website/src/components/HomepageFeatures.js000066400000000000000000000032471431455511600236040ustar00rootroot00000000000000import React from 'react'; import clsx from 'clsx'; import styles from './HomepageFeatures.module.css'; const FeatureList = [ { title: 'Easy to Use', Svg: require('../../static/img/undraw_docusaurus_mountain.svg').default, description: ( <> Docusaurus was designed from the ground up to be easily installed and used to get your website up and running quickly. ), }, { title: 'Focus on What Matters', Svg: require('../../static/img/undraw_docusaurus_tree.svg').default, description: ( <> Docusaurus lets you focus on your docs, and we'll do the chores. Go ahead and move your docs into the docs directory. ), }, { title: 'Powered by React', Svg: require('../../static/img/undraw_docusaurus_react.svg').default, description: ( <> Extend or customize your website layout by reusing React. Docusaurus can be extended while reusing the same header and footer. ), }, ]; function Feature({Svg, title, description}) { return (

{title}

{description}

); } export default function HomepageFeatures() { return (
{FeatureList.map((props, idx) => ( ))}
); } atlas-0.7.2/doc/website/src/components/HomepageFeatures.module.css000066400000000000000000000002771431455511600252440ustar00rootroot00000000000000/* stylelint-disable docusaurus/copyright-header */ .features { display: flex; align-items: center; padding: 2rem 0; width: 100%; } .featureSvg { height: 200px; width: 200px; } atlas-0.7.2/doc/website/src/css/000077500000000000000000000000001431455511600164175ustar00rootroot00000000000000atlas-0.7.2/doc/website/src/css/custom.css000066400000000000000000000347461431455511600204610ustar00rootroot00000000000000/* stylelint-disable docusaurus/copyright-header */ /** * Any CSS included here will be global. The classic template * bundles Infima by default. Infima is a CSS framework designed to * work well for content-centric websites. */ .navbar__title { font-size: 20px; } .navbar__brand img { height: 65px; } .navbar__logo { display: flex; align-items: center; } .footer__title { color: #000939; font-size: 22px; } .footer__link-item { font-size: 16px; } /* You can override the default Infima variables here. 
*/ :root { --ifm-color-primary: #3578e5; --ifm-color-primary-dark: #1d68e1; --ifm-color-primary-darker: #1b62d4; --ifm-color-primary-darkest: #1751af; --ifm-color-primary-light: #4e89e8; --ifm-color-primary-lighter: #5a91ea; --ifm-color-primary-lightest: #80aaef; } .docusaurus-highlight-code-line { background-color: rgb(72, 77, 91); display: block; margin: 0 calc(-1 * var(--ifm-pre-padding)); padding: 0 var(--ifm-pre-padding); } /* discord */ .header-discord-link:hover { opacity: 0.6; } .header-discord-link:before { content: ''; width: 28px; height: 28px; display: flex; background: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAgAAAAIACAMAAADDpiTIAAAAq1BMVEUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA0NbREAAAAOHRSTlMAAwUiKPrtBgqyF6NpLhsR9ec4DuG48Qh/cEPOXDNAHhRJV1Gel8lM3XmrhophwDvE2ZKDdKeP1JJ/9KAAABQfSURBVHja7MGBAAAAAICg/akXqQIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGYPDgQAAAAAgPxfG0FVVVVVVVVVVVVVVVVVVYVde91OFAYCABwIhXCTBqFSRFRQUeqlWs/uvP+T7Z7tj721TVSEtM73ABwyZMhMziCEEEIIIYQQQkhRWp/m3tx17T0MU9KVkt/t49mLV6SjMdMIasHAGRWJbcKfPNKNI/zFjJPKCHAfXMuAVuvv8KacdKGANy2/lQYjqEnMz1wTPuCQ9uXwgXs7oxZBDejvnjgIDHPSOgeEYs8YEHSBID+YIDIPu0g1PQIp0crAquAsGj1yEJqFFunG+GiCJLsKCDpJf2qD0H1CSaecbA+S+MrQCZLDqi2IuaEKEe09cvjl07yx6gZpDGL1Tp1Y9lYPuAcawspnEJv7qpVVNLkDWXaO7eE7jBmIxamS8bPSLUibGQT9i2URCC1ShfvqfhaBrKhQeCEd0MIYhHim/AVr4HGQZYcEvbIyDkK1cgf/xeUAz/A38FPw8iWS/zerWoKs9YTcOFqDkGuQhuiDwKH+rtqsDu5/5sljNt35tBcwXbtwVS7Iqim5YeEQRPhl5ZIe0HBaeMncXkRwir198Moddc6c82AbEyQtRuQ2aRUHke3o/O+erbcmNCGqV6U/sciJjFq+GFCytb0urTJB5Mk5+alsHBaJzeEaFodNThmRN5EuCB82N3ZDqJWmMCYnpQXrvU6KtSByPV928EsvTNktUH6OLqcRmjgs0VQyHozm3mwJrXtOUkeTWWsagRz+g707UUschqIAfFr2fSkigkIVWQQdEZnP8/5PNp/ODjQNM9Im6f1foUuSm5ObIbLBu24zxqiLeF74vNswVW+vOm9Bd0Q9nVtkwM2SMb7kEcOrDgcjGuLtoRtALX9BPUvny4P5EWPcV6EUdM159r+076dNDwrNGvVsmnBYs8IYtWbMw1/SWJuniYdI4Yx6Hp0tEHsPjDEL
Eeny5rVI460WIaKENWpp38FJ4yLVtgEi+PlBh7bob58vo/+AWnoOTgXKK6pVJjguWKxom5eriDB4dUUtF67tEg3bVNrkcNSkbvCgr9R/vClFTIO11F0qDDVHVBrlcUR5XCvQavNp699fgV4DjvCeqNRfHE0HV+iCzSLAgXGHOgZu/AQaPaq0rz3sq9Zf6I7RXevwmyhQQ9GByaC/ptKsjD3BdE7XLOst/K00oI572zeKu0WqzCf7f/5b88p8n6Oyf5aluaGGotVxkVKNKsUh/uLdfKXL9k+0DIuO/wQaRarsfPwpt7Z8zq+hvwvxB39ADUVLlwPeK1XmVfwheLKn1Pd/5kMfv1W/UMPaxuXApEeF9i1+82/dm/Zpx5y7fcbrTWCbIVUqAX4Jd21mTW9awk+X99RgW1zojgr9MX7yxtn6+I8mXhs9xpvCJiEVZiX8EFy5P++LNn/2tDfKyYJFh6OArzpz2lyNGVf8lXzO9RhnBns0GWnm44M/7VGQryE+eGvGsSgsVmOEwi0++E99iu9WE3zId6i2gi2qjLBp4d1lPctD/6GLPN75M1d+Afc8ro535QeKPfMG3g0LVKnBDiGPWk4AIHilOOKtCwDhG1UsyYlteczMAxA8UkToDT3A29q/EGhF1jGCNYVCZ+rFDANWtJzd8tCyCrRmFBoHRMMeIz3CfAEPVUooDyj0Doj6FUYKYLw1DzzI13+C5dgbMMoapivzQF0Wfqd5eb5mFON3BHbcV5HHf7KLr4ywg9l8qfGdV8HwhOATxXk9wWSe7PGcW9vofmK3FOe2gMFkj//8OjDXDcX5GXxgMKsBz2SNYKoJRRKMvXHG7ZN95jA1GxZQfBobgyE7imQMYCKpAifHyHrwguJT2HpOTIpAaq4Xg/IUyTGwccwFRXI2ME2LIobbK0FZA8ZyeiXoZa/LR7oKhsUCxhSfx8KeMa62dzSXWXuCTYqkGdU4TA79nYsdx8RKFMkzaBooWdATuJgOdam7vz3eYAqZAp6FPW3DpOvLKdyrBkoS5BQOVgOfKdLxDCN8oTiBc5vCEgY+M9ObRtUp0lKHASy409tZJmQDJQuYphxSt6U4lUs7Qj5Figqp3ycmRYB/4k6zADfu+LZXBekqUyTC1M6RciDw3ziTCpAwaNpekKaQIm1VnErKwE65wumkDOyQItKTo0hfDieTLJhL1kiLJ1kwE7Q9nEQ2Al2TR0rkEkAzPEKXjABOSmtLsEFhhgZOIFEQ92yhR0YAR2mOATICOKuBFMhloOaYQZeEAd3kQ5PcD+SmG8STEcBhMyTNozCJh3iyBnBYHpqkMZybXpEw6Q1slj50SBbIXRMk6orCLHUkqkNhlh5iyXkAp4VI0DWFae6QoDcK04wQRxqDua0MNekO7rghdEhXCGetoCRRAOf5UJKNINflkZABhYkeEEMOhbutg2hSBsyCFhIxpTDTAipyUbzzKoggi8CM8BFBFoHZkEc06QuTAQ84ThaBGdHBcbIIzIoWjpPuwBmxwNmtKL6xdydKiQNRFEBvAkkQIYRRYVAWBVQURRmXuv//ZVPg7jikAyG5rTlfYElX5/VbunVd4j+KkbAfwsGWDVhQNsCWDZmz6PK6Xl66GdWmE+Zq8jjslcIwLPduuldnFDDEV75LHji66lXwkRsOb5mLy2Ho4AOnOsq9V+oAX/geeeBWbYCvtW92mLFx3cGX3F87TM6eIKDEnMwPHaxQGTaZmajrYoW9TsSkrLk7/Jq5OCojjnPSZCaiaxcx3JrHvFzjaza/EdQ6hAln5HH77lwYaJ8yJ2f4zPoQYLcNQ+0xt6xZhqFyk0lYUhIuM3NeHwnUfW7TbgBj7pgJWFIS7jBrzSoSCS65PSc2DFDV8C97h0JvA52XzLw6EvrFBGwYEnWZsR0HgMi/vTVAYmWPhuwIAkrM1hhrKUVMX1SV+VMSBwG23guxI7RQvZI1YfM1/mHnVPCtI7RV1bGmPo3plwOYJb+NZwJxwLlN
JycHW1KlEYnKdo0LEu9yXdKU+nPSh4wnc5w9YHomLjYQ+DSif1XIH64m9SEL9klq3MDWoznpN6RazM6e0IsWNWxol0bU3xPfY3a62NwD07HvYEPtiCbU7wv7xXgy/3MAFY+p+GVdJ2UPW3HBzPSFSle32JzjM0sdvGNjM8gEqQg8mR6rEU1o14McZuZEaM86A+zbAly8sDINFDlCYeuNjfMUA7yxMA1UEype+EhHQBMCmWuFZ6IaSEtfp7S2yww9YgsmjCcTdj8LdBZjnWZknw9xuYpiCLgw42bmSIsTMY52FDhgLJks8JtrmXAEU8bRjgLPuZJcEiCdhpwQsPK17XOk7pHx9G66anMjHtJTYTzlKLDJ1UQT2UJXb865mnYUGHAF2RAAmKuEAAZ5SekosMQYoh1tDzq70Q0NyEaB5zQlc/BautLZjUqMpxsFTpmRUzyReN3EQYoajCUcBR4xhlIv0Dv3Om+yB4wjHAU6NKJTfXtRy78U/IpxhKPABldTPQXidwqzidZVU96GAyxMY5WRqlOZnJRxcVqmkzFxTl3xlqsDod66KeNINlXEV7OVt6650ALo0JDgtdE+44hM4XwWcQP3ACASkK7XymThCWYAQCXyvgAAkSNpYgFSFHI12SDQVVoAj0xEp5Id3xAqG71WKHQM3KEJreEa80KWYkMY0FAasJjTgOZ40AFXk00Fl2T6QQDQkOAzoh4NKA6394VK0xVmKgJUGquSVjF0Tl57WLCurfZJYOUh4LWKIdEW3gMA61oq3h2obexlqQIyn90aAGj0piTXl9lI1zgHijxzeosU7dOIYjVglxn6rRMDko6dgdTSKWBdS/iCD6GVG1r6HV2YW9gOtNQQ+sPv7Q0BSEDonoUEutCZyPUtHQ5damuk0xI7AnQ6MAY6izGx0MJSUKoHQacllFF/pBnF81SHprTGQ/vcXORa11HxqisTSycUOUIvHJ0D9t0T9+TR1gwGR0KhS9PClvBnZyL51OSaShtXz7Jrdt94dpYxF0YyGwB5ZuUmuhTAshsiX7VcbOyMlBm07dOU4nGqzuxNhTKvUYANuT4Tyf2lK4EXUENsxtknSYEXgxbumIzQ3pWsGCwUwU5JUqS1osrklPpC/zAPd0LDrM0AG3AmTESuIDyjMZUPWCXii/yfsPrDNShduttiLrxjoRGMjkI0mlgkUlRf09yVCACe1G3pBf7AsTQP9OzMFZpjKmMtDZ9rkHo+rMrcXDo6p5aoat/vz9DWPNCLmatTd/NKSGyvycQkPly554FezAKd1ruohITqEdclNGlbY578EpIIDrhFXSsqAO/U7M0DvfLOYa5xxK16dGDMHTN3fyzOA715aOtM300GMDSYMH8HFueB3olGMHE8YwYuXBhw76jAt7If6AtHZcSp3DEb/gniOIc+NeR+zVJqbstYpXHB7DRPHKzgDlV+ftK1ORH4yWQU4GtO75LZiu6r+I/BFYVUrE4E/uNgdIzPgt6jxxzsd+ptfOIMagqh3zsNnd7KtMwu+vXBcdutVMOwPJruM0f++Lpfr1ZctxLWz2unYj/+QmhpQ3NB59rNcxbs9cv2THAh92KARkajsJ6uzIBVIQmFVrYXtyzYa2rTUxGF9O1+j1JAYV2z71IKKKxn8n1KAYV1eBa9GFl4I1QPVqoFFZJzrbopvvCOyGWRasXAQjINm8dCCgL14KIabLey0PVG+bcX/0A9oXaA1k3uIwa2uNphSg6xoSHTM0O9yUKsZjlkWobY0DVTNIQ7ZSHGb9eZ60wH3jNNA6BafAdWOgpfxzHzv2sLwG+mya8AqO+z8B/eMOXP7lhsNngGAO49C1+6agMoc0nkBeEx0zXFwvEZC/84qAJAo8UlkfngHaasi6Whx8IH+z0sBBNKLYBbpq2PpaDDwhuv62DBmVFrAcyZuhKeNASu0FBx0caTB4otAJ/Gkl+31XhggWSnjWcPVFsA3IJWFS/KR/zxxg28uCJ/wgKgf4xX9R++
BMaNj0euH7EAPl65Fx7wx/pL3p2oJwoDcQCfBBDwABTFC23dtq5HPbqt+n//J9uvX/fQKgpqYoK/J2g1xGFmkln59A9bAcB9LABYJm3xXNylnXtv2ASf7mQBwKrQNuMOd4FSkbbwJb7cyQLYO7lc/IW7MnmgbXyOL6otgAjCjGlXM76fc4ivRdoRbrBFpVpACeKs6Rv+dBcNI0G5SbuMCPvUKAd7EKhh03e93AcDmwFLM59fmdmRUwhUf6Q9/keA3LLevbRNN2qMPBQ99Myq0T62yGm1eFJhtMdZQpg3ptAQ5gQtOqTYyl25OGo/0gFGFeIY9EmlxuADlg4dwmq5yg65PZbpw1VmCPunFwhlJf2d9mCDXCiUw4T/cIUkqoyL+MTrEKtkUwKjpX9EuKpQgkoB36kWAHx5DCBWoUeJzKnO4UD3h0MJ2BQiRSH9oX4gCODZpmRGS89zhaPYp0TGCCJZHv2lx+y4qklHsMov3faBzVNIyewXJFDnluAdYwi3CokoJ/tAvezTMYMAidQ4E7jnGcJZM0ZHMXOqwxoIWgYdFa6QQKEM4HdsDvEKfTrFH6idH4haHh3HWxBtRQLwISRwfTrJWTwrGhBYLxVGJyyqEM1lJALvQoZxk05jD7Fyt1jPY49OMpbYp8f3T2R3IcVrk9JgXqxM0agbe4xOM5cQr8FIlOYIUgQzTuk4vY+bt5WPWouQjpJ6FKrLSRxnBDmsNqe0WHHwEuE23JlpUzreM2RocBLJHkKSQmxTBn7tYwip6utOkVIzGzhBkQshTuENSPNapExYsd8aQjzLLVdCyoAtujhK5ff/79gK8nT7jLKyvX7btSBGYdWuPTLKpDkrQJIPOkKnnOB/1TikM7DQ6JSfN7ie7vpH5ZFTZg+/kIIyI6JSaUOq5aBJ52p6tXjaqOJsm0lr1jeLDp3F+VGHPH2SZWFBLrfG6SI8LJr9H+OXeQFp1N3X8qDihZzSUaGprfBA8hSrkC0pyZodtx3fM8xa5ylu/zeLB51az/R8h1MWqvQydR2SqTmHdMG7QXpg8ivXJUYZqXWDZEoPpD6nU7KQksLl/9PMCOkpctBNBj+AbKMi3YIzh1xD0kIPkk1sug22hkyWT3poQyarT7fTiyBPhXThQp55SLfEf0GWGWnDrkOWD0Y3VolwhEZlri3ajNt8U+G1yJlAgjdOOnmCDO2bP/5fzCqSKXjOSYZ3CDf0SBX8FQfcZwD4BxtCrGhAKjE22KN5outCzSpEmtqkmFoVwqxJQx7EmaiYEmGxBTHmpKU+BHkzSU1+CSJEDunpHSJET4rE/mk63xW66SgTdTOChYHCX7+YXeAnaYu/4bqCmQbpEMPFp7sqAcspDdf7ij/9f4VtC4Ait12fRcXS8Ej1zX9bsxzgKurKve5mFOM6JtpFQqarxl3HZ1PoFIX1ruXnELYDXKhD2rs8J7ysabT372K1CS4xpRwII1wgGKtT8jkLr6xwLpdywcDZSj1tH/4tTse9hxaAazcHRC0jD9/+F157CZCRpWXgc9AaWRXG2kX9J/mDErLoUX40kMVwpvnvfiJulrs57AG9Zpdo6cnLz8aftAgaeesBvdJ968OykZew5zd797JDMABEAdQrElrvxKMJYWFDS0LQ//8yC0u0EhbanvMJk8xqbu7kqHej6yjNcivbJMZpluklXA5KvvlPGt3jYtdOXxoWLQOaL3zXRDDfNGe16qpPmo/ihpJEAD58vDPq7JNVa130S8dPNSb9XpScT0Gw/dfI03fCwy6I4zBqrau88QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHBvDw4JAAAAAAT9f+0MCwAAAAAAAAAAAAAAAAAAAAAAAPAK8EICyXknp24AAAAASUVORK5CYII='); background-size: cover; } html[data-theme='dark'] .header-discord-link:before { filter: 
invert(100%); } /* twitter */ .header-twitter-link:hover { opacity: 0.6; } .header-twitter-link:before { content: ''; width: 28px; height: 28px; display: flex; background: url("data:image/svg+xml,%3Csvg viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M22.46 6c-.77.35-1.6.58-2.46.69.88-.53 1.56-1.37 1.88-2.38-.83.5-1.75.85-2.72 1.05C18.37 4.5 17.26 4 16 4c-2.35 0-4.27 1.92-4.27 4.29 0 .34.04.67.11.98C8.28 9.09 5.11 7.38 3 4.79c-.37.63-.58 1.37-.58 2.15 0 1.49.75 2.81 1.91 3.56-.71 0-1.37-.2-1.95-.5v.03c0 2.08 1.48 3.82 3.44 4.21a4.22 4.22 0 0 1-1.93.07 4.28 4.28 0 0 0 4 2.98 8.521 8.521 0 0 1-5.33 1.84c-.34 0-.68-.02-1.02-.06C3.44 20.29 5.7 21 8.12 21 16 21 20.33 14.46 20.33 8.79c0-.19 0-.37-.01-.56.84-.6 1.56-1.36 2.14-2.23z'/%3E%3C/svg%3E") no-repeat; } html[data-theme='dark'] .header-twitter-link:before { content: ''; width: 28px; height: 28px; display: flex; background: url("data:image/svg+xml,%3Csvg viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath fill='white' d='M22.46 6c-.77.35-1.6.58-2.46.69.88-.53 1.56-1.37 1.88-2.38-.83.5-1.75.85-2.72 1.05C18.37 4.5 17.26 4 16 4c-2.35 0-4.27 1.92-4.27 4.29 0 .34.04.67.11.98C8.28 9.09 5.11 7.38 3 4.79c-.37.63-.58 1.37-.58 2.15 0 1.49.75 2.81 1.91 3.56-.71 0-1.37-.2-1.95-.5v.03c0 2.08 1.48 3.82 3.44 4.21a4.22 4.22 0 0 1-1.93.07 4.28 4.28 0 0 0 4 2.98 8.521 8.521 0 0 1-5.33 1.84c-.34 0-.68-.02-1.02-.06C3.44 20.29 5.7 21 8.12 21 16 21 20.33 14.46 20.33 8.79c0-.19 0-.37-.01-.56.84-.6 1.56-1.36 2.14-2.23z'/%3E%3C/svg%3E") no-repeat; } /* github */ .header-github-link:hover { opacity: 0.6; } .header-github-link:before { content: ''; width: 28px; height: 28px; display: flex; background: url("data:image/svg+xml,%3Csvg viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M12 .297c-6.63 0-12 5.373-12 12 0 5.303 3.438 9.8 8.205 11.385.6.113.82-.258.82-.577 0-.285-.01-1.04-.015-2.04-3.338.724-4.042-1.61-4.042-1.61C4.422 18.07 3.633 17.7 3.633 17.7c-1.087-.744.084-.729.084-.729 
1.205.084 1.838 1.236 1.838 1.236 1.07 1.835 2.809 1.305 3.495.998.108-.776.417-1.305.76-1.605-2.665-.3-5.466-1.332-5.466-5.93 0-1.31.465-2.38 1.235-3.22-.135-.303-.54-1.523.105-3.176 0 0 1.005-.322 3.3 1.23.96-.267 1.98-.399 3-.405 1.02.006 2.04.138 3 .405 2.28-1.552 3.285-1.23 3.285-1.23.645 1.653.24 2.873.12 3.176.765.84 1.23 1.91 1.23 3.22 0 4.61-2.805 5.625-5.475 5.92.42.36.81 1.096.81 2.22 0 1.606-.015 2.896-.015 3.286 0 .315.21.69.825.57C20.565 22.092 24 17.592 24 12.297c0-6.627-5.373-12-12-12'/%3E%3C/svg%3E") no-repeat; } html[data-theme='dark'] .header-github-link:before { background: url("data:image/svg+xml,%3Csvg viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath fill='white' d='M12 .297c-6.63 0-12 5.373-12 12 0 5.303 3.438 9.8 8.205 11.385.6.113.82-.258.82-.577 0-.285-.01-1.04-.015-2.04-3.338.724-4.042-1.61-4.042-1.61C4.422 18.07 3.633 17.7 3.633 17.7c-1.087-.744.084-.729.084-.729 1.205.084 1.838 1.236 1.838 1.236 1.07 1.835 2.809 1.305 3.495.998.108-.776.417-1.305.76-1.605-2.665-.3-5.466-1.332-5.466-5.93 0-1.31.465-2.38 1.235-3.22-.135-.303-.54-1.523.105-3.176 0 0 1.005-.322 3.3 1.23.96-.267 1.98-.399 3-.405 1.02.006 2.04.138 3 .405 2.28-1.552 3.285-1.23 3.285-1.23.645 1.653.24 2.873.12 3.176.765.84 1.23 1.91 1.23 3.22 0 4.61-2.805 5.625-5.475 5.92.42.36.81 1.096.81 2.22 0 1.606-.015 2.896-.015 3.286 0 .315.21.69.825.57C20.565 22.092 24 17.592 24 12.297c0-6.627-5.373-12-12-12'/%3E%3C/svg%3E") no-repeat; } @media only screen and (max-width: 525px) { .menu__link { display: flex; justify-content: flex-start; align-items: center; } .header-github-link:after { margin-left: 20px; content: "Github "; } .header-twitter-link:after { margin-left: 20px; content: "Twitter "; } .header-discord-link:after { margin-left: 20px; content: "Discord "; } } /* Make navbar looks friendly on small devices. 
*/ @media only screen and (max-width: 400px) { .nav_src-pages-index-module .linkItem_src-pages-index-module { padding-right: 0!important; } .nav_src-pages-index-module { width: 100%!important; } .socialLinks_src-pages-index-module { display: none !important; } } /* Make sql syntax highlight a bit better */ span.token.keyword { color: rgb(199, 146, 234); } span.token.punctuation { color: rgb(191, 199, 213)!important; } div.language-hcl * span.token.property { color: #9CDCFE; } :root { --site-primary-hue-saturation: 217, 73%, 78%; --ifm-footer-title-color: white; } div[class^='announcementBar_'] { --site-announcement-bar-stripe-color1: hsl( var(--site-primary-hue-saturation), 30% ); --site-announcement-bar-stripe-color2: hsl( var(--site-primary-hue-saturation), 55% ); background: repeating-linear-gradient( 35deg, var(--site-announcement-bar-stripe-color1), var(--site-announcement-bar-stripe-color1) 20px, var(--site-announcement-bar-stripe-color2) 10px, var(--site-announcement-bar-stripe-color2) 40px ); font-weight: bold; } .tabs-container p { margin-bottom: 10px; } .code-block-error-message { background-color: #ff6f8780; display: block; margin: 0 calc(-1 * var(--ifm-pre-padding)); padding: 0 var(--ifm-pre-padding); border-left: 3px solid #ff6f87a0; } .code-block-error-message span { color: rgb(191, 199, 213)!important; } .code-block-info-line { background-color: rgb(193 230 140 / 25%); display: block; margin: 0 calc(-1 * var(--ifm-pre-padding)); padding: 0 var(--ifm-pre-padding); border-left: 3px solid rgb(193 230 140 / 80%); } .code-block-info-line span { color: rgb(191, 199, 213)!important; } /* Full-width checks table. 
*/ .docs-doc-id-lint\/analyzers table { display:table; width:100%; } .join-discord { background-color:#5C4AEC; border-radius:6px; border:1px solid #5D4BED; display:inline-block; cursor:pointer; color:#ffffff; font-family:Arial; font-size:17px; padding:16px 31px; text-decoration:none; margin: 1em 0; } .join-discord svg { margin-bottom: -6px; margin-left: -6px; } .join-discord span { padding-left: 6px; } .join-discord:hover { color: white; text-decoration: none; }atlas-0.7.2/doc/website/src/pages/000077500000000000000000000000001431455511600167265ustar00rootroot00000000000000atlas-0.7.2/doc/website/src/pages/index.js000066400000000000000000000161571431455511600204050ustar00rootroot00000000000000import React from 'react'; import Link from '@docusaurus/Link'; import LayoutProvider from '@theme/Layout/Provider'; import Footer from '@theme/Footer'; import index from './index.module.css'; import GithubIcon from '../assets/icons/github.svg'; import DiscordIcon from '../assets/icons/discord.svg'; import TwitterIcon from '../assets/icons/twitter.svg'; import BrowserOnly from "@docusaurus/core/lib/client/exports/BrowserOnly"; function Nav() { return
  • Docs
  • Guides
  • Blog
} function SocialLinks() { return } function Header() { return
} function AtlasButton({ link, text, type, style }) { return ( ) } export default function () { return {/* first slide */}

Manage your
database schemas with Atlas CLI

Atlas CLI is an open source tool that helps developers manage their database schemas by applying modern DevOps principles. Contrary to existing tools, Atlas intelligently plans schema migrations for you, based on your desired state.

hero
{/* 2nd slide */}

Define your schemas using the Atlas DDL

Atlas DDL is a declarative, Terraform-like configuration language designed to capture an organization’s data topology. Currently, it supports defining schemas for SQL databases such as MySQL, Postgres, SQLite and MariaDB.

{() => { const mobile = window.innerWidth < 768; const erdImage = mobile ? "https://atlasgo.io/uploads/images/erd-mobile.png" : "https://atlasgo.io/uploads/erd-180122.png"; return erd }}
{/* 3rd slide*/}

Powering Ent

linux ent

Atlas powers Ent, an entity framework for Go, is a Linux foundation backed project, originally developed and open sourced by Facebook in 2019. Ent uses Atlas as its migration engine, allowing Ent users to unlock safe and robust migration workflows for their applications.

{/* 4th slide */}

Migrate, your way.

Atlas provides the user with two types of migrations - declarative and versioned.

Declarative Migrations

Declarative migrations are migrations in which the user provides the desired state, and Atlas gets your schema there instantly.

Versioned Migrations

Atlas offers you an alternative workflow, in which migrations are explicitly defined and assigned a version. Atlas can then bring a schema to the desired version by following the migrations between the current version and the specified one.

} atlas-0.7.2/doc/website/src/pages/index.module.css000066400000000000000000000231301431455511600220320ustar00rootroot00000000000000/* stylelint-disable docusaurus/copyright-header */ body { font-family: Helvetica; } /** * CSS files with the .module.css suffix will be treated as CSS modules * and scoped locally. */ p, h1, h2, a{ margin: 0; letter-spacing: 0.15px; } img { max-height: unset; max-width: 100%; } /* MOBILE FIRST */ .header{ display: flex; justify-content: space-between; align-content: center; padding: 25px 0; margin-bottom: 20px; } .socialLinks a { margin-right: 25px; height: 24px; width: 24px; line-height: 21px; padding-right: 5px; } .socialLinks { display: flex; justify-content: center; align-items: center; } /* start navbar*/ .nav { list-style-type: none; display: flex; justify-content: flex-start; align-items: center; color: white; width: 75%; margin: 0; } .nav .linkItem { padding-right: 5px; text-decoration: none; margin-right: 5%; color: white; } .nav .linkItem a { color: white; font-size: 16px; } .nav .linkItem a:hover { text-decoration: none; color: #2064E9; } /* end navbar*/ /*titles*/ .title { text-align: left; font-weight: 900; font-size: 34px; line-height: 44px; } .slide3__title { font-size: 56px; line-height: 64px; text-align: center; margin-bottom: 50px; } .titleSecondary { text-align: center; font-size: 28px; line-height: 32px; font-weight: 900; margin: 15px 0; color: #000939; } .subtitle { font-size: 22px; line-height: 25px; text-align: center; color: #000939; } .subtitleMargin { margin-bottom: 10px; color: #000939; } .subtitleWithChipWrapper { display: flex; justify-content: center; align-items: center; flex-direction: column; margin-bottom: 10px; } /*end titles*/ /* text */ .paragraph, .paragraphSecondary { font-size: 18px; line-height: 28px; color: #757889; text-align: center; } .paragraphSecondary { font-size: 14px; line-height: 28px; } /*end text */ /*buttons*/ .primaryButton, .secondaryButton { border-radius: 24px; text-transform: 
uppercase; max-width: 242px; height: 35px; border: none; padding: 5px 25px; cursor: pointer; } .primaryButtonText, .secondaryButtonText { letter-spacing: 0.15px; font-weight: bold; font-size: 16px; line-height: 27px; } .primaryButtonText:hover, .secondaryButtonText:hover { text-decoration: none; color: #000939; } .primaryButton { background: #82C7FF; } .primaryButtonText { color: #000939; } .secondaryButton { padding: 5px 35px; background: #2064E9; } .secondaryButtonText { color: white; } .secondaryButtonText:hover { color: white; } .textButton, .slide3__TextButton { color: #2064E9; -webkit-text-decoration: none; text-decoration: none; position: relative; -webkit-tap-highlight-color: transparent; background-color: transparent; font-size: 14px; line-height: 36px; outline: 0; border: 0; margin: 0; border-radius: 0; padding: 0; cursor: pointer; -webkit-user-select: none; -moz-user-select: none; -ms-user-select: none; user-select: none; vertical-align: middle; -moz-appearance: none; -webkit-appearance: none; margin-top: 10px; } .slide3__TextButton { font-size: 20px; line-height: 36px; } /*end buttons*/ /*containers*/ .container, .rowContainer, .slide3__container { display: flex; flex-flow: column; justify-content: center; align-items: center; width: 85%; margin: 0 auto; } .slide3__container { width: 100%; } .slide1 { background-color: #013758; background-position: center; background-repeat: no-repeat; background-size: cover; position: relative; padding-bottom: 80px; } .slide2 { padding: 20px; background-color: rgba(0, 9, 57, 0.1); } .slide3 { background-color: #000939; padding: 30px; } .slide4 { background-color: rgba(0, 9, 57, 0.1); } .section { margin-bottom: 10%; } .sectionNoMargin { margin: 0; } /*end containers */ /*images*/ .linux { height: 50%; width: 50%; margin-bottom: 40px; } .chip { background: #CACACA; border-radius: 24px; color: white; height: 35px; font-weight: 700; width: fit-content; padding: 5px 20px; } .heroBanner { padding: 4rem 0; text-align: 
center; position: relative; overflow: hidden; } .imageContainer { transform: unset; } .dashboardImage { max-width: 75%; max-height: 70%; margin-top: 20px; } .entImage { margin-bottom: 40px; } /*TABLET*/ @media only screen and (min-width: 525px) { /* start navbar*/ .header { margin: 0 auto 20px auto; align-content: flex-start; justify-content: space-between; width: 90%; } .nav { list-style-type: none; display: flex; color: white; justify-content: flex-start; margin: 0; padding: 0; } .nav .linkItem a { color: white; font-size: 18px; } /*end navbar*/ /*titels*/ .title, .slide3__title { margin-bottom: 10px; text-align: left; font-size: 48px; line-height: 55px; } .slide3__title { margin-bottom: 30px; font-size: 70px; line-height: 80px; } .titleSecondary { font-size: 32px; line-height: 55px; } .subtitle, .subtitleMargin { font-size: 30px; line-height: 34px; } .subtitleWithChipWrapper { flex-direction: column; justify-content: center; align-items: center; margin-bottom: 0; } .subtitleMargin { margin-bottom: 20px; } .subtitleWithChipWrapper .chip { margin-bottom: 20px; } /*end titles*/ .paragraphSecondary, .paragraph { font-size: 20px; line-height: 36px; } /*buttons*/ .primaryButton, .secondaryButton { border-radius: 24px; text-transform: uppercase; height: 48px; border: none; padding: 10px 50px; cursor: pointer; } .primaryButtonText, .secondaryButtonText { font-size: 16px; line-height: 27px; } .textButton, .slide3__TextButton { font-size: 20px; line-height: 36px; } /*end buttons*/ .sectionNoMargin { width: 55%; } /*containers*/ .container, .rowContainer { flex-flow: column; width: 90%; } .slide3__container { width: 75%; } .slide1 { padding-bottom: 40px; } .slide3 { padding: 50px; } .imageContainer { align-self: flex-end; width: 100%; position: relative; transition: unset; } .slide1 .slide1LeftSide { width: 100%; } .section { margin-bottom: 5%; width: 55%; } /*end containers*/ .dashboardImage { max-width: 50%; } .linux { width: 165px; height: 54px; margin-bottom: 15px; } } 
/*DESKTOP*/ @media only screen and (min-width: 1100px) { .header{ width: 75%; } .socialLinks a { margin-right: 25px; height: 24px; width: 24px; } .socialLinks { display: flex; justify-content: center; align-items: center; } .nav { margin: 0; } /*start navbar*/ .nav .linkItem { padding-right: 5px; text-decoration: none; margin-right: 5%; color: white; } .nav .linkItem a { color: white; /*font-size: 1vw;*/ } .nav .linkItem a:hover { text-decoration: none; } /*end navbar */ .title { font-size: 40px; line-height: 50px; text-align: left; } .titleSecondary { text-align: center; font-size: 48px; line-height: 55px; font-weight: 900; margin: 50px 0 10px 0; } .subtitle { margin-bottom: 10px; text-align: center; } .subtitleWithChipWrapper { display: flex; justify-content: center; align-items: center; margin-bottom: 0; flex-direction: row; } .paragraph, .paragraphSecondary { color: #757889; text-align: center; font-size: 20px; line-height: 30px; } .primaryButton, .secondaryButton { border-radius: 24px; text-transform: uppercase; height: 48px; border: none; padding: 10px 50px; cursor: pointer; } .container, .rowContainer { padding: 0; display: flex; flex-flow: column; justify-content: center; align-items: center; width: 75%; margin: 0 auto; } .section { margin-bottom: 35px; width: 55%; } .heroBanner { padding: 4rem 0; text-align: center; position: relative; overflow: hidden; } .slide1LeftSide { padding-bottom: 30px; display: flex; flex-flow: column; } .slide1LeftSide .fullWidthSection { width: 100%; } .rowContainer { flex-flow: row; align-items: flex-end; } .imageContainer { align-self: center; width: 100%; position: relative; transition: unset; max-height: 550px; } .imageContainer img { max-height: 550px; } } @media only screen and (min-width: 1440px) { .title { font-size: 62px; line-height: 70px; text-align: left; } .imageContainer { align-self: flex-end; display: flex; } .entImage { width: 945px; height: 367px; } } html[data-theme='dark'] .titleSecondary { color: white; } 
html[data-theme='dark'] .subtitle { color: white; } html[data-theme='dark'] .subtitle2 { color: white; } html[data-theme='dark'] .subtitleMargin { color: white; } :root { --site-primary-hue-saturation: 217, 73%, 78%; --ifm-footer-title-color: white; } div[class^='announcementBar_'] { --site-announcement-bar-stripe-color1: hsl( var(--site-primary-hue-saturation), 30% ); --site-announcement-bar-stripe-color2: hsl( var(--site-primary-hue-saturation), 55% ); background: repeating-linear-gradient( 35deg, var(--site-announcement-bar-stripe-color1), var(--site-announcement-bar-stripe-color1) 20px, var(--site-announcement-bar-stripe-color2) 10px, var(--site-announcement-bar-stripe-color2) 40px ); font-weight: bold; }atlas-0.7.2/doc/website/src/pages/index.skip000066400000000000000000000022531431455511600207270ustar00rootroot00000000000000import React from 'react'; import clsx from 'clsx'; import Layout from '@theme/Layout'; import Link from '@docusaurus/Link'; import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; import styles from './index.module.css'; import HomepageFeatures from '../components/HomepageFeatures'; function HomepageHeader() { const {siteConfig} = useDocusaurusContext(); return (

{siteConfig.title}

{siteConfig.tagline}

Docusaurus Tutorial - 5min ⏱️
); } export default function Home() { const {siteConfig} = useDocusaurusContext(); return (
); } atlas-0.7.2/doc/website/src/theme/000077500000000000000000000000001431455511600167315ustar00rootroot00000000000000atlas-0.7.2/doc/website/src/theme/prism-include-languages.js000066400000000000000000000013251431455511600240070ustar00rootroot00000000000000/** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ import ExecutionEnvironment from '@docusaurus/ExecutionEnvironment'; import siteConfig from '@generated/docusaurus.config'; const prismIncludeLanguages = (PrismObject) => { if (ExecutionEnvironment.canUseDOM) { const { themeConfig: {prism: {additionalLanguages = []} = {}}, } = siteConfig; window.Prism = PrismObject; additionalLanguages.forEach((lang) => { require(`prismjs/components/prism-${lang}`); // eslint-disable-line }); delete window.Prism; } }; export default prismIncludeLanguages; atlas-0.7.2/doc/website/static/000077500000000000000000000000001431455511600163275ustar00rootroot00000000000000atlas-0.7.2/doc/website/static/.nojekyll000066400000000000000000000000001431455511600201450ustar00rootroot00000000000000atlas-0.7.2/doc/website/static/favicon.ico000066400000000000000000000024531431455511600204540ustar00rootroot00000000000000 PNG  IHDRKvsRGBIDATHK}LSgƟ{޶Zڵ(Z*(Q&:$ɲiEPCׂjGqY21θ1':XA BVRm݆t Ms~s LT}jw 1Iq$A}A7pu n޺[$[hiT#ia=ާ 8 av`o9/~pisj%s$;mgk jy}%G *du3XCs)E0]TZ 2Qᴕ~zw(0D(r@)":qaB:Bi6/<<,lb:Aɧ,|<.7 ;,( ~_-U#M?u|<.r%ڜZuP uq(e>{WM?n.((p# 3Mjhj'ѓF+jwr+( YK 3}H0~@ t] C,P(lپAbnGt`3CT!sn+Q%n)BRddCەQ]]fٲey<ɳ1%=  ޚ`(c .s (4aXn) 3fjJPV"kQ DfԚ64(հ{׉IN_&taN+lU<7Ag H(`?:D+q=QRR P[&Гb1r) 5+cuj4i ?ʡQ<~%Xt~?ɧNJ:W|I49hW=%S!.qL ?0/hlRVʓ)qBD"xsウv׫>Tw!<~ӆ# /^\X=IENDB`atlas-0.7.2/doc/website/static/img/000077500000000000000000000000001431455511600171035ustar00rootroot00000000000000atlas-0.7.2/doc/website/static/img/docusaurus.png000066400000000000000000000120261431455511600220070ustar00rootroot00000000000000PNG  IHDRXIDATx] TWV\v7G3Iwܢ&g 
* ( 0 6sb3tZ61&vbq;QvGLt҉{Q&SXDWV󝃯PV",F5 _3ęj^c ®`P&1/$jd6Km!8wF@##5nrT.]jeg욻 /0&E2,|d-kj^X-#e" 6B2ComŘ#“n'x׽2 _4c#$}vQM׶ ?SR@d#b}Cu@6K/`aXaQYlD& '%~,Uۅ yi$_竹CkA&$ 1<#=a8*^ 2Ѫ90A,~*y)>Cut +Q "N{2+ZAp*225l5(&1Q:(ŧ6qB _xX dAS\ͥ0~,^].33hۑ2.]c[?sLgC uۙX~f998lh>琚@=} >sAp4FblniCY4[ո? d,D|5?+ߩџ$kvr0" J33'o2l"_a6iߦޠ}:!=N!̭r].8 =4}>L@mvzձ]g 0p鍓~5i*^y$qϴh3eimSV2 rViB[F/f<䳶u_΀<.3lcu8c!>jw^u[Ѷk{z$7]l3.p O@w߬s4(π4bDT.I`H'{=[T@ީww:RnT-׏S49r(GHI[O>{[!ܝI@pzɣ̓ 6HgrGNg̖)0(DӼCޱ^fn5oaP 7p+Mx|㟴aP:3Ma/Qd|^~V;6_ ͵ϒt4 /Pt>\C&q@M&n94+iٸg{3W%F J9V/—^տwũv8e;fL:}=̍kG nΤ֌=[Z~f34k3QDij3` 'B#HsZƫ}fxbÔbPBڵk1ƹO0p;4meeezj(zء)%%:t_ܪ=z;+n:m.\{W_}EN ;R۶m瞣[:WVV=V+}Q2-7߬)EC'NuwfSƜOVȌd2œԢ׷SO=E7n۷S@@@H^Qݻ?ÇG5;/;u 4i[sO?)w߭|(/˗/K.QII _^]cRL6MnݺUݻw4c E}UTTА!C,[ tBaX[nu(--DkF{8xvnӦ ۷v۳gOM=e1.^Hݻlogmx w2zay՝ fTYYiզ"##6ÇȨ4h ŋm[p߿Yc-шo[` ݶm25kƍ3ؿmG.Xfym~fF{̮XBn3l05rׯvΝ6۱ fV!Kƍ7dWm0:便Q5[?C֭6]]Rnnnk7&>c`mlقLިdiѣ>[C2Ywf. lҎ;(..Zn-s͊c  |MjVa9ªQ?C&] vYtT/gW q·>fmS>jݓސ?!WW(w\YEL ~kv2DAp^=KtQgRnj ]gXRSIwojc}ՐIܹaJ!qȤõR}TI; v;4>C .L[x2\ .IY/TXZpOA9>ϰVi/bYrdҽDw( U.MIGD&{"A&My:+{՟rk>R ^z(񰪤5d=^w`x^K,D8ݞ@3>/2;@}##nw-(,8X+F MqW@dXA. 
4i^pu{.vl-qpHIpQF ɘl4솑;9dK l36VI q!8n2Ýrɑ@b9L$[Ι ZgZ6܍t^rF`>ЯvY nq",ő[l`3x-!z"Bdϱq('h!*RSzXkw&bG(5D\ +gEp4B̤_I?GѺx mtaB$kOvN^ M ^@@nIENDB`atlas-0.7.2/doc/website/static/img/tutorial/000077500000000000000000000000001431455511600207465ustar00rootroot00000000000000atlas-0.7.2/doc/website/static/img/tutorial/docsVersionDropdown.png000066400000000000000000000610161431455511600254730ustar00rootroot00000000000000PNG  IHDR)FiCCPICC Profile(c``I,(aa``+) rwRR` ɠ`\\TQk .Ȭ̭~,*l`63 S= JI-N8-1V./);l"9 v:NՄ97lD/l$$t$6^qW))JTp!\AIjE v/,L(QpRg^!(!?%B؀(Xa`ؒS p8 (o,iF6vig``d`{Áo-+^έ#үeXIfMM*>F(iNxASCIIScreenshot" pHYs%%IR$iTXtXML:com.adobe.xmp 276 494 Screenshot BiDOT(0*}~c/IDATx]E.r8AD FP@0g8`ÙLwW3SY"* E*J ORݝ}[{;[7f`f( jpq*"oʲ<QF9]`.Of VdNI] fx^M=9 wꔖgBfsW,\9oH)9-,%y@`Opegb.C`2 xCÒb]҇gJq[X3}x8f`E4 .Jn]r f(GAgN$w:|3fT(g.3D 0Eg 8 wOX9b3 0"YY#1v 0%@uo ~,>8\3 QY3zhgpY3Pr T7f)Ȣ%1@I3Pě;C" v`0:)j@u$: H)ion{=Mm0g·R/ ϑu)*=:r2e@)\=p'=*fu_ffp` B%4kbVd$ni/Ɏ*G3̀+Yf]P_jP6fiSeSIcͺ bk:@ xz/SXumLR_ w/Ũkڞz}.{a,36I ZTQ_1T]e󭭬;"P mYE@u`W(BRȵ\-sedɒpvy+V D*:׭:5jja3 80# B]uU8]J$܏ {&OÂۉ'RRzvBΝa5َ;t y=opUəԐp$Cw]/gƊ0lƗ~6^!^΂?| - [~dM6s7h앗f@π3{VPp+p9dxU(ׇ3.LBv#537nCY߁WE;Ҭ h:X:7iCkϼOnmU)CM!a傚Ί%3>sAۚ *ܴ}ԫWLQOڅ4_+5oޅ~O$u'fOuka5D.p]9ƀWmח{}*oZ׮[?|D=z4Z޽CӨϗpK̢9/[TɄpa7㢼7k;tm7h (cτ릌-0v^> 0U2!r=Fu__jݶm[xi*~ǟVaWEJa6]K$ej[~Z_A%6/_} XHlhhXN#7mB8g߁g°=+ax 0Uz>lLq]~<^>x]78pUVgŒf@ݺua6A֭?l]:38cn)lvm-XO\K+WyN0 -7w862n8X2?}{ 惈[w<o&MYwD/FK3_׹ G˯2}e˖=嫯`++*hq383π͛UɓGg <Wz 㝺 ?BK7OC)wA`=y/plp;C5B㞅K i)׼A#ߞ'@.{h0gr8a]p#\ +% 3݅:aQg9Adr: *7x3۴i~^}O= W^wX<6[} ;͚5+xLVf* hҋ#a=m1"ib˦Gn5Q+r}R;y泠zkaXZk޲qKxb#tP3ՎEL{ݤgi|\G?̙3/|*_yٕ6lv}wXpLtB%ACVyI_?W_WʵFAN믿vlx |Gxpn+ܢI$rr b5σ}[\GP{pan\58s+0a ?6nTlc?si$GX9]r yիׄƛozx%{w{ljՂ 8$zO{p-{ 1`ijã9 ]_qW&KC I_ \pŗ_TnQ|*:N|Ɖ契Cn9>=no͛>{9*kvVDsϏ?g}];RQxen W?>s'穧jV~f8\|IȮnq1C%˹Pg.ݜ#⏯v?h!-},2C32 vtݰE4>m<<3om[]]b`\/I.XWiQ厳ک nZiMw/_~5kr7!vp]w)@$[*zO _r8故cN]vu7x'pw~[Ed?/wMQpJVS>q>1&l}[_mSމ_K+p rlhcS֗xhkifGȧ$tƉ*b>1pe횵3gխS^}+իEA;E=.l@4ǁx8*Y [zs41>~-jСc*7ll>y\c\bݾ:rUZӍS?g^{M!:poWi- G)źX$k+Qrl^A_t)x!xo昊 
3twUzyo?RWmt}y0jԨӼys{i];01k+x >5siY/Mq} Or އ>j}1 ~Nz5kԫ#,^sB ƺ9\\G4rP.]q?aj?ŤϠqq.j:ƏWWTBs_O&LCڷoo9&ʫ3)KI[UoTfゥ|bLG}ǝ@_r;E? rfWZ#a!I?{2}>͢mM/@>$InX,CԧrnKWթnbӟFq*١G<Ӹ9-pj6]+ʳnS#g-!\HB=~we׼[M%wEY @B5E]ÓFU^Vp!C:^xntS47 !L"j=u?{li犛~:\u?u䏃=oX<=Y`>-cp)Xk,C;"_hrpY؂vFϠ[k}#+&.6 xO,U'iW- J䲒gʼnĈ8N#ST3͟րw>qlaG|L߻n)KeQ$;vlߋoD!3*ާƯG瞻`]vI_)_ rW \X[XO cu\]坃/`Y9n{uR1磒zWog4IGroCu+aӘěE; 9 ߵ|lclq^*x9t۩+<<5 JF:7SsAs=`Ҥ/*U"ܮgOGD%<5;i8WC 6Gw:? *&.wL k?n~Vt/m ܼK4O{oY|i2*^}rѾm^yjI[mZG&жdXX~~h֬Y%I8א^T)I&מ{'onFkm/l)cږMmF>{_J3nB-X8LM|4"ܝt6ۉwi㟇ݚl +֭w|(l ~\A»ɩF̩jws/pKESyO$5 GڮOxO/yMI<nfמ 7~][K]Tk{;d0켋3  h1p"->LDݱwkŦEuQQh(>pjlW}4'.OxV--4v 6P T%25>m0X{82w46}ޅ?f`m򗮈7?81=ѣDwS&{G{U~0ϰ&-b:uݡ~9T(ñm0k͇8jb3ܳip e2 (H𙲘rO]~<||1j1=Zoۼ+4[_v(k<.x  #4b<4N$ܔ= ?}0c [lֺ mټKp෶GvݿNLGS2bmpiaD\,POۿ ؠ=69xi|Eᗫ sڻ1eLNf # $ -$Y%kK \"N]㺆p'rm0fLm!mh^969wF$|91x^nr6جa~KO |ԭvf i?>9]blY_6uV}4.[?QGgֱCܭ 4x(3p ]6u ^B8\Dcmials6VקC;=u%Ǧ@r{V[eoX\4r4xKVӸq㈋ڵk ˗-YfWz;[Wq%DžHEXO+-sf _ uW՗$m .ǽA<`PؗpuA^z r0d轱wwܮv!xF.6Zlpq8?Iꧽ!xyy T|Do<8\R?9'>1bSp%u?,q5" b>iu\̵]ke|0wIvaC\">1}I w>/>mcp&ɇ~Z\-{1i|3Yf 酟%&[Lҵ !#%#0䶍Ù&$$ +e-(7)x[l7p/IIbEXD0Sd`}rL~_|OS "NyT|V6f$|s$w\Kc-g䵍ÙYIbl09d/8i@)3+gx|ϒ92C}u[ guvSL_\1LG8u6P+`O92`+I]6.obg-b\aE+\M8ίbL>|fOwŋ8N/93P \}0&Χ^||ߜ"q9d>蕝pl&ƄI'O*Sia(inZsl0'w]SΧ#:nIXlus.1;틧O>8ίa\>L1æ#=`UVUn1bn% 6c|:;Ks"^ukߔ[ca"WVϸ]k:;Oeukǭcwz~_> 5b]nhlp&O#[*F|:{\!b]ekŲẶ)&·~l5sު*,i#f ^Mո0>4ctҲ׺|ϴp}ug7atRɤ5g NLeB\p$5aL~.&kv>U N%Tytb('XW3gbQ]D$uƚp>btyTv[G&0ԛBLf;͋k.#:nkU:.vl8Dx'@T˞/Ay&Ow'ź1%B Ė\q OesU6]N]C7ч͔3VmrbKbuM̬Ԉuiuv]:]֥Û裦|rofRӼ0:]Mۆ'*ήhfI?.|uˀ✄qXէûUشmȽ*C?6]ޜ[7u> VfM]vܘf2֭5kքpBؐ#:&{ƏjȜpJ܄%W\*l[Znݺbr0` ʕ+{)F _XLšq>?bϔpw]su46kРpscf:0zjߪ<QZ[Ux"2>3­*x۹k8ɯiTm~m{0`J|ֽb eU-eS:;M>?A/­!*صwga\*MװaCmד3P2 x/_\)y>l&;j$܋آ J]&ʗo.?&?7GdLRU^sMIxĂ]u~_7c3 0UQSlc.UNS/Iv]_-Lɯ8yu6y/G3P /~e爓mqsU ur֊82P4ue»TxFyF uxf`2%K,qsܐFNΚ{4DNe ]2V㚲w-A3@0Ms* ڰz?QVϸMu66dM23Pn peL{ZņXlr]9k䫄 D.:]miYә3PN /Ŀv w惑cĹ8ƃƍ3 0eŋb- Qat6cs欹GpEy[ *Ƨwe|vN9\,>g 0@u`(vX1Ɂlr-9n@?6QdLs1 x:`ʉnܳ(Xcʩ\cΪ? 
DdۥtV糵pen>nt13 pE1.>lzj7ń 1g , TdL)}n!Kf(+-ZTIeIJ{ʩq\Zu۷penŠ>pi̍`ʑnl&쳙bT8akYr:;a7mZu>&c\&,13 $ܸgQtcNU1: D$qm|*MƘiX 0Ȁp#/ry-:&daɯ ([m]ead8x`ĹiܤI:33PV ,\P+ʢc$H8W]l:QmڞΧdinq1 7ܘf@&/l*f|%(bA]3t>]ͱ#M>W ȳ`;.\؈ G|ydj&_s3nYpúuXqMcDnܾ}{ 33P6 L6-ګ(cL}9&#ʆ>g *ܢf\{&Χ}11\*l3m}"eܲeKO9`f`DKkudjbauk֬))E 7k&Χdi.1AӘz͚5Î3 0ey=Dc8O8y.ce?αp&WcݺunQfDŽd<ǚe\Ħ5nڴ/ 0@I3믿F1.$浙bT8Qn ,;*ll2Fیz7fF1@92-X"wf,ǨImM'bwNs \&Χ˶$s1wck~`2dqFcꑖr.*m:sGW/` mNm瘅<f wXIjԈ6]^C5W.Ȼp%3e©|61NĹ&Lf p^DSU68#żd-L `HmqM|"mƍ`c`(2vm"^Tj'ܢ҆Tv2iM9Xr 0ƀ(ܸwQ iLΟ.UaY*W2-W6L8Os\Esq,cE8K~[f<@ Z؄c|ƦهslڪyR.ƔpD8sUΆvlT{^[ܲ6LOemOōU~Hf(m+O#1SLN>%m>cSSu6|%!ܢhaf|*l3m}"~Tc[{5pqDž~0t hha'|*,Y8?Ѓиqf`$\"T<}2VSEDLsQU6q]S^jk={wTwܾ3,\0O 4oL`8x≧º:u~v`1@?2B*Xıf18n.Xl*N 7 .2T>1EXgeQxupQG}VaW=G~иv2@q 7VCH}M ;E($Ms[Sز*܍51[,2#]OԻU1-{ܹN܈g'&]{0g\hѢ9ԩSS3I $bЏMl0h&T-xIO?SM/ ovhCa?3 0g T8fd,αQݹYQe#? 71$d2E8XqNc]CK 7|~=!eC^$܃_w}ŗ_zqT/㰞@qmc0/直l 64^k /뚫{@IǪi,*h+pM2N5w! wĐL\_Uq&ʗUF&O <V"'t f=O^zq$ty}TE77ᤓO  5!}_=ѯ x8ࠃaՀϤ:ԭ[QQf ŒpʽgcN՚U6pgBE4El֯‘z3ϜfK^'=Ͼ|[#jX~׶ٶ]='q_2_0ۢ|7PV-1%rSMLp dnNm}*h8*mԪpBF{/+\"V5V5U~Q£EoxW9ޜq혿ܭ=.4uƦ N<1;T&M!Lipxh=/99Ю*VYPf ̙3'`UcM%lrʍ?T-IDAT} eUy*BB (H-pLt%QP҂F;+ ֤vű"Y"8$nQ0 5Y&n&%2X*j>߹{{={{}_{oT#LjٜuRiȭ5RXzizeKڵ+u1%K} lڴ)r-_~cMo<ԾKF |k wl~'YI9ɶ|&O?Q5\S|Cs|{Vxn1 |+0; h.bFq+1sR90:7nLosKq\8{+/g뎚qc}3᳟<ɡ{Ν֮]^}%/yIR}k Fz/|sᜳ_[%r.Zx~jÆp)?\wIuwyg8dYݩ gU IW]7vL#flJbȵ`#ozh56H͹8zSn1u(7^ מnFB=qA_Gcُ~;^ /ylS}&|8c?׽ _3 C9$_ڇy ;Qbc$y,1F;}/%$oⲯ#v1YYf߉R`ֶ6l?v+O-֯_߷6袋zt\W`ƍFkNӗSjTQ!kc kFc!z[p֊x KS̍9VOb2aZ溗S1eܸ{ԏZYd֌9Vg7?O t\W`X}"M4B*=ȑ=˾uަ)ǨҸsYx9.-n 遃;olڍ\W` hcܐFkd?UbC42LlxiKrc2wWUtw'qW 7Fkӄ~*=ܸ 4I!c1᝷7 @qCmкNqrsͳ\̍{D}ꩧ" )%bK֋s0͑uS۷xbX?qp\qT eCYWdε$uϪs1Nq(!&<S=rtsܸWG,4ܘ#{2ӹC9=MuL4WnPχ+ 0n ]iL͵zM#tnչ7~V|+0r qwpx7RwMkŎi5%1z1Jn ~ }8*0ƍk&ȭfa8$&semĚ7GW77taƢid9yTlNe ciKje3Z{:pݸWGJHmd9ӫks4ܸ'Rjc\W`5nhCm)ɜkJ 9쵩9iqCjh)d-sɕc9nPLJ+ M Mh15GdεaZ溗S,FָS[z+c\[91F\15g:{޽aȹ_LJ+ 0y慅 !݁]>笕X=1ĭ#dfcsniSΝ;î]:%\W`XhQXxqm]݁44ZY=1ĭ#·ynLYo߾=%\W(V`ҥ&p@MQKM[jJWw^,94m]u0eرp\Ws,YR}wL7cuަ6TfqCjh)d-sɕ8s\(בxWwPӇ+ L]yqwph<,5D5Sdzz+c\3gĞ3Z{9N.io۶ +|NvRL:XjRkg4VRǸgΈ3g0bsp~z 
{A4*p[׍;A Vi&,ոVӱJWsF c/q}WJw}z 3@wn\,IKM^lԱb=1ę3buL4/6m\ЇRŜ ](պ4n?xՋays0: `$?v:ƕ8sF\TOsa֭ > /5{O$*>aϞ=$Wp*0^sHdNKM^lԱb=1ę3buL,.~MK/woxիàߜu=ƴvp\+Z֍{ zi8,h&cz/c\3guLb\y'|rXjU-ƽe˖zp\W`8:@&p:XjRkg4VRǸgΈ3g0b15Ǎp\QT`=6Xl^ZSsb$ɤn4VRǸg(؋EkrݸWEܸqk(k17n*SE!!Y}֌`\#u:>\W`pjڤq]5-~id-sY3qX&׍p\QT0剗tjXOVNbSsܸy<)0*-%k k^Nmr*L5Jʉ1Zsً7npFQ7~5FoaRiqg(1Ϙꑣcj7 TƍTp1ĭ3ƭ׮+0* qq %Ajeq7T t{ec#{2'_b1dMmΩLcd Ŷo\F:J |(*0S-͓y,Bǜœr իksܸh2EV^(A)qC5+0 ̴qKeS0G5t^7U?S7CK115E7npFQ7~c5FkT9NdpZR,g)q{t\QS`[!se)&Zp gaoQϩal!>}Y\se9䦰98NaS;¡Cf+0 q4.7!"ǐS=%7ȉa4v=¶ݻ E8#V]۞;M /] >;>+;pf[7^ڤq4ƭ^pAB,Z3Ƹ3J8l}_*ݍӫ|Nxס'̎:<֡SGng|ot7\W`(m㖆\GEL&1b<ݫɊܸ h5\Xna1'u =|77;֫C;Z:Q8}ae'?yxCM?V?;#_T @m#b\'\:sv@mӰy:,\V.~Z뺍q_[0O83Ytr7oO&/eN֞+/U9Ⱥ$)a~۝?wnTK:}qt+5nm}wΉ;𦃟S%r]xHc^&۞+Хq̼4Zs0Ogչq7.4Taa1{$燳'>_ڸدxY W_]C O_כ<9sWøYl5-n&\:sR1a0+G,YmcWׇ+7UsÇ;8ov ;9ΊcT]W` IԙǢid?UbCk8؈qrqKձĭ#΅9`ֵ[R| ?zBx3Qcpou{In+ 3iڬ!FF\%65`L9Fe C,lb\\Jjr3>1T+E>+[!Ixѿ::nZ1W9͸A2g*VnaTP:NenTZT#RYزeK빩qz{ 3n xg?z#G_V. W5Mziњۄ%9֫(27n3$2G[KbMQg.7^ֵ/ÉS6~_V^YGspO["U4 KQcjj{8a Oš8/k+:7?~+c~7%?tϿk'嬃 9y}}/\W`xڸ2oPGs`rr WYIǜ1Ƶtkho~랉/o927VoGr?X[qW2˸i8mhq$&s!1Z=*pVHIdem&Y}b:Z4g.h~ukֆHao7};?=qwn.+ ̚nһqWz49Ⱥ)),G\4o?tO?/ۑO;(\%7V?T -`dWnfʸ!2Jbru͵c:Ɓ;^q^9.ɛ3J"sѸ7νk/Cq}#se)+ ]4@M8's%kUa1q+ $n[R{:JlT{o/[7WTS[M\##_޾gǖ[OjeW ]ebB^&YuV@K"붹5/Lj\Q5n}8Lݰs[m̻A;bMg>7\}wϫoJ 7-׽_rkc>\W`C hq05Ϫʸ!>Ajegl(dU3N[O?e@HFfo.U HsWӸi֐y,Jɗc}A!9u=ɘWϭL`7N:= Rs.>^l=f޽#M۷7qyM۴WշDy?V!'Utp_A{ 31bc$y,͒(M*67űzs k0BjܱkO?c2 Oo}rswypsXŧM9 @Wm(1N\G]0ܸ+h"kg0-Ψ7ɇ+ ]7 j2E#1s rkS[saqW"HcUQ˺$),+pFM2nh1B+M[u :}btl:)$Gd)(Iэp\QT`62khHQby{1zܸ+% Z7m$y9 j;w+ Ӭ%Kŋ: ;ט-6@Qb'͸qi4ێuR<5qdK. 6l05;3͡8qݻ7l۶y+ R`a޼yr6nxs1iqܟ\Ox?ƍ Өy+8V-&rG7n_&ǝ5hrAKv+ +hѢtҁׅqKsdΈXpc[c`cmA!j ܞY90LY>^6Xօq]w+ ̌]mcҸ[S<ݳٛVAIn;rI^9Z1Z=ohyAoqp\Twڸb<#Xfak^Nmqbp эkՃ&)qɺļ2nPM^q]Wk@ܓe473Jlr.j i76. 
'ci\ֱ%G+{M=ul.Ҹw >\Wkh3kj\r-Cñx] " "iܶ9λɼ_s˝E獻oZgB*k_~P7 \+7׼Ð{!ٛsƍSf{ky'3ϗad /e9 |:=/ sF Lz…a M3f״5nՃbѷx꛼=^'].Vgbr^,^~ʼӸ;E\W&q[K9Cc'TIR$DܞҼ<7nyu,Ot5\Xk͙ƍfg,\c%\O\ca~|u ((>c}(M9#)'o:p U&4\Lϑ̱io~azt\W`,#޺ur)4t5\c5Ǎ[('͔p&92y'y<7>UҗWpF|E32IKck\r~I\ =?p7rƍ&)NiOKW~%c+ Ypn i]uu/88侉1X7ĐFIqbŕ̭5d_+{%9Jp\qP@L.Mm랮<0r^dA-թril`#kcMYw/X p\W`+O=T}Ժʱpl-ݳ,!lڍzБ^WA\+ɗ-[V|WpY|O>YoQ 9׽65<61F7ƍ1)Y|ܞr}X"̟?_+ sBqJ|ݳ \ ꍭqC>zY.y%u+{99/y++ 0mDDmf.͡Kl1Cc89VF skV/Y\kۓ<+EƇg|+0/|%=E!Cl,]zֿMarl R˹zz}'У+ ~'& syrE50qzho76"$ĉsq1Y\k+a/4u𹮀+ tovqXczOJzb1z^ oϨqc3t61wMO鲖k\k'{2OJzۦ V#|dUS9I̭9MO\YV^Y.Z.x]'qٽlF^*熺<\ZMX?xKչ\\4ת-LR7+>}a8N̸%77;wSvEJkXIvkkaÈ{t︱.sjƻSkt?D㦮6/umE+0 4=[{̝Y= 5T7I͟^^s`c8z2;nlCċKp-STOzz:ca=c'y2wm-+ hzTfԜX/xNM˼VC=7N8Տ4k}\/ST=`-XSy\rz&^{y MO~sxVp|:ZMk^M|9q#zY m︱N/)5wdn\s9f{lK.rcƭ P'kb:MX?x5Qclq>k-9/&9rǍw䘻V/շzm1=O2tJSҵ,cacu1Y[+0 ̠iO98V°:%\ҹz>j N >{Mss1_kL] 4iq&2 =ƌȵrqrsj^ZMX\]46H=.b CקּqSs8ՓHT)9&nobz^uZMkXsa=4p.clm:]{$x 'Vsx^ M5Ω#2/ۆo)dΉ;nL:Oz1][{֜a=ǰ68ps'[c\9 @sqJ8%ذоiL%CNj.9:qDJN[X<͙R0NGNYƝ:6ytM'qy)^ikLطfك)C=61'Y1nlCMc= 4GזN3hs RxS/-ܦ\+P@ɓsڥk5SXs6=]̱O!'59c8'&norҜKaqJ0p1u{t:XƝ3=â@sɼgs6=>m8ּ \ }ﱩ[ˍ[*"ԓ}gmyTXkQ>} 92nmS{o}WI'9vOc+sְ8%X C﹇{lcqc3]?QMjLZ9m9ai|1AW}p C@ɓnMϚ1]cukíるE&֬qsC]?Kc= 0umq,Lӵ5rYscpk}s8"jeܒߔwJoɜt~[muy]Cs׋7{Msc0nl'̒~gӍ嬟É] knÈ?p1GF.T\W2]m%@Jx1.p xNﱩqc]>k۷M76맮ulCO-ב6nɭ5s9Q:g=pJ7qb} nlOil[^A\K3TƍMvYN7ՏJp%VL5sX'c.OΑf\sW`Oڥ{ʝK^.pr*Ÿ)C76n:9'+-0ab?p^ >Г-9{Y9@J$J8^ nq ~RxS}غGmKIENDB`atlas-0.7.2/doc/website/static/img/tutorial/localeDropdown.png000066400000000000000000000725041431455511600244400ustar00rootroot00000000000000PNG  IHDRXFiCCPICC Profile(c``I,(aa``+) rwRR` ɠ`\\TQk .Ȭ̭~,*l`63 S= JI-N8-1V./);l"9 v:NՄ97lD/l$$t$6^qW))JTp!\AIjE v/,L(QpRg^!(!?%B؀(Xa`ؒS p8 (o,iF6vig``d`{Áo-+^έ#үeXIfMM*>F(iNxXASCIIScreenshot< pHYs%%IR$iTXtXML:com.adobe.xmp 344 418 Screenshot dhiDOT(>*=IDATx]>׽^Ƹ14lz N$B ԗ^B`nlccp wm={j%fv޻G|$GVHA@@QfA@  PT 4. 
D$A@" DIL***",D]+/ PMAU%LO ;keR]`X,.OjuD$S;2 A @c("ҩ/(tJ "*/P(uJ 4 Gz&TJʕ$ Ez$6J犖  ΗBz"BJŽ%"!^\i]G@H*R$ ՈTr $]<Av# &"!x /BH~> *V BF=&"!!;A /"r(ZA@2#IDBBvD#@h9HH HA@HQ.rF" r0"E@&AꛔED<ثR@9O\lRANR-U"IxMυA6!P&Rk{ ^~M^7j7YlA %Ju-~ܿDTIͫ/o/٠k' lo7N @)MֿDT @BD"rA(ɶ*ҿXDT,(V X("*1-ڍ@LHDXc,VXBd1p"RgsmpinŦ Ś`0j֪3 o'R\hӅoDTPm`*D \4n 5ھ;{Иj6sׯ. A݊="~u:c)oV"M>cS]jp&X ֭ Ij^'"av ҹвac0e6ݢNF1_F"rMЮA ۺ7m c:̒3'-F@fwеa ߢ-ج]^[!ݺ؞Kx;C]&K[ͅ>}!㹻k2u)T9QBOzEѯ,"*$A26n:ቬkXۣN͒kͧӖ/ ݛqCɋprxnX;MH'6Fn\ Nxь}gQ(QJBcSS@DJ!߆gLɹv;6o#'?;ng-Wn r$F.A֘G&$ DA$Fٟs@>{@W|SuOXq.TjZ8qBA-[wj)w(s?,[}7r!ۨhDEkj{h%mIѡ9T߈x>ЦA#/[1S·^NLA_Y\FMAdfmU7ؽ?\w/tb'HkCpnH{lMDq'qS'PV?ٻs/;`8Xic`vWEeMcl 5~[ܸ9vSwH>;BOYoݢA:uڢ6{| Ximy釭0qbhZl״54kbnXׯu`{{tf6ФN}[kn쫞'&gkc'ô/_:/( H#0y\5`pH8K?8;o/_͙説z`t>nx?TVFYYaLdwD//j&j;-aW$>!@ٶ[n+w̙ --jҦԲA詞P M9/ Tv`_9L_6U/FdlupӋ~ TREF]вCrqeUgN΃TkoFE;Y8^)#,|y\5}dٷxʪ6X;6n#o竗DK9zIkg1ӈAb& ُD5F 0_=-uwʘ2ꔀ^0 f ެ-/q`<z5ieDoWy|:YL ׮Ȓm/lפ5BNZ.n7xXӊCG,"M>Ǘljq{WN8:A=O{n8jsj)NϨFu o|;#m ܬKv/ԐȨD4mwއ_,7n QVѮַE蠞_ >V[&ھ>L'-:u+6Q: ZĶ)ݎTqͲvl7j Բ!'MB?F Aj cڤj ԝWjkxxqYuQDlz82˾Ԓ~w\.ߴ>Z6oBMh&w]˅ "A/CGzE>\)4 /6gl3A>C'Ol?v~ݲl8ap5_ήVG"ݮ{V>~6#;.pvYϴY-\w\ZȌϿf}f/5#Aܨ㮭OExrW;J7("ImszG$D?h&\yRC]ԒON||}m[\6,f*DGcJ#~I%C$%!$>|{]2w9wU`83Î؈OHRWϧ=?8W}٩aӌ.u+z禭g Q>W 07 1Pwzl #'9uA]Fè6U:?[I/zSTdutzDi"ιkBX5Q?2% ٤h]%|!|3zF1?'UQ`hޞo8ǖ.P$"L=[g\ԝ/a>h"#6pHŴ'=j'߸IPyV3N9D ?4j֝[#O%UN3wyn.Hu"¾.Um_9`o8݇TNYzENV&Y;*|v`_ ĉ:'fye75jh5_&xJWM͞HUx~ĉps&epam8 P?N6("}E"|xZL;Y( !tM{rP+oi1Ŏv%%$~I|886'- w mMW42 Mw,g.?V\bzlc#ng1/# e]7 K݁|7G~vZ.>Ƿ?d; jAΉbnQw1t UyGi>-W]N!\5bĽe("zulė3kܥ?Vg:yBd 2Ns7v$>~|i|iEDձ>qy>s~we L<y{Z=DvXc̕8uSwQG B4ˆN]X>Z&\_^ &jzEBj}34":jji.I8Z횻q&ɟ<_V̯\۱mcrڎ#R\Ƶ*[vЭUqw|HFD'[;O[C2mHY2P=_쭖P˓x^m7]eåZLӜċ> mf{Lz'ĵ uLTxyY݌1?m3P=K pyff1*NM}ಽ\dx9 MP.jfdz!M$"˗H_D D6[ը_4y&ۇA{CxW"Lӟb UU]+_lVKM6jkkguEPD v%IO{ ^V@ ] S1q8=X]AfQwDs}c\x]z*ܸϬzVۙm.1|I(Kk1O$q|ئ14cP<M1cMĦ@,-Ƶtj4KZSmPs7t@ƔG:F@6L}+0A¢gIU.N5LxSwwrYajY"'gDKgD.WG7=tRAֽr=T^3lpj7|L':޿_ 1K;/&]U$ԧ5+nDZg K)n7Oo"'|[}?-owN7#D/nym(  6`ۭ=.hzr\;WGÉ }0cGwJ.ҏ>4[D}/U-QzA:̍Gd zL{ϯzw¾ٻB\Q}/ޱܰc>rK6vY .E 
xpj``ڳMa|#H=W=9ψtq^ogww]۩#0*$p;'ZKk&Ϸݤq|m}8I|?ӈGQ׎)_苻ݺ9~3vE6܀e;FIUX.UKUQT+Z?@}snOhz [hzx:{65Ad8K =⽇AW=)M]NU PکmWUhjzm&~3?LȠ5P[q[5zQ9~¥93Na5B@9.Bu-:RۼJ.2JJB>}Kk2*d[Ikc^0n{T=N"53iQ{q>X{Z^Ǔʘq?GW#َ7cxgY5IuQo!B[J+:WnS!^;n"yo.lώ=ad=,W؜|J=PDm\a:@pv'+6X\ W4"p;{)}B;E`}FďhU/o')S)FvWD}DF$$}Nk¶Ċka<_=^O#|IXv ;,~queF~8lySF量g~/ n}~O#.]OV/ږ|u:{j)6D Mz1]huZەoeE۷;Yp;VGyj *vRL"OFC>?[Pטy릎 $1=ju=^cwDjKOQBV'&\v(5S[v|`ꪣ =Bڊ\Rote~S;. #NZ$>Ų#|'{=_i!bo4x=P\=hGe\>["T2w0x't:yyYvm'L}0EM涐q;܉/bQ4תh IDgއ:}ځ6v`Gszt:|Ԗ0`oxҮ~piEDxGyzu7f?9q:vXj>v>6~-q2:s$>x2cs5ԋ+OK@aT89'Cmm(b uj'gO&٧ ?QD::H3N=yYMrL8P%^YћtJTKu6]S_OLD >W 0Kˏh“;;˝Ջ< =3I 'o?}c7%=S/|wK;cK>K1piOq~Q6m~ӎ4,<ӆ&Rć|m9Ŝ~XMR%o&uR?^wݠ#T|L#H3mN>"['1L|FMQO7a[hiL#=zrM+VcB۪u\d6,PK/-:s( u~̓Nƨ8c|L4Q']MǙO 4m>>ʓ(jc(|8Ug\*RZqJ-ocs:6m Oˡq!$HqGqTG +-yo+'CDQl8>pr?*Oql;IySLm5R7&ƨK= 5$EZη8>Q6%ܖI|וǍEDϥ[A ryZqxL)W!!);=hIՋ@ ֻ$qDSoۄiTy*lj!"aGAԖQ9N[#y:!,P7n qK;[Gtc8ܖI|7*kbM7*6nqLB6'2A HuS]>B_8ܖI|7*]qF{D u8XW z9n{ڄϤo\(t?W_|}+&X;Q]T\=`r/0&3GK{DSLi_(|.$:bһt:ڑO\{}6!ؑ$$>4ho'&.|&8Gٸ6McKs$FՏ,"rM6MlrO\C ?OlnVXSDMev}qt69g.|tnh'7J$*f\MW]KӇjIzW܆>֕ՆF jRO8>Q6.Mgm$c[[w6z'G 5at&u¦+x6(m0-̟IђIKz.^D }!ۦIe'|lr3ɢ0sڡG ;ړ-Wa9$;7'JzL6Cmg\U4cUE PX&8ǍcIK+-9aoGz}lz9m#&bqPt>z_3%WL!+>UǸ˥!>FٸIt6R㵶|l羦}o"Ml&I+OC&[_Bmg\fI[ F5Y=ڸq]~6]yl ][|Իt>dsUv!"+NkK1űŏ#8v܇ʶ>XyXxv]9|KK$45N']moED W6n =1j(=r\{V/T\[{"L& ,n({>fGmt&&7ʼnL]mǖ|l-LU2)Mn٣Gm 7NĶ|M"'nKWg#7٦-CM1](18ٞ5Sl6Mԩ Dz$A@J[͛l@+*r6`G|hÓƧlCDޑuB @GGFꂀ PS@"R\z2N)ij%ц}XYD& aƍ$A@j6m>XӖb&&SC39J(2DL~ .A&"wE֭3vOh !D9&SBMXk: D&>2M&M8RZڵk2M4`e.9(C:=cB1{ 68"چ.s#Ĭ:bQ2l}zO`x kԨ7 |^;L..&3++L},tdIrڂ-xL.Ʊ%&oI͗P!vd$  PN"muYTm0% X6!""rwI 7nieQuSL\o<5qBACn6,gX䂀 Ps4N}hˢ&82ŤJ>Uѱ\n+#wc7oՎA@^:g':gN( ?]:nG"2 w酈8JRrB>)[iA&Pjs[&ED6 .$ DD8n}bNnj# m1 KVz\$Qz!"TrA@(789Ot zqZhjGt ZVZ3əuMrLz?Biq;,DZ}ܬ`"nғN#&eA@('0 u]uhIK(]>s[|n*W|YXU:!"BLrA@(7p|Re]W)MrLzC]NzbpWIDh02ꄈI Pp"󉙗t[S$CLq. 
aYD 2Fu,l؀A@\2k'e^e%)MGPϓ>*[ uSY( j @mEmeژP569r[YrnkǑlQIg(5Ʊrȗx[jJYA`ŊY6me'N5l.9ǣ\5kXodCLr!"BGrA@(708n|&%ҡ3|,"2 H.DDHI.@"Blm+_ۛ6KC=%?$+fϞ#"RA^zE%zA@j%3ǧOqLu#Irs[ n+ϟQ\{ǖ eY&q>SFl&`-}ez[O)xkxb'qc|H.]PUrA@  X 61s9/#p˺ΧkcC%$s_;Wl2#չKr۱cGWʂ  - 'Kיqd6[ueTWxB׎3D"2&QNe!"BIrA@(7莈OT1IJR7 g(tdžl1IBD$A@(G\Dxe S$}A=%.DZ lqi.|HQ$OԦIuWanG Mmq=/gem۶q,@ we&y>)DZ%`m>&=ʸf'Mmq=/5g"! &D! p|26}rߧnA&PRґM6j|0厈 \ $"L4Re綮rbmꁑ.nDDD.T6ɄK uA@(t"qӄO9rec:cmȂ㹩-rfiD.&:r ! ՗.\xIK@!@D#s/Gm\M>61qR2)erďB$`Pn]{ܝYa~'Ν:ó>;t'LSr@Ir7)qoeLu%ҡw(1wD&rAsSmڴ9XH-ң: N1W(8xiJ\mҀj)t)Mz&/d>znMdQ:S&=汈Ly)Q͡BJsLsZNvm?L $BlDC(')>>uMrL| 7nlFĥ bYA'x!:Lyqt[c %fl07fʕ^oGR9~Zlqtׯ_ƃf͚92MRXx1WUG=or%W8RƮN(DD>M'E ]S7٠ o#T}tde!Ioߞ-Z?#JrGtGE1<ԓn:kÏ>yAzu{_ X/_7߿m%ԩ}ԑσٳ'o߫=Y\p̱9szuV^{ϘeΧnP+?켈F"ڦ{wu' Ce˗/Ä[_|90`@P_(z*^>}z#`ʕ‚oaɁMDt/υ~&KisvI瞇'Lt JP""s݂NoV wN&@MDpǤ˨uXduzbg]`8*IWDtث~fdTͷp] ;"lnk=H ?` ̛??]?#p.ԯZGeX|0#Kl +ծFO?z.YrycyvS;(aefm͟o`mn&8䐃(I(klD$&2r1t\ۙqdhIo#~FDDDT$3JrȖ 8r"9gEc+7oW}o;}~҉'#ơG }Y Np q/l O['+r9q3τRz`tZ@ЉWM(]v똨a-4Hґ QvDd"Q F"{]}e:tR`zgzDi_~9=瑇`=sNfix \ w8зo{tџN9""".Mz3qoYKR7<.8PYϱ!])wf\5;py͕Oh{Bn]:(dA>&qƴi&ݧ?lڼ Tԁ/}M4 t{G$<(M"ǎju΀c=CZv3%""_gAqgy:@4j(x@a,I(wW$hsFQݤ2^6p^NRAxl*VX9r]Fu]WDd6'>x:wLdrRۤ1_>P/9|pc&a[o]7Czj{oDDocq4,54I%$eT#C[L4fqt)]'D^+xk/߲oǑ-&cX3FxN:(" /OAu2g =|`W" {S5n`u:󌌑Q )@"Dtdmx9Jm^t166Y`l'妘;*+?UQ. tw[Pnߤ! Rb!`#" B-7DɸUuI&QqQݔG$"""(lBD!3u@xяe˖o /ƊG`Jp[5\[30H%DAB @<t"Bo|r-22WY%|PFdzeCDD,ʶ۔":N]鐃<"cf G9|6uր/rqZD6Â#0Gq8 kqᩧo%Շ'{ v6A;"BBrA I[ (Y=bⱓM>('9ϣlDDlѦKͩӦ%\^?ﭶ<_~饰i&(;"wEW\wؠ~$Жp^ 'tbQR "BtmƤ2[iM1Po˩6!9)ر#Y\ȧ#""j{w2g;](@DDĂ*rM)z9WAw?xA>w~CM_G /ȧ 8LD 8*7Fɸ޷ۙqdhr?#X0 r" c~v;,Qhi;hMzꔅ0/;3gm A L8hMIdG/u18 %l&""AC*GBDU VnvaQ2=2&SX\o3KGDDJ*#D(HrDNo1鸌=au^u>u v9]. "BԩoSʂ  x1MqsI1Ȇx\/'&Pjt" 6<Cv!G:S.D! I]M(׻ʺT#٢\O4D1ca@ J]8DЄlM6Q2wuGfEh9r֯_  x;#!"Z @ CDMĶecqoY3l(;G.͙3'cGDםQ2)@! 
DTuŽw &NDXG22=3"Bt$ @9"""ă&euL1Uuzz} 刌t"ºH*eA@('LD㧉7wp/Sl.z:cml2u "N6T;|Oǟ68JRrBMqsI%z,c>eS=fr=>pp9aɈ鄈!A"J'*SNwFgFDFBDOO+@M'"~M64~-xn˺#"AG*cwFLdDr u|fHrA@ D lM6Q22&\yHDlwF=رcy\)  P6%"&d%z,cvZ)-\T3IDAT} eGu]n$ $5vKI I1ee/ aA",c;l/KZM a,$ԚA[jz=+o{Nϩ}ѽfm'ɷk.suי#G4xC~CTQe`)1#4˝ J8cu)~4+K1[чI)DTvΝ#i!%]-ʀ24"@p p)7lhuחҗ0[ч.D(<2b4BtIsرieEPlz˖-3+Vh xιSTWF7p9|p3NWD(: xQՕe@f`ʕfժUMqh!P!rj"W2 (y YƠ(lZ&Xp"kv^qDWZՒj"e@PXzusuTq"̗noٸKR l|J}NCWFW_};fQ_h e@d救: " yI>|&$e@u|fh"|&&tʀ2>+•Qצh"OAeWEZZ}U># ]o tT*8BBӆJqFsj{ǐ/;5ĉëҞ2 (=3|rvNh!j[h߾}ƄN4XPJ~A_;8(ʀ20N6li8-D-}Z:i2 ,e͜t8GٸKR l|Jfj>#+"i_PŀGKEC#BD T%ˀ"-D^-=@ L-DZ?B-DP, [[m8uחҗ0QsKFoqTW%ɀᢅ?hH6 b-DĐJe@X h!-۠q_I_ ZG>\!M_ (jQ ߈II0pr×t@~I&Uj!"JU*Bc`܅oȤ$t}1CG8*KvW9p"Άʀ2$B4\G  JH={؋'1J"P$cOڟu?~l\jIWT!36CgrH>RIu)gӤz c޹'sJ S20 ,B$TNN7tkWB+ۿ kaL_ |{7w8N]|׍Ԣ , ͌-hH6nR)DSA)PE/ ڃ]" b9{0+gg>|鋆S.D|&ݕl\l͇s} lxaȎ[s0 BfJVQ%7/pS,ns!9?nV/[Yʀ2 1hޚYWQxETy_wJ 56G+}0P'=WbnL!I hM#+"C!nм=x'Ű/caaw{+D /Bk~뾯7m)+טNӧ/D`79a|.55By_5!gM^>o<>'=8ć3j1_;siه .]sWx!;,0>"n6{7jc{ޙC>$?j;.fO20nQ&LOb/f~͇m[H)<.n"l>d9o^!>| _o>A6^`!>|؞yƹ#x5_xBb+߽qp.>pn5hP~i/7^P~/;殖~g?ƍϲ^{m:lVߺi7+[mAB 1oF!T;Ǿ|yWw m|ߝ_{<ޞ].mB7vI+Il+)}_C>"QM6Z$k ="rOܫξEDߚ[foK5?|`G[!<^o7v_mH>p ч!\{%wew4_~zeȏ}f{fۯm#`>?oN|Ҝf+_s42\>j7}W-Bc7iz[ڔigk!ڹs8911[c8m/bvCcxv7BDm[ B*=JVfjm[{۸ #܆k(x!ɹoxy k ڽ7F7>Go;Cxg\AP!ӝw?7,nesϷFTBj"ْ*ARѸmo_j8Hw!_I!B*@$]fFk8ky/v^^w[OtA"P|tQСp_!mO6zr{j`w#_nh9<|w6;np}j91{%&cs.ju;Fugo8\T"jTN  tIIPbJl<:uS1s 3c[Kڋ"DXrGSp[wvb\ž'2 gSk9U77(k施P+!b}V*i\ugJNYa/_e͝gf!}׷}">gv>؄m\uy-hڢ޾iN{DWc悫#ƻ>->e~O7iBS]'Zٖd/"BɥtE/O5M+|7\c?7GTˢ?gD ? 
LpGB;>߯boͽ嶿n0 fR}I>|k^*a`\oؤ x} ;ϱ5 <9GRgv)"~􂳶k/~>9|#f!B{jp>Ǐ/?yl={m_{_d ÷?psӭU WcoORb)dF!-150us[~ X4>2|y!B(tK}~m7|Wzh[w}y=pU!1} WW5W.WzsB;n6~36\Hw7?m-g/i%/^Mҗu?ۙh=Lo$bb6O]ϱFso{Ǩ?aH-BB`3&솾g쫂n:|_D]7'4ty6{g5oGG{ƫ9skٻ瞺˸/qOY[0E'[>5WۗOߑzxjs.[)C4BkC> |IIPb6O]ϱFso{ǨaRqvKإT}s?4{7#*yjśbQ_Ãnp傫o<޾ȍpfoѓy9(^ k Z]~ է @o¤$H>K?@GRMGv)%cI/bZ|| cl߼s־]z&ɾNm)_hR0l>Fz(~}of?A}o.<1؟f:ْ/"mCߵoy_qUK/Ƿ`M] Ѯ] ̟6z a$tK9`㍏GB)0PhxsI{]z &ǣ)]g[[y}(t}w]BªM pIIxK9`Fk$cmRA:\)ar!n߷?s?\G;^K%ڔe @B7^}s}uq[Hw}%})6j|Ndse U-JBR?>2x/B~ܾ_8o&Je@gB7`]YxLH\`G= ©dH>˶̾5afu?tx"R|Bd⤅(`s}¯z[}WB:2QN+Ak㶮/slc ,DH+@B4J,x[x)̷X2 3PMtWbX CG1^/ldҝAymitI]i!3ixb߻h| e+6>} RPF~N9p]Ĺ6UG<͡%A>drpEci!B֭[!ہ̉'5PP/_n֮][:yߢ$@R]ϱZc N,DH[#GÇ+Q2 (=1zjjժN٩Mttc]/l9y衇lܝ.@۵q}Rg c>ܹcsEsjp߲e D+"\iSeOp%+m݃4|%$@䶮җ0>h|E>&+"MVa|}p,hST&L:Ic||\Jcq?+"ؐ6KcXЦ (y%DsB7^I+͇unr}裥`$\!'-D|tɆ]n|J_Z!R0?lkIH> MرcF2 (l(:+VhGP+6)K8)g-'|.3B0m$}"MPie"KdúR8⍏k ǥ4s=!In~BhSe`17`ITKr}Rg c>u'BGڕe`3@o.M=\?h|}PLj<Ǒ>(D0P!m>Z6e@X o1sb8B'K6<ڔe@VO<1MtXG^s㤾|S#[Hf"$M6UrhSe`ԨMܾ\1Gk-g<#> dlhSe`!1W9$=l-͇Ő4$]D*F)ʀ2'߀kҗ% lXd#`K)DH%׮Vʀ2&Eoxy|h6I2wrqJp:묳qԦ (c?yf]|#/96vj|.dse f$ 5ƍi掣}e@PxC5ͷdxWҗb`&ɐ0.yN| Օe@X, ܒZ]ϱZ!R0r_p˗/7g}#"U2 ,PpK{~|14>Fki>;ab~R0`!0IL3tmՕe`A3oa!|C9>>Okm)>_>%)I)DKŵ}_ `j$/զ (8UiclTwcjPnB`_SOm~ o>e@Pb G9"t="ڣ#Ϟ.rIR,D"/j͝րk׮WJy{ m[8)FQ%JjL'$(q5\?w>dHx B,}zW~j9v^|qK"}cSSs*n3ڀJV/SεC8Wnd0648s\7BG΃b|v7煈I]b(Je`mPScB8'0kc1qbsMνǐ/Kyb93;wKO] qP+%;>0hv"_LQ>yQԪ 7~J6PϗjpK?SzJ}Ď4wul-B ucG ͥEG'+")/s3˜ψB1RJ6ԘNI6~ ƍ}r!_n~jd R+p>"8C C>wK}-׎+"+Dș|MW2 1uӠybx?.\Z\s}Rظsmv GA!!P?ŧ5]q %WbqR|[ln)q1lkw)7&uƣOXl Ǹzr!B!_n.t,Ï: 9+D%rb 6Xϟ+s.c-KKlH9`CsZvb9&P!6RC>X5bhnݝxj* m[vTE"|Yh1!ϗcw>p?scKRL X4wޭu#8֧ڵO͹}7cl%vĠj( 4/Yӎ' (e &97O އɱKX6}ƒ=CP,aRplH bݗ!a|6ѤgjBR6R7%wީq!\O»6yqS arlkkǘű!)DۗyHq4_ r)f#POB䋓${g)+7?C1 db$9(Dpۗ~n^)gyK9\_JR_R! 
/P20 n69q!lO§J0nLYrlcc?ؘ>Tv7K/%t1~8)y\4G7n >JZ(0\rT86\1\r6X\)KK8k096v4wέu#86?`#M#kSbJ1CsZcñ.:0TQɝnNl[b\]OHy}6]X/$;rql>sa?"D)6K|vi\``( cԯ po$sZ1ldwmn4V35o<ŖxLHB i#L CzF9s/=|('vtd(e@ Sg9v bo)rp>,hx=X'4nS6R[i\ya&͍|$S0n!]ʙe Rܜ)}Ώ ӹťsqI>A!Bմ:Z4|cp?<xKue`G|Rc_ڰl &5W· c?0Ԥ/Gy I4|s c>ѤuB$G((y A^7|9v ;M6-'dGmT|IyF B8>jze*@G!B,}F ,T$u-)8&.a%'a|Io=Tpܓr9rO!<Ֆ#o|ةI㒏d 1^qTF {ܰhq.\J ϥ20 6yp!W>7nI(G И#Ih3 %kC #L(7ǐ(7/W @h2^NTlXl>lݗ;7B9s0%XBD RK!|_,~ju'BY.ͭVn3~B?KÅ%>_L=Kl GyIQޘ -DH&e)7;zS¥梜*I0@r瓛+”|1}eĿ/$Sq`)H=zoSBo;0 IHCcjJÆ%>_L-;΍/W?Gz.|2)Dj1NnPLlOQq\Ƿ9/BIsr`cٱVKJ9\ŧC9 ,5sB}9S8“O8*DOeW"S-ɭ1&KM}v[sK~R†$US20 bKr`cR(w],,'"$ m>^[CL㡗ܱ 2 RJJ (X`b<257 )C}и9(WtUZRS20M l6M͕ ŏG% d*tsP?e3ǎ{hSs$wr;P!J/ڕq0Σ$wjL . C>PW 0PcSv?'l9YsZBAs6iG,笋AO-Dn@͋x͗,֙3KK"lӀyĊǥZRYTbc4N4`i9s8##Z0ІN4Hv7&%Jb[GZ$"TF-%b]Y2g7BDqɚלu8GzJrS̏;l+Qp-O1$Kƣؐ; ()dֆιKޒ؜,_WirtsHkWT0܍9ODQ|\yl~{Ss5N$654Gn\.x(C|X?GoĉkJ7"K, Y3$Qs5\ $B͹wU[~Kt<9昃B tJcK㈰Ǖ]]j_XH -/ܻֈ]*D O*)GW Q], ܜR81^%nO9JdqB6ؔIw5rt]ͥD QuIc\jo8Z󩑧K.I+G1ĺl]bi|Zy(g|K_!Օe@ V.yrj9Sƍ"L]#G5GBcY0}mnVyj䨹՚4RR!u4xn玣RgMs[{j5E9Sdf"pLqGyK[in%ј@QvkϭVZy(oLcܢBD·yn! a&.sǦX_휵>r߸ -Ͱ}ΗrW!T*>7>r/'}7vBēU@8Z\f 20ͮq =z&-z!)+G!]s)~&UGs3S3'Oe󺹔9cyE4.Sשe`$:3uJsh 8Y|c]X* k\l>ƵViBDZ(Ƶ.Q 1R7q7qbg`)D7qG'q*<&){q; 0 YqHx4$E Ĩڔ> ?Mxu2h!wٔ)GԸ%s1 W^mW|+Mz&Ij\hA. 
xI^bخIuj|iX3wҜLz|6B6-̃s3BU]X L:-L:"Z4II uT30LV8aU q5͟㘧YP2f`Z7Mw:O~BDÏcjZsBThB%ܧ}":i6u7풯W Ѵ-_-}NʳֻPk!7in_[y]Z(Z_g/=.-D!.ԧ (Bȉ;I""b+LZ̪T ~RP 0i!3R>M.uLk!"I.¤ΜJe` , \tY(Ni/LZLTbۨEZФΌJeM--BD',GY * ,Mw~qmp-hv8IENDB`atlas-0.7.2/doc/website/static/img/undraw_docusaurus_mountain.svg000066400000000000000000000753411431455511600253250ustar00rootroot00000000000000 atlas-0.7.2/doc/website/static/img/undraw_docusaurus_react.svg000066400000000000000000001062001431455511600245560ustar00rootroot00000000000000 atlas-0.7.2/doc/website/static/img/undraw_docusaurus_tree.svg000066400000000000000000000270101431455511600244200ustar00rootroot00000000000000docu_treeatlas-0.7.2/go.mod000066400000000000000000000015121431455511600137360ustar00rootroot00000000000000module ariga.io/atlas go 1.18 require ( github.com/DATA-DOG/go-sqlmock v1.5.0 github.com/go-openapi/inflect v0.19.0 github.com/hashicorp/hcl/v2 v2.10.0 github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942 github.com/zclconf/go-cty v1.8.0 golang.org/x/mod v0.5.1 ) require ( github.com/agext/levenshtein v1.2.1 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/google/go-cmp v0.5.6 // indirect github.com/kr/text v0.2.0 // indirect github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect golang.org/x/text v0.3.7 // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) atlas-0.7.2/go.sum000066400000000000000000000165041431455511600137720ustar00rootroot00000000000000github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= github.com/agext/levenshtein v1.2.1/go.mod 
h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4= github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/hashicorp/hcl/v2 v2.10.0 h1:1S1UnuhDGlv3gRFV4+0EdwB+znNP5HmcGbIqwnSCByg= github.com/hashicorp/hcl/v2 v2.10.0/go.mod h1:FwWsfWEjyV/CMj8s/gqAuiviY72rJ1/oayI9WftqcKg= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod 
h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM= github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942 h1:t0lM6y/M5IiUZyvbBTcngso8SZEZICH7is9B6g/obVU= github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/zclconf/go-cty 
v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= github.com/zclconf/go-cty v1.8.0 h1:s4AvqaeQzJIu3ndv4gVIhplVD0krU+bgrcLSVUnaWuA= github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38= golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/tools 
v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= atlas-0.7.2/internal/000077500000000000000000000000001431455511600144455ustar00rootroot00000000000000atlas-0.7.2/internal/ci/000077500000000000000000000000001431455511600150405ustar00rootroot00000000000000atlas-0.7.2/internal/ci/ci.tmpl000066400000000000000000000117001431455511600163300ustar00rootroot00000000000000# # # # # # # # # # # # # # # # # CODE GENERATED - DO NOT EDIT # # # # # # # # # # # # # # # # name: Continuous Integration on: pull_request: paths-ignore: - 'doc/**' push: branches: - master paths-ignore: - 'doc/**' jobs: lint: runs-on: ubuntu-latest steps: - uses: actions/setup-go@v2 with: go-version: 1.18 - uses: actions/checkout@v2 - name: Run linters uses: golangci/golangci-lint-action@v3 with: args: --verbose generate-cmp: runs-on: ubuntu-latest 
steps: - uses: actions/checkout@v2.4.0 - uses: actions/setup-go@v2 with: go-version: '1.19' - uses: actions/cache@v2.1.5 with: path: ~/go/pkg/mod key: {{ "${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}" }} restore-keys: | {{ "${{ runner.os }}-go-" }} - name: run "go generate ./..." run: go generate ./... - name: go generate cmd/atlas working-directory: cmd/atlas run: go generate ./... - name: Verify generated files are checked in properly run: | status=$(git status --porcelain | grep -v "go.\(sum\|mod\)" | cat) if [ -n "$status" ]; then echo "you need to run 'go generate ./...' and commit the changes" echo "$status" exit 1 fi unit: runs-on: ubuntu-latest strategy: matrix: go: [ '1.18', '1.19' ] steps: - uses: actions/checkout@v2 - uses: actions/setup-go@v2 with: go-version: {{ "${{ matrix.go }}" }} - uses: actions/cache@v2.1.5 with: path: ~/go/pkg/mod key: {{ "${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}" }} restore-keys: | {{ "${{ runner.os }}-go-" }} - name: Run sql tests run: go test -race ./... working-directory: sql - name: Run schemahcl tests run: go test -race ./... working-directory: schemahcl cli: runs-on: ubuntu-latest strategy: matrix: go: [ '1.19' ] steps: - uses: actions/checkout@v2 - uses: actions/setup-go@v2 with: go-version: {{ "${{ matrix.go }}" }} - uses: actions/cache@v2.1.5 with: path: ~/go/pkg/mod key: {{ "${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}" }} restore-keys: | {{ "${{ runner.os }}-go-" }} - name: Run cli tests run: go test -race ./... working-directory: cmd/atlas integration: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: {{ "${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}" }} restore-keys: | {{ "${{ runner.os }}-go-" }} - name: Run integration tests for HCL working-directory: internal/integration/hclsqlspec run: go test -race -count=2 -v ./... 
revisions: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2.3.4 with: fetch-depth: 0 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: {{ "${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}" }} restore-keys: | {{ "${{ runner.os }}-go-" }} - name: Checkout origin/master run: git checkout origin/master - name: Create revisions from master run: go run main.go migrate apply --dir file://internal/cmdapi/testdata/sqlite --url sqlite://db?_fk=1 working-directory: cmd/atlas - name: Checkout previous HEAD run: git checkout - - name: Migrate revisions table to HEAD run: go run main.go migrate apply --dir file://internal/cmdapi/testdata/sqlite --url sqlite://db?_fk=1 working-directory: cmd/atlas {{ range $ }} integration-{{ .Version }}: runs-on: ubuntu-latest {{ if .Image }}services: {{ .Version }}: image: {{ .Image }} {{ with .Env }}env:{{ range . }} {{ . }}{{ end }} {{- end }} {{ with .Ports }}ports:{{ range . }} - {{ . }}{{ end }} {{- end }} {{ with .Options }}options: >-{{ range . }} {{ . }}{{ end }} {{- end }}{{ end }} steps: - uses: actions/checkout@v2.3.4 - uses: actions/setup-go@v2 with: go-version: 1.19 - uses: actions/cache@v2.1.6 with: path: ~/go/pkg/mod key: {{ "${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}" }} restore-keys: | {{ "${{ runner.os }}-go-" }} - name: Run integration tests for {{ .Version }} working-directory: internal/integration run: go test -race -count=2 -v -run="{{ .Regex }}" -version="{{ .Version }}" ./... 
{{ end }}atlas-0.7.2/internal/ci/cockroach/000077500000000000000000000000001431455511600167745ustar00rootroot00000000000000atlas-0.7.2/internal/ci/cockroach/Dockerfile.tmpl000066400000000000000000000002121431455511600217340ustar00rootroot00000000000000FROM cockroachdb/cockroach:{{ .Version}} EXPOSE 8080 EXPOSE 26257 ENTRYPOINT ["/cockroach/cockroach", "start-single-node", "--insecure"]atlas-0.7.2/internal/ci/cockroach/main.go000066400000000000000000000012341431455511600202470ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package main import ( _ "embed" "fmt" "os" "text/template" ) type params struct { Version string } //go:embed Dockerfile.tmpl var dockerTmpl string func main() { if len(os.Args) < 2 { fmt.Println("please supply version as argument e.g. 'v22.1.0'") os.Exit(1) } p := params{ Version: os.Args[1], } t, err := template.New("docker").Parse(dockerTmpl) if err != nil { panic(err) } err = t.Execute(os.Stdout, p) if err != nil { panic(err) } } atlas-0.7.2/internal/ci/main.go000066400000000000000000000071421431455511600163170ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package main import ( "bytes" _ "embed" "log" "os" "text/template" ) //go:generate go run main.go // Job defines an integration job to run. 
type Job struct { Version string // version to test (passed to go test as flag which database dialect/version) Image string // name of service Regex string // run regex Env []string // env of service Ports []string // port mappings Options []string // other options } var ( //go:embed ci.tmpl t string mysqlOptions = []string{ `--health-cmd "mysqladmin ping -ppass"`, `--health-interval 10s`, `--health-start-period 10s`, `--health-timeout 5s`, `--health-retries 10`, } mysqlEnv = []string{ "MYSQL_DATABASE: test", "MYSQL_ROOT_PASSWORD: pass", } pgOptions = []string{ "--health-cmd pg_isready", "--health-interval 10s", "--health-timeout 5s", "--health-retries 5", } pgEnv = []string{ "POSTGRES_DB: test", "POSTGRES_PASSWORD: pass", } jobs = []Job{ { Version: "mysql56", Image: "mysql:5.6.35", Regex: "MySQL", Env: mysqlEnv, Ports: []string{"3306:3306"}, Options: mysqlOptions, }, { Version: "mysql57", Image: "mysql:5.7.26", Regex: "MySQL", Env: mysqlEnv, Ports: []string{"3307:3306"}, Options: mysqlOptions, }, { Version: "mysql8", Image: "mysql:8", Regex: "MySQL", Env: mysqlEnv, Ports: []string{"3308:3306"}, Options: mysqlOptions, }, { Version: "maria107", Image: "mariadb:10.7", Regex: "MySQL", Env: mysqlEnv, Ports: []string{"4306:3306"}, Options: mysqlOptions, }, { Version: "maria102", Image: "mariadb:10.2.32", Regex: "MySQL", Env: mysqlEnv, Ports: []string{"4307:3306"}, Options: mysqlOptions, }, { Version: "maria103", Image: "mariadb:10.3.13", Regex: "MySQL", Env: mysqlEnv, Ports: []string{"4308:3306"}, Options: mysqlOptions, }, { Version: "postgres10", Image: "postgres:10", Regex: "Postgres", Env: pgEnv, Ports: []string{"5430:5432"}, Options: pgOptions, }, { Version: "postgres11", Image: "postgres:11", Regex: "Postgres", Env: pgEnv, Ports: []string{"5431:5432"}, Options: pgOptions, }, { Version: "postgres12", Image: "postgres:12.3", Regex: "Postgres", Env: pgEnv, Ports: []string{"5432:5432"}, Options: pgOptions, }, { Version: "postgres13", Image: "postgres:13.1", Regex: 
"Postgres", Env: pgEnv, Ports: []string{"5433:5432"}, Options: pgOptions, }, { Version: "postgres14", Image: "postgres:14", Regex: "Postgres", Env: pgEnv, Ports: []string{"5434:5432"}, Options: pgOptions, }, { Version: "tidb5", Image: "pingcap/tidb:v5.4.0", Regex: "TiDB", Ports: []string{"4309:4000"}, }, { Version: "tidb6", Image: "pingcap/tidb:v6.0.0", Regex: "TiDB", Ports: []string{"4310:4000"}, }, { Version: "sqlite", Regex: "SQLite.*", }, { Version: "cockroach", Image: "ghcr.io/ariga/cockroachdb-single-node:v21.2.11", Regex: "Cockroach", Ports: []string{"26257:26257"}, }, } ) func main() { var buf bytes.Buffer if err := template.Must(template.New("").Parse(t)).Execute(&buf, jobs); err != nil { log.Fatalln(err) } if err := os.WriteFile("../../.github/workflows/ci.yml", buf.Bytes(), 0600); err != nil { log.Fatalln(err) } } atlas-0.7.2/internal/integration/000077500000000000000000000000001431455511600167705ustar00rootroot00000000000000atlas-0.7.2/internal/integration/README.md000066400000000000000000000031371431455511600202530ustar00rootroot00000000000000### This directory contains all integration tests for Atlas. The provided `docker-compose.yaml` file contains images for each database the integration tests are run on. You can start them by calling: ```shell docker-compose --project-name atlas-integration up -d ``` The whole integration suite is then run by executing within this directory: ```shell go test ./... ``` #### Selectively running tests Running all integration tests (and keeping all database containers up all the time) consumes time and resources (and power). You can execute only some of the tests by using the `-run` and `-dialect` flags: The below examples don't require for you to have all docker containers running, instead only the ones used in the tests have to be up. 
Consider the following test in `mysql_test.go`: ```go func TestMySQL_Executor(t *testing.T) { myRun(t, func(t *myTest) { testExecutor(t) }) } ``` If you'd wanted to run that test only for mysql56, simply pass its full name into the `-run` flag: ```shell # Run TestMySQL_Executor for all mysql versions go test -run='MySQL_Executor' ./... # Run TestMySQL_Executor for mysql 5.6 only go test -run='MySQL_Executor/mysql56' ./... ``` If you'd like to run the above for Postgres 10, change the name respectively: ```shell # Run TestMySQL_Executor for all mysql versions go test -run='Postgres_Executor' ./... # Run TestMySQL_Executor for postgres 10 only go test -run='Postgres_Executor/postgres10' ./... ``` If you want to run all tests for one specific dialect, like only TiDB 5, you can use the `-dialect` flag: ```shell go test -run='TiDB' -dialect='tidb5' ./... ```atlas-0.7.2/internal/integration/cockroach_test.go000066400000000000000000001026561431455511600223240ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package integration import ( "context" "database/sql" "fmt" "log" "strings" "sync" "testing" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/postgres" "ariga.io/atlas/sql/schema" "entgo.io/ent/dialect" entschema "entgo.io/ent/dialect/sql/schema" entmigrate "entgo.io/ent/entc/integration/ent/migrate" _ "github.com/lib/pq" "github.com/stretchr/testify/require" ) type crdbTest struct { *testing.T db *sql.DB drv migrate.Driver rrw migrate.RevisionReadWriter version string port int once sync.Once } var crdbTests = map[string]*crdbTest{ "cockroach": {port: 26257}, } func crdbRun(t *testing.T, fn func(*crdbTest)) { for version, tt := range crdbTests { if flagVersion == "" || flagVersion == version { t.Run(version, func(t *testing.T) { tt.once.Do(func() { var err error tt.version = version tt.rrw = &rrw{} tt.db, err = sql.Open("postgres", fmt.Sprintf("host=localhost port=%d user=root dbname=defaultdb password=pass sslmode=disable", tt.port)) if err != nil { log.Fatalln(err) } dbs = append(dbs, tt.db) // close connection after all tests have been run tt.drv, err = postgres.Open(tt.db) if err != nil { log.Fatalln(err) } }) tt := &crdbTest{T: t, db: tt.db, drv: tt.drv, version: version, port: tt.port, rrw: tt.rrw} fn(tt) }) } } } func TestCockroach_Executor(t *testing.T) { crdbRun(t, func(t *crdbTest) { testExecutor(t) }) } func TestCockroach_AddDropTable(t *testing.T) { crdbRun(t, func(t *crdbTest) { testAddDrop(t) }) } func TestCockroach_Relation(t *testing.T) { crdbRun(t, func(t *crdbTest) { testRelation(t) }) } func TestCockroach_AddIndexedColumns(t *testing.T) { crdbRun(t, func(t *crdbTest) { s := &schema.Schema{ Name: "public", } usersT := &schema.Table{ Name: "users", Schema: s, Columns: []*schema.Column{{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}, Attrs: []schema.Attr{&postgres.Identity{}}}}, } usersT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: usersT.Columns[0]}}} t.migrate(&schema.AddTable{T: usersT}) 
t.dropTables(usersT.Name) usersT.Columns = append(usersT.Columns, &schema.Column{ Name: "a", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, Default: &schema.Literal{V: "10"}, }, &schema.Column{ Name: "b", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, Default: &schema.Literal{V: "10"}, }, &schema.Column{ Name: "c", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, Default: &schema.Literal{V: "10"}, }) parts := usersT.Columns[len(usersT.Columns)-3:] usersT.Indexes = append(usersT.Indexes, &schema.Index{ Unique: true, Name: "a_b_c_unique", Parts: []*schema.IndexPart{{C: parts[0]}, {C: parts[1]}, {C: parts[2]}}, }) changes := t.diff(t.loadUsers(), usersT) require.NotEmpty(t, changes, "usersT contains 3 new columns and 1 new index") t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) // Dropping a column involves in a multi-column // index causes the index to be dropped as well. 
usersT.Columns = usersT.Columns[:len(usersT.Columns)-1] changes = t.diff(t.loadUsers(), usersT) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, t.loadUsers()) usersT = t.loadUsers() _, ok := usersT.Index("a_b_c_unique") require.False(t, ok) }) } func TestCockroach_AddColumns(t *testing.T) { crdbRun(t, func(t *crdbTest) { usersT := t.users() t.dropTables(usersT.Name) t.migrate(&schema.AddTable{T: usersT}) usersT.Columns = append( usersT.Columns, &schema.Column{Name: "a", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "bytea"}}}, &schema.Column{Name: "b", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double precision", Precision: 10}}, Default: &schema.Literal{V: "10.1"}}, &schema.Column{Name: "c", Type: &schema.ColumnType{Type: &schema.StringType{T: "character"}}, Default: &schema.Literal{V: "'y'"}}, &schema.Column{Name: "d", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric", Precision: 10, Scale: 2}}, Default: &schema.Literal{V: "0.99"}}, &schema.Column{Name: "e", Type: &schema.ColumnType{Type: &schema.JSONType{T: "json"}}, Default: &schema.Literal{V: "'{}'"}}, &schema.Column{Name: "f", Type: &schema.ColumnType{Type: &schema.JSONType{T: "jsonb"}}, Default: &schema.Literal{V: "'1'"}}, &schema.Column{Name: "g", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float", Precision: 10}}, Default: &schema.Literal{V: "'1'"}}, &schema.Column{Name: "h", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float", Precision: 30}}, Default: &schema.Literal{V: "'1'"}}, &schema.Column{Name: "i", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float", Precision: 53}}, Default: &schema.Literal{V: "1"}}, &schema.Column{Name: "j", Type: &schema.ColumnType{Type: &postgres.SerialType{T: "serial"}}}, &schema.Column{Name: "m", Type: &schema.ColumnType{Type: &schema.BoolType{T: "boolean"}, Null: true}, Default: &schema.Literal{V: "false"}}, &schema.Column{Name: "n", Type: &schema.ColumnType{Type: &schema.SpatialType{T: 
"geometry"}, Null: true}, Default: &schema.Literal{V: "'POINT(1 2)'"}}, &schema.Column{Name: "o", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "geometry"}, Null: true}, Default: &schema.Literal{V: "'LINESTRING(0 0, 1440 900)'"}}, &schema.Column{Name: "q", Type: &schema.ColumnType{Type: &postgres.ArrayType{Type: &schema.StringType{T: "text"}, T: "text[]"}, Null: true}, Default: &schema.Literal{V: "'{}'"}}, ) changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 14) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) }) } func TestCockroach_ColumnInt(t *testing.T) { ctx := context.Background() run := func(t *testing.T, change func(*schema.Column)) { crdbRun(t, func(t *crdbTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}}, } err := t.drv.ApplyChanges(ctx, []schema.Change{&schema.AddTable{T: usersT}}) require.NoError(t, err) t.dropTables(usersT.Name) change(usersT.Columns[0]) changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) }) } t.Run("ChangeNull", func(t *testing.T) { run(t, func(c *schema.Column) { c.Type.Null = true }) }) t.Run("ChangeDefault", func(t *testing.T) { run(t, func(c *schema.Column) { c.Default = &schema.RawExpr{X: "0"} }) }) } func TestCockroach_ColumnArray(t *testing.T) { crdbRun(t, func(t *crdbTest) { usersT := t.users() t.dropTables(usersT.Name) t.migrate(&schema.AddTable{T: usersT}) // Add column. 
usersT.Columns = append( usersT.Columns, &schema.Column{Name: "a", Type: &schema.ColumnType{Raw: "bigint[]", Type: &postgres.ArrayType{Type: &schema.IntegerType{T: "bigint"}, T: "bigint[]"}}, Default: &schema.Literal{V: "'{1}'"}}, ) changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) // Check default. usersT.Columns[2].Default = &schema.RawExpr{X: "ARRAY[1]"} ensureNoChange(t, usersT) // Change default. usersT.Columns[2].Default = &schema.RawExpr{X: "ARRAY[1,2]"} changes = t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) }) } func TestCockroach_Enums(t *testing.T) { crdbRun(t, func(t *crdbTest) { ctx := context.Background() usersT := &schema.Table{ Name: "users", Schema: t.realm().Schemas[0], Columns: []*schema.Column{ {Name: "state", Type: &schema.ColumnType{Type: &schema.EnumType{T: "state", Values: []string{"on", "off"}}}}, }, } t.Cleanup(func() { _, err := t.drv.ExecContext(ctx, "DROP TYPE IF EXISTS state, day") require.NoError(t, err) }) // Create table with an enum column. err := t.drv.ApplyChanges(ctx, []schema.Change{&schema.AddTable{T: usersT}}) require.NoError(t, err, "create a new table with an enum column") t.dropTables(usersT.Name) ensureNoChange(t, usersT) // Add another enum column. usersT.Columns = append( usersT.Columns, &schema.Column{Name: "day", Type: &schema.ColumnType{Type: &schema.EnumType{T: "day", Values: []string{"sunday", "monday"}}}}, ) changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) err = t.drv.ApplyChanges(ctx, []schema.Change{&schema.ModifyTable{T: usersT, Changes: changes}}) require.NoError(t, err, "add a new enum column to existing table") ensureNoChange(t, usersT) // Add a new value to an existing enum. 
e := usersT.Columns[2].Type.Type.(*schema.EnumType) e.Values = append(e.Values, "tuesday") changes = t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) err = t.drv.ApplyChanges(ctx, []schema.Change{&schema.ModifyTable{T: usersT, Changes: changes}}) require.NoError(t, err, "append a value to existing enum") ensureNoChange(t, usersT) // Add multiple new values to an existing enum. e = usersT.Columns[2].Type.Type.(*schema.EnumType) e.Values = append(e.Values, "wednesday", "thursday", "friday", "saturday") changes = t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) err = t.drv.ApplyChanges(ctx, []schema.Change{&schema.ModifyTable{T: usersT, Changes: changes}}) require.NoError(t, err, "append multiple values to existing enum") ensureNoChange(t, usersT) }) } func TestCockroach_Ent(t *testing.T) { crdbRun(t, func(t *crdbTest) { // Cockroach doesn't support macaddr but its in the integration tests of ent for _, ff := range entmigrate.FieldTypesColumns { if st := ff.SchemaType; st[dialect.Postgres] == "macaddr" { c := ff t.Cleanup(func() { c.SchemaType = st }) c.SchemaType = nil } } testEntIntegration(t, dialect.Postgres, t.db, entschema.WithAtlas(true)) }) } func TestCockroach_HCL(t *testing.T) { full := ` schema "public" { } table "users" { schema = schema.public column "id" { type = int } primary_key { columns = [table.users.column.id] } } table "posts" { schema = schema.public column "id" { type = int } column "tags" { type = sql("text[]") } column "author_id" { type = int } foreign_key "author" { columns = [ table.posts.column.author_id, ] ref_columns = [ table.users.column.id, ] } primary_key { columns = [table.users.column.id] } } ` empty := ` schema "public" { } ` crdbRun(t, func(t *crdbTest) { testHCLIntegration(t, full, empty) }) } func TestCockroach_HCL_Realm(t *testing.T) { crdbRun(t, func(t *crdbTest) { t.dropSchemas("second") realm := t.loadRealm() hcl, err := postgres.MarshalHCL(realm) require.NoError(t, err) wa := string(hcl) + ` schema "second" 
{ } ` t.applyRealmHcl(wa) realm, err = t.drv.InspectRealm(context.Background(), &schema.InspectRealmOption{}) require.NoError(t, err) _, ok := realm.Schema("public") require.True(t, ok) _, ok = realm.Schema("second") require.True(t, ok) }) } func TestCockroach_CLI(t *testing.T) { h := ` schema "public" { } table "users" { schema = schema.public column "id" { type = bigint } primary_key { columns = [table.users.column.id] } }` t.Run("SchemaInspect", func(t *testing.T) { crdbRun(t, func(t *crdbTest) { testCLISchemaInspect(t, h, t.url(""), postgres.EvalHCL, "-s", "public") }) }) t.Run("SchemaApply", func(t *testing.T) { crdbRun(t, func(t *crdbTest) { testCLISchemaApply(t, h, t.url(""), "-s", "public") }) }) t.Run("SchemaApplyDryRun", func(t *testing.T) { crdbRun(t, func(t *crdbTest) { testCLISchemaApplyDry(t, h, t.url("")) }) }) t.Run("SchemaApplyWithVars", func(t *testing.T) { h := ` variable "tenant" { type = string } schema "tenant" { name = var.tenant } table "users" { schema = schema.tenant column "id" { type = int } } ` crdbRun(t, func(t *crdbTest) { testCLISchemaApply(t, h, t.url(""), "--var", "tenant=public", "-s", "public") }) }) t.Run("SchemaDiffRun", func(t *testing.T) { crdbRun(t, func(t *crdbTest) { testCLISchemaDiff(t, t.url("")) }) }) t.Run("SchemaApplyAutoApprove", func(t *testing.T) { crdbRun(t, func(t *crdbTest) { testCLISchemaApplyAutoApprove(t, h, t.url(""), "-s", "public") }) }) } func TestCockroach_CLI_MultiSchema(t *testing.T) { h := ` schema "public" { } table "users" { schema = schema.public column "id" { type = bigint } primary_key { columns = [table.users.column.id] } } schema "test2" { } table "users" { schema = schema.test2 column "id" { type = bigint } primary_key { columns = [table.users.column.id] } }` t.Run("SchemaInspect", func(t *testing.T) { crdbRun(t, func(t *crdbTest) { t.dropSchemas("test2") t.dropTables("users") testCLIMultiSchemaInspect(t, h, t.url(""), []string{"public", "test2"}, postgres.EvalHCL) }) }) t.Run("SchemaApply", 
func(t *testing.T) { crdbRun(t, func(t *crdbTest) { t.dropSchemas("test2") t.dropTables("users") testCLIMultiSchemaApply(t, h, t.url(""), []string{"public", "test2"}, postgres.EvalHCL) }) }) } func TestCockroach_DefaultsHCL(t *testing.T) { n := "atlas_defaults" crdbRun(t, func(t *crdbTest) { ddl := ` create table atlas_defaults ( string varchar(255) default 'hello_world', quoted varchar(100) default 'never say "never"', tBit bit(10) default B'10101', ts timestamp default CURRENT_TIMESTAMP, tstz timestamp with time zone default CURRENT_TIMESTAMP, number int default 42 ) ` t.dropTables(n) _, err := t.db.Exec(ddl) require.NoError(t, err) realm := t.loadRealm() spec, err := postgres.MarshalHCL(realm.Schemas[0]) require.NoError(t, err) var s schema.Schema err = postgres.EvalHCLBytes(spec, &s, nil) require.NoError(t, err) t.dropTables(n) t.applyHcl(string(spec)) ensureNoChange(t, realm.Schemas[0].Tables[0]) }) } func TestCockroach_Sanity(t *testing.T) { n := "atlas_types_sanity" ddl := ` create table atlas_types_sanity ( "tBit" bit(10) default B'100' null, "tBitVar" bit varying(10) default B'100' null, "tBoolean" boolean default false not null, "tBool" bool default false not null, "tBytea" bytea default E'\\001' not null, "tCharacter" character(10) default 'atlas' null, "tChar" char(10) default 'atlas' null, "tCharVar" character varying(10) default 'atlas' null, "tVarChar" varchar(10) default 'atlas' null, "tText" text default 'atlas' null, "tSmallInt" smallint default '10' null, "tInteger" integer default '10' null, "tBigInt" bigint default '10' null, "tInt" int default '10' null, "tInt2" int2 default '10' null, "tInt4" int4 default '10' null, "tInt8" int8 default '10' null, "tInet" inet default '127.0.0.1' null, "tGeometry" geometry default null, "tDate" date default current_date null, "tTime" time default current_time null, "tTimeWTZ" time with time zone default current_time null, "tTimeWOTZ" time without time zone default current_time null, "tTimestamp" timestamp 
default now() null, "tTimestampTZ" timestamptz default now() null, "tTimestampWTZ" timestamp with time zone default now() null, "tTimestampWOTZ" timestamp without time zone default now() null, "tTimestampPrec" timestamp(4) default now() null, "tDouble" double precision default 0 null, "tReal" real default 0 null, "tFloat8" float8 default 0 null, "tFloat4" float4 default 0 null, "tNumeric" numeric default 0 null, "tDecimal" decimal default 0 null, "tSmallSerial" smallserial , "tSerial" serial , "tBigSerial" bigserial , "tSerial2" serial2 , "tSerial4" serial4 , "tSerial8" serial8 , "tArray" text[10] default '{}' null, "tJSON" json default '{"key":"value"}' null, "tJSONB" jsonb default '{"key":"value"}' null, "tUUID" uuid default 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11' null, "tInterval" interval default '4 hours' null ); ` crdbRun(t, func(t *crdbTest) { t.dropTables(n) _, err := t.db.Exec(ddl) require.NoError(t, err) realm := t.loadRealm() require.Len(t, realm.Schemas, 1) ts, ok := realm.Schemas[0].Table(n) require.True(t, ok) expected := schema.Table{ Name: n, Schema: realm.Schemas[0], Columns: []*schema.Column{ { Name: "tBit", Type: &schema.ColumnType{Type: &postgres.BitType{T: "bit", Len: 10}, Raw: "bit", Null: true}, Default: &schema.RawExpr{X: "B'100'"}, }, { Name: "tBitVar", Type: &schema.ColumnType{Type: &postgres.BitType{T: "bit varying", Len: 10}, Raw: "bit varying", Null: true}, Default: &schema.RawExpr{X: "B'100'"}, }, { Name: "tBoolean", Type: &schema.ColumnType{Type: &schema.BoolType{T: "boolean"}, Raw: "boolean", Null: false}, Default: &schema.Literal{V: "false"}, }, { Name: "tBool", Type: &schema.ColumnType{Type: &schema.BoolType{T: "boolean"}, Raw: "boolean", Null: false}, Default: &schema.Literal{V: "false"}, }, { Name: "tBytea", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "bytea"}, Raw: "bytea", Null: false}, Default: &schema.RawExpr{X: "'\\x01':::BYTES"}, }, { Name: "tCharacter", Type: &schema.ColumnType{Type: &schema.StringType{T: 
"character", Size: 10}, Raw: "character", Null: true}, Default: &schema.RawExpr{X: "'atlas':::STRING"}, }, { Name: "tChar", Type: &schema.ColumnType{Type: &schema.StringType{T: "character", Size: 10}, Raw: "character", Null: true}, Default: &schema.RawExpr{X: "'atlas':::STRING"}, }, { Name: "tCharVar", Type: &schema.ColumnType{Type: &schema.StringType{T: "character varying", Size: 10}, Raw: "character varying", Null: true}, Default: &schema.RawExpr{X: "'atlas':::STRING"}, }, { Name: "tVarChar", Type: &schema.ColumnType{Type: &schema.StringType{T: "character varying", Size: 10}, Raw: "character varying", Null: true}, Default: &schema.RawExpr{X: "'atlas':::STRING"}, }, { Name: "tText", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}, Raw: "text", Null: true}, Default: &schema.RawExpr{X: "'atlas':::STRING"}, }, { Name: "tSmallInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "smallint"}, Raw: "smallint", Null: true}, Default: &schema.RawExpr{X: "10:::INT8"}, }, { Name: "tInteger", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}, Raw: "bigint", Null: true}, Default: &schema.RawExpr{X: "10:::INT8"}, }, { Name: "tBigInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}, Raw: "bigint", Null: true}, Default: &schema.RawExpr{X: "10:::INT8"}, }, { Name: "tInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}, Raw: "bigint", Null: true}, Default: &schema.RawExpr{X: "10:::INT8"}, }, { Name: "tInt2", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "smallint"}, Raw: "smallint", Null: true}, Default: &schema.RawExpr{X: "10:::INT8"}, }, { Name: "tInt4", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Raw: "integer", Null: true}, Default: &schema.RawExpr{X: "10:::INT8"}, }, { Name: "tInt8", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}, Raw: "bigint", Null: true}, Default: &schema.RawExpr{X: "10:::INT8"}, }, { Name: "tInet", Type: &schema.ColumnType{Type: 
&postgres.NetworkType{T: "inet"}, Raw: "inet", Null: true}, Default: &schema.RawExpr{X: "'127.0.0.1':::INET"}, }, { Name: "tGeometry", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "geometry"}, Raw: "geometry", Null: true}, }, { Name: "tDate", Type: &schema.ColumnType{Type: &schema.TimeType{T: "date"}, Raw: "date", Null: true}, Default: &schema.RawExpr{X: "current_date()"}, }, { Name: "tTime", Type: &schema.ColumnType{Type: &schema.TimeType{T: "time without time zone", Precision: intp(6)}, Raw: "time without time zone", Null: true}, Default: &schema.RawExpr{X: "current_time():::TIME"}, }, { Name: "tTimeWTZ", Type: &schema.ColumnType{Type: &schema.TimeType{T: "time with time zone", Precision: intp(6)}, Raw: "time with time zone", Null: true}, Default: &schema.RawExpr{X: "current_time():::TIMETZ"}, }, { Name: "tTimeWOTZ", Type: &schema.ColumnType{Type: &schema.TimeType{T: "time without time zone", Precision: intp(6)}, Raw: "time without time zone", Null: true}, Default: &schema.RawExpr{X: "current_time():::TIME"}, }, { Name: "tTimestamp", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp without time zone", Precision: intp(6)}, Raw: "timestamp without time zone", Null: true}, Default: &schema.RawExpr{X: "now():::TIMESTAMP"}, }, { Name: "tTimestampTZ", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp with time zone", Precision: intp(6)}, Raw: "timestamp with time zone", Null: true}, Default: &schema.RawExpr{X: "now():::TIMESTAMPTZ"}, }, { Name: "tTimestampWTZ", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp with time zone", Precision: intp(6)}, Raw: "timestamp with time zone", Null: true}, Default: &schema.RawExpr{X: "now():::TIMESTAMPTZ"}, }, { Name: "tTimestampWOTZ", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp without time zone", Precision: intp(6)}, Raw: "timestamp without time zone", Null: true}, Default: &schema.RawExpr{X: "now():::TIMESTAMP"}, }, { Name: "tTimestampPrec", Type: 
&schema.ColumnType{Type: &schema.TimeType{T: "timestamp without time zone", Precision: intp(4)}, Raw: "timestamp without time zone", Null: true}, Default: &schema.RawExpr{X: "now():::TIMESTAMP"}, }, { Name: "tDouble", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double precision", Precision: 53}, Raw: "double precision", Null: true}, Default: &schema.RawExpr{X: "0.0:::FLOAT8"}, }, { Name: "tReal", Type: &schema.ColumnType{Type: &schema.FloatType{T: "real", Precision: 24}, Raw: "real", Null: true}, Default: &schema.RawExpr{X: "0.0:::FLOAT8"}, }, { Name: "tFloat8", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double precision", Precision: 53}, Raw: "double precision", Null: true}, Default: &schema.RawExpr{X: "0.0:::FLOAT8"}, }, { Name: "tFloat4", Type: &schema.ColumnType{Type: &schema.FloatType{T: "real", Precision: 24}, Raw: "real", Null: true}, Default: &schema.RawExpr{X: "0.0:::FLOAT8"}, }, { Name: "tNumeric", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric", Precision: 0}, Raw: "numeric", Null: true}, Default: &schema.RawExpr{X: "0:::DECIMAL"}, }, { Name: "tDecimal", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric", Precision: 0}, Raw: "numeric", Null: true}, Default: &schema.RawExpr{X: "0:::DECIMAL"}, }, { Name: "tSmallSerial", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint", Unsigned: false}, Raw: "bigint", Null: false}, Default: &schema.RawExpr{ X: "unique_rowid()", }, }, { Name: "tSerial", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint", Unsigned: false}, Raw: "bigint", Null: false}, Default: &schema.RawExpr{ X: "unique_rowid()", }, }, { Name: "tBigSerial", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint", Unsigned: false}, Raw: "bigint", Null: false}, Default: &schema.RawExpr{ X: "unique_rowid()", }, }, { Name: "tSerial2", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint", Unsigned: false}, Raw: "bigint", Null: false}, Default: &schema.RawExpr{ X: 
"unique_rowid()", }, }, { Name: "tSerial4", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint", Unsigned: false}, Raw: "bigint", Null: false}, Default: &schema.RawExpr{ X: "unique_rowid()", }, }, { Name: "tSerial8", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint", Unsigned: false}, Raw: "bigint", Null: false}, Default: &schema.RawExpr{ X: "unique_rowid()", }, }, { Name: "tArray", Type: &schema.ColumnType{Type: &postgres.ArrayType{Type: &schema.StringType{T: "text"}, T: "text[]"}, Raw: "ARRAY", Null: true}, Default: &schema.RawExpr{ X: "ARRAY[]:::STRING[]", }, }, { Name: "tJSON", Type: &schema.ColumnType{Type: &schema.JSONType{T: "jsonb"}, Raw: "jsonb", Null: true}, Default: &schema.RawExpr{ X: "'{\"key\": \"value\"}':::JSONB", }, }, { Name: "tJSONB", Type: &schema.ColumnType{Type: &schema.JSONType{T: "jsonb"}, Raw: "jsonb", Null: true}, Default: &schema.RawExpr{ X: "'{\"key\": \"value\"}':::JSONB", }, }, { Name: "tUUID", Type: &schema.ColumnType{Type: &postgres.UUIDType{T: "uuid"}, Raw: "uuid", Null: true}, Default: &schema.RawExpr{ X: "'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11':::UUID", }, }, { Name: "tInterval", Type: &schema.ColumnType{Type: &postgres.IntervalType{T: "interval", Precision: intp(6)}, Raw: "interval", Null: true}, Default: &schema.RawExpr{ X: "'04:00:00':::INTERVAL", }, }, }, } for i, c := range expected.Columns { require.EqualValues(t, ts.Columns[i], c, c.Name) } }) t.Run("ImplicitIndexes", func(t *testing.T) { crdbRun(t, func(t *crdbTest) { testImplicitIndexes(t, t.db) }) }) } func (t *crdbTest) url(_ string) string { return fmt.Sprintf("postgres://root:pass@localhost:%d/defaultdb?sslmode=disable", t.port) } func (t *crdbTest) driver() migrate.Driver { return t.drv } func (t *crdbTest) revisionsStorage() migrate.RevisionReadWriter { return t.rrw } func (t *crdbTest) applyHcl(spec string) { realm := t.loadRealm() var desired schema.Schema err := postgres.EvalHCLBytes([]byte(spec), &desired, nil) require.NoError(t, err) 
existing := realm.Schemas[0] diff, err := t.drv.SchemaDiff(existing, &desired) require.NoError(t, err) err = t.drv.ApplyChanges(context.Background(), diff) require.NoError(t, err) } func (t *crdbTest) valueByVersion(values map[string]string, defaults string) string { if v, ok := values[t.version]; ok { return v } return defaults } func (t *crdbTest) loadRealm() *schema.Realm { r, err := t.drv.InspectRealm(context.Background(), &schema.InspectRealmOption{ Schemas: []string{"public"}, }) require.NoError(t, err) return r } func (t *crdbTest) loadUsers() *schema.Table { return t.loadTable("users") } func (t *crdbTest) loadPosts() *schema.Table { return t.loadTable("posts") } func (t *crdbTest) loadTable(name string) *schema.Table { realm := t.loadRealm() require.Len(t, realm.Schemas, 1) table, ok := realm.Schemas[0].Table(name) require.True(t, ok) return table } func (t *crdbTest) users() *schema.Table { usersT := &schema.Table{ Name: "users", Schema: t.realm().Schemas[0], Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&postgres.Identity{}}, }, { Name: "x", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}, }, }, } usersT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: usersT.Columns[0]}}} return usersT } func (t *crdbTest) posts() *schema.Table { usersT := t.users() postsT := &schema.Table{ Name: "posts", Schema: t.realm().Schemas[0], Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&postgres.Identity{}}, }, { Name: "author_id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, Default: &schema.Literal{V: "10"}, }, { Name: "ctime", Type: &schema.ColumnType{Raw: "timestamp", Type: &schema.TimeType{T: "timestamp"}}, Default: &schema.RawExpr{ X: "CURRENT_TIMESTAMP", }, }, }, Attrs: []schema.Attr{ 
&schema.Comment{Text: "posts comment"}, }, } postsT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: postsT.Columns[0]}}} postsT.Indexes = []*schema.Index{ {Name: "author_id", Parts: []*schema.IndexPart{{C: postsT.Columns[1]}}}, {Name: "id_author_id_unique", Unique: true, Parts: []*schema.IndexPart{{C: postsT.Columns[1]}, {C: postsT.Columns[0]}}}, } postsT.ForeignKeys = []*schema.ForeignKey{ {Symbol: "author_id", Table: postsT, Columns: postsT.Columns[1:2], RefTable: usersT, RefColumns: usersT.Columns[:1], OnDelete: schema.NoAction}, } return postsT } func (t *crdbTest) realm() *schema.Realm { r := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "public", }, }, Attrs: []schema.Attr{ &schema.Collation{V: "C.UTF-8"}, &postgres.CType{V: "C.UTF-8"}, }, } r.Schemas[0].Realm = r return r } func (t *crdbTest) diff(t1, t2 *schema.Table) []schema.Change { changes, err := t.drv.TableDiff(t1, t2) require.NoError(t, err) return changes } func (t *crdbTest) migrate(changes ...schema.Change) { err := t.drv.ApplyChanges(context.Background(), changes) require.NoError(t, err) } func (t *crdbTest) dropTables(names ...string) { t.Cleanup(func() { _, err := t.db.Exec("DROP TABLE IF EXISTS " + strings.Join(names, ", ")) require.NoError(t.T, err, "drop tables %q", names) }) } func (t *crdbTest) dropSchemas(names ...string) { t.Cleanup(func() { _, err := t.db.Exec("DROP SCHEMA IF EXISTS " + strings.Join(names, ", ") + " CASCADE") require.NoError(t.T, err, "drop schema %q", names) }) } func (t *crdbTest) applyRealmHcl(spec string) { realm := t.loadRealm() var desired schema.Realm err := postgres.EvalHCLBytes([]byte(spec), &desired, nil) require.NoError(t, err) diff, err := t.drv.RealmDiff(realm, &desired) require.NoError(t, err) err = t.drv.ApplyChanges(context.Background(), diff) require.NoError(t, err) } atlas-0.7.2/internal/integration/docker-compose.yaml000066400000000000000000000054331431455511600225730ustar00rootroot00000000000000version: "3.9" services: mysql56: 
platform: linux/amd64 image: mysql:5.6.35 environment: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass healthcheck: test: mysqladmin ping -ppass ports: - "3306:3306" mysql57: platform: linux/amd64 image: mysql:5.7.26 environment: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass healthcheck: test: mysqladmin ping -ppass ports: - "3307:3306" mysql8: platform: linux/amd64 image: mysql:8.0.19 environment: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass healthcheck: test: mysqladmin ping -ppass ports: - "3308:3306" postgres10: platform: linux/amd64 image: postgres:10 environment: POSTGRES_DB: test POSTGRES_PASSWORD: pass healthcheck: test: pg_isready -U postgres ports: - "5430:5432" postgres11: platform: linux/amd64 image: postgres:11 environment: POSTGRES_DB: test POSTGRES_PASSWORD: pass healthcheck: test: pg_isready -U postgres ports: - "5431:5432" postgres12: platform: linux/amd64 image: postgres:12 environment: POSTGRES_DB: test POSTGRES_PASSWORD: pass healthcheck: test: pg_isready -U postgres ports: - "5432:5432" postgres13: platform: linux/amd64 image: postgres:13 environment: POSTGRES_DB: test POSTGRES_PASSWORD: pass healthcheck: test: pg_isready -U postgres ports: - "5433:5432" postgres14: platform: linux/amd64 image: postgres:14 environment: POSTGRES_DB: test POSTGRES_PASSWORD: pass healthcheck: test: pg_isready -U postgres ports: - "5434:5432" mariadb: platform: linux/amd64 image: mariadb environment: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass healthcheck: test: mysqladmin ping -ppass ports: - "4306:3306" mariadb102: platform: linux/amd64 image: mariadb:10.2.32 environment: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass healthcheck: test: mysqladmin ping -ppass ports: - "4307:3306" mariadb103: platform: linux/amd64 image: mariadb:10.3.13 environment: MYSQL_DATABASE: test MYSQL_ROOT_PASSWORD: pass healthcheck: test: mysqladmin ping -ppass ports: - "4308:3306" # Default DB test, No Password tidb5: platform: linux/amd64 image: pingcap/tidb:v5.4.0 ports: - 
"4309:4000" tidb6: platform: linux/amd64 image: pingcap/tidb:v6.0.0 ports: - "4310:4000" cockroach21.2.11: platform: linux/amd64 image: cockroachdb/cockroach:v21.2.11 ports: - "26257:26257" command: "start-single-node --insecure"atlas-0.7.2/internal/integration/go.mod000066400000000000000000000072051431455511600201020ustar00rootroot00000000000000module ariga.io/atlas/internal/integration go 1.19 replace ariga.io/atlas => ../../ replace ariga.io/atlas/cmd/atlas => ../../cmd/atlas require ( ariga.io/atlas v0.7.1-0.20220925103247-db83ea0cd178 entgo.io/ent v0.11.1 github.com/go-sql-driver/mysql v1.6.0 github.com/hashicorp/hcl/v2 v2.13.0 github.com/lib/pq v1.10.6 github.com/mattn/go-sqlite3 v1.14.14 github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e github.com/rogpeppe/go-internal v1.9.0 github.com/stretchr/testify v1.8.0 github.com/zclconf/go-cty v1.10.0 // indirect ) require ( ariga.io/atlas/cmd/atlas v0.7.1-0.20220925103247-db83ea0cd178 // indirect github.com/agext/levenshtein v1.2.3 // indirect github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220816024939-bc8df83d7b9d // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/auxten/postgresql-parser v1.0.1 // indirect github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054 // indirect github.com/chzyer/readline v1.5.1 // indirect github.com/cockroachdb/apd v1.1.1-0.20181017181144-bced77f817b4 // indirect github.com/cockroachdb/errors v1.8.2 // indirect github.com/cockroachdb/logtags v0.0.0-20190617123548-eb05cc24525f // indirect github.com/cockroachdb/redact v1.0.8 // indirect github.com/cockroachdb/sentry-go v0.6.1-cockroachdb.2 // indirect github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dustin/go-humanize v1.0.0 // indirect github.com/fatih/color v1.13.0 // indirect github.com/getsentry/raven-go v0.2.0 // indirect github.com/go-openapi/inflect v0.19.0 // indirect github.com/gogo/protobuf v1.3.2 
// indirect github.com/golang/protobuf v1.4.3 // indirect github.com/google/go-cmp v0.5.6 // indirect github.com/google/uuid v1.3.0 // indirect github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/konsorten/go-windows-terminal-sequences v1.0.3 // indirect github.com/kr/pretty v0.2.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/manifoldco/promptui v0.9.0 // indirect github.com/mattn/go-colorable v0.1.9 // indirect github.com/mattn/go-isatty v0.0.14 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/pingcap/errors v0.11.5-0.20210425183316-da1aaba5fb63 // indirect github.com/pingcap/log v0.0.0-20210625125904-98ed8e2eb1c7 // indirect github.com/pingcap/tidb/parser v0.0.0-20220817134052-9709249e523a // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 // indirect github.com/sergi/go-diff v1.2.0 // indirect github.com/sirupsen/logrus v1.6.0 // indirect github.com/spf13/cobra v1.5.0 // indirect github.com/spf13/pflag v1.0.5 // indirect go.uber.org/atomic v1.7.0 // indirect go.uber.org/multierr v1.6.0 // indirect go.uber.org/zap v1.18.1 // indirect golang.org/x/exp v0.0.0-20220428152302-39d4317da171 // indirect golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57 // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect golang.org/x/sys v0.0.0-20220829200755-d48e67d00261 // indirect golang.org/x/text v0.3.7 // indirect google.golang.org/genproto v0.0.0-20200911024640-645f7a48b24f // indirect google.golang.org/grpc v1.33.1 // indirect google.golang.org/protobuf v1.25.0 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gopkg.in/errgo.v2 v2.1.0 // indirect gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) 
atlas-0.7.2/internal/integration/go.sum000066400000000000000000001271141431455511600201310ustar00rootroot00000000000000cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= entgo.io/ent v0.11.1 h1:im67R+2W3Nee2bNS2YnoYz8oAF0Qz4AOlIvKRIAEISY= entgo.io/ent v0.11.1/go.mod h1:X5b1YfMayrRTgKGO//8IqpL7XJx0uqdeReEkxNpXROA= github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/CloudyKit/fastprinter v0.0.0-20170127035650-74b38d55f37a/go.mod h1:EFZQ978U7x8IRnstaskI3IysnWY5Ao3QgZUKOXlsAdw= github.com/CloudyKit/jet v2.1.3-0.20180809161101-62edd43e4f88+incompatible/go.mod h1:HPYO+50pSWkPoj9Q/eq0aRGByCL6ScRlUmiEX5Zgm+w= github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY= github.com/Joker/jade v1.0.1-0.20190614124447-d475f43051e7/go.mod h1:6E6s8o2AE4KhCrqr6GRJjdC/gNfTdxkIXvuGZZda2VM= github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398/go.mod h1:a1uqRtAwp2Xwc6WNPJEufxJ7fx3npB4UV/JOLmbu5I0= github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220816024939-bc8df83d7b9d h1:0xIrH2lJbraclvJT3pvTf3u2oCAL60cAqiv4qRpz4EI= github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220816024939-bc8df83d7b9d/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= 
github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/auxten/postgresql-parser v1.0.1 h1:x+qiEHAe2cH55Kly64dWh4tGvUKEQwMmJgma7a1kbj4= github.com/auxten/postgresql-parser v1.0.1/go.mod h1:Nf27dtv8EU1C+xNkoLD3zEwfgJfDDVi8Zl86gznxPvI= github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g= github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054 h1:uH66TXeswKn5PW5zdZ39xEwfS9an067BirqA+P4QaLI= github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI= github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cockroachdb/apd v1.1.1-0.20181017181144-bced77f817b4 h1:XWEdfNxDkZI3DXXlpo0hZJ1xdaH/f3CKuZpk93pS/Y0= github.com/cockroachdb/apd 
v1.1.1-0.20181017181144-bced77f817b4/go.mod h1:mdGz2CnkJrefFtlLevmE7JpL2zB9tKofya/6w7wWzNA= github.com/cockroachdb/datadriven v1.0.0/go.mod h1:5Ib8Meh+jk1RlHIXej6Pzevx/NLlNvQB9pmSBZErGA4= github.com/cockroachdb/errors v1.6.1/go.mod h1:tm6FTP5G81vwJ5lC0SizQo374JNCOPrHyXGitRJoDqM= github.com/cockroachdb/errors v1.8.2 h1:rnnWK9Nn5kEMOGz9531HuDx/FOleL4NVH20VsDexVC8= github.com/cockroachdb/errors v1.8.2/go.mod h1:qGwQn6JmZ+oMjuLwjWzUNqblqk0xl4CVV3SQbGwK7Ac= github.com/cockroachdb/logtags v0.0.0-20190617123548-eb05cc24525f h1:o/kfcElHqOiXqcou5a3rIlMc7oJbMQkeLk0VQJ7zgqY= github.com/cockroachdb/logtags v0.0.0-20190617123548-eb05cc24525f/go.mod h1:i/u985jwjWRlyHXQbwatDASoW0RMlZ/3i9yJHE2xLkI= github.com/cockroachdb/redact v1.0.8 h1:8QG/764wK+vmEYoOlfobpe12EQcS81ukx/a4hdVMxNw= github.com/cockroachdb/redact v1.0.8/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= github.com/cockroachdb/sentry-go v0.6.1-cockroachdb.2 h1:IKgmqgMQlVJIZj19CdocBeSfSaiCbEBZGKODaixqtHM= github.com/cockroachdb/sentry-go v0.6.1-cockroachdb.2/go.mod h1:8BT+cPK6xvFOcRlk0R8eg+OTkcqI6baNH4xAkpiYVvQ= github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 h1:iwZdTE0PVqJCos1vaoKsclOGD3ADKpshg3SRtYBbwso= github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548/go.mod h1:e6NPNENfs9mPDVNRekM7lKScauxd5kXTr1Mfyig6TDM= 
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgraph-io/badger v1.6.0/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhYy6psNgSztDR4= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHjkjCrw= github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4/go.mod h1:T9YF2M40nIgbVgp3rreNmTged+9HrbNTIQf1PsaIiTA= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify 
v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/gavv/httpexpect v2.0.0+incompatible/go.mod h1:x+9tiU1YnrOvnB725RkpoLv1M62hOWzwo5OXotisrKc= github.com/getsentry/raven-go v0.2.0 h1:no+xWJRb5ZI7eE8TWgIq1jLulQiIoLG0IfYxv5JYMGs= github.com/getsentry/raven-go v0.2.0/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s= github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM= github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4= github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4= github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= github.com/gogo/googleapis v0.0.0-20180223154316-0cd9801be74a/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/gogo/protobuf 
v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/gogo/status v1.1.0/go.mod h1:BFv9nrluPLmrS0EmGVvLaPNmRosr9KapBYd5/hpY1WM= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3 h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/gomodule/redigo v1.7.1-0.20190724094224-574c33c3df38/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod 
h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/hcl/v2 v2.13.0 h1:0Apadu1w6M11dyGFxWnmhhcMjkbAiKCv7G1r/2QgCNc= github.com/hashicorp/hcl/v2 v2.13.0/go.mod h1:e4z5nxYlWNPdDSNYX+ph14EvWYMFm3eP0zIUqPc2jr0= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/hydrogen18/memlistener v0.0.0-20141126152155-54553eb933fb/go.mod h1:qEIFzExnS6016fRpRfxrExeVn2gbClQA99gQhnIcdhE= github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= 
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/iris-contrib/blackfriday v2.0.0+incompatible/go.mod h1:UzZ2bDEoaSGPbkg6SAB4att1aAwTmVIx/5gCVqeyUdI= github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/+fafWORmlnuysV2EMP8MW+qe0= github.com/iris-contrib/i18n v0.0.0-20171121225848-987a633949d0/go.mod h1:pMCz62A0xJL6I+umB2YTlFRwWXaDFA0jy+5HzGiJjqI= github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrOcOqfqxa4hXw= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q= github.com/juju/loggo v0.0.0-20180524022052-584905176618/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U= github.com/juju/testing v0.0.0-20180920084828-472a3e8b2073/go.mod h1:63prj8cnj0tU0S9OHjGJn+b1h0ZghCndfnbQolrYTwA= github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= github.com/kataras/golog v0.0.9/go.mod h1:12HJgwBIZFNGL0EJnMRhmvGA0PQGx8VFwrZtM4CqbAk= github.com/kataras/iris/v12 v12.0.1/go.mod h1:udK4vLQKkdDqMGJJVd/msuMtN6hpYJhg/lSzuxjhO+U= github.com/kataras/neffos v0.0.10/go.mod h1:ZYmJC07hQPW67eKuzlfY7SO3bC0mw83A3j6im82hfqw= github.com/kataras/pio v0.0.0-20190103105442-ea782b38602d/go.mod h1:NV88laa9UiiDuX9AhMbDPkGYSPugBOV6yTZB1l2K9Z0= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.8.2/go.mod 
h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.9.0/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g= github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.6 h1:jbk+ZieJ0D7EVGJYpL9QTz7/YW6UHbmdnZWYyK5cdBs= github.com/lib/pq v1.10.6/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= 
github.com/mattn/go-colorable v0.1.9 h1:sqDoxXbdeALODt0DAeJCVp38ps9ZogZEAXjus69YV3U= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-sqlite3 v1.14.14 h1:qZgc/Rwetq+MtyE18WhzjokPD93dNqLGNT3QJuLvBGw= github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= github.com/mediocregopher/mediocre-go-lib v0.0.0-20181029021733-cb65787f37ed/go.mod h1:dSsfyI2zABAdhcbvkXqgxOxrCsbYeHCPgrZkku60dSg= github.com/mediocregopher/radix/v3 v3.3.0/go.mod h1:EmfVyvspXz1uZEyPBMyGK+kjWiKQGvsUt6O3Pj+LDCQ= github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/moul/http2curl 
v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= github.com/nats-io/nats.go v1.8.1/go.mod h1:BrFz9vVn0fU3AcH9Vn4Kd7W0NpJ651tD5omQ3M8LwxM= github.com/nats-io/nkeys v0.0.2/go.mod h1:dab7URMsZm6Z/jp9Z5UGa87Uutgc2mVpXLC4B7TDb/4= github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.13.0/go.mod h1:+REjRxOmWfHCjfv9TTWB1jD1Frx4XydAD3zm1lskyM0= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5/go.mod h1:jvVRKCrJTQWu0XVbaOlby/2lO20uSCHEMzzplHXte1o= github.com/pingcap/errors v0.11.0/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= github.com/pingcap/errors v0.11.5-0.20210425183316-da1aaba5fb63 h1:+FZIDR/D97YOPik4N4lPDaUcLDF/EQPogxtlHB2ZZRM= github.com/pingcap/errors v0.11.5-0.20210425183316-da1aaba5fb63/go.mod h1:X2r9ueLEUZgtx2cIogM0v4Zj5uvvzhuuiu7Pn8HzMPg= github.com/pingcap/log v0.0.0-20210625125904-98ed8e2eb1c7 h1:k2BbABz9+TNpYRwsCCFS8pEEnFVOdbgEjL/kTlLuzZQ= github.com/pingcap/log v0.0.0-20210625125904-98ed8e2eb1c7/go.mod h1:8AanEdAHATuRurdGxZXBz0At+9avep+ub7U1AGYLIMM= github.com/pingcap/tidb/parser v0.0.0-20220817134052-9709249e523a h1:KhEhzJLyVSFT13yk/M2BQ7eE6Ofs7jBnALjc0ICZbsQ= github.com/pingcap/tidb/parser v0.0.0-20220817134052-9709249e523a/go.mod 
h1:wjvp+T3/T9XYt0nKqGX3Kc1AKuyUcfno6LTc6b2A6ew= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk= github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.8.2-0.20220112175052-f3cb5c2c6412 h1:BVgfta5l+NeAx6l597tEbF2TkiVi+Mw+2vxJjwSfyRU= github.com/rogpeppe/go-internal v1.8.2-0.20220112175052-f3cb5c2c6412/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o= github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/sclevine/agouti v3.0.0+incompatible/go.mod 
h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.6.0 h1:UBcNElsrwanuuMsnGSlYmtmgbb23qDR5dG+6X6Oo89I= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= github.com/spf13/cobra v1.5.0 h1:X+jTBEBqF0bHN+9cSMgmfuvv2VHJ9ezmFNf9Y/XstYU= github.com/spf13/cobra v1.5.0/go.mod h1:dWXEIy2H428czQCjInthrTRUg7yKbok+2Qi/yBIJoUM= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= 
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasthttp v1.6.0/go.mod h1:FstJa9V+Pj9vQ7OJie2qMHdwemEDaDiSdBnvPM1Su9w= github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI= github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= 
github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/zclconf/go-cty v1.10.0 h1:mp9ZXQeIcN8kAwuqorjH+Q+njbJKjLrvB2yIh4q7U+0= github.com/zclconf/go-cty v1.10.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.18.1 h1:CSUJ2mjFszzEWt4CdKISEuChVIXGBn3lAPwkRGyVrc4= go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp 
v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20220428152302-39d4317da171 h1:TfdoLivD44QwvssI9Sv1xwa5DcL5XQr4au4sZ2F2NV4= golang.org/x/exp v0.0.0-20220428152302-39d4317da171/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57 h1:LQmS1nU0twXLA96Kt7U9qtHJEbBk3z6Q0V4UXjZkpr4= golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190327091125-710a502c58a2/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net 
v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201214210602-f9fddec55a1e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220829200755-d48e67d00261 h1:v6hYoSR9T5oet+pMXwUWkbiVqx/63mlHjefrHmxwfeY= golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod 
h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190327201419-c70d86f8b7cf/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180518175338-11a468237815/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto 
v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200911024640-645f7a48b24f h1:Yv4xsIx7HZOoyUGSJ2ksDyWE2qIBXROsZKt2ny3hCGM= google.golang.org/genproto v0.0.0-20200911024640-645f7a48b24f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/grpc v1.12.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.33.1 h1:DGeFlSan2f+WEtCERJ4J9GJWk15TxUi8QGagfI87Xyc= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf 
v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0 h1:0vLT13EuvQ0hNvakwLuFZ/jYrLp5F3kcWHXdRggjCE8= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= gopkg.in/go-playground/validator.v8 v8.18.2/go.mod h1:RX2a/7Ha8BgOhfk7j780h4/u/RRjR0eouCJSH80/M2Y= gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA= gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8= gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 
v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= atlas-0.7.2/internal/integration/hclsqlspec/000077500000000000000000000000001431455511600211315ustar00rootroot00000000000000atlas-0.7.2/internal/integration/hclsqlspec/hclsqlspec_test.go000066400000000000000000000236501431455511600246660ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package hclsqlspec import ( "testing" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/mysql" "ariga.io/atlas/sql/postgres" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlite" "ariga.io/atlas/sql/sqlspec" "github.com/stretchr/testify/require" ) var dialects = []struct { name string schemahcl.Marshaler Eval func(b []byte, v any, inp map[string]string) error }{ { name: "mysql", Marshaler: mysql.MarshalHCL, Eval: mysql.EvalHCLBytes, }, { name: "postgres", Marshaler: postgres.MarshalHCL, Eval: postgres.EvalHCLBytes, }, { name: "sqlite", Marshaler: sqlite.MarshalHCL, Eval: sqlite.EvalHCLBytes, }, } func TestHCL_SQL(t *testing.T) { file, err := decode(` schema "hi" { } table "users" { schema = schema.hi column "id" { type = int null = false default = 123 } column "age" { type = int null = false default = 10 } column "active" { type = boolean default = true } column "account_active" { type = boolean default = true } primary_key { columns = [table.users.column.id, table.users.column.age] } index "age" { unique = true columns = [table.users.column.age] } index "active" { unique = false columns = [table.users.column.active] } foreign_key "fk" { columns = [table.users.column.account_active] ref_columns = [table.accounts.column.active] on_delete = "SET NULL" } } table "accounts" { schema = schema.hi column "id" { type = int null = false default = 123 } column "age" { type = int null = false default = 10 } column "active" { type = boolean default = true } column "user_active" { type = boolean default = true } primary_key { columns = [table.accounts.column.id] } index "age" { unique = true columns = [table.accounts.column.age] } index "active" { unique = false columns = [table.accounts.column.active] } foreign_key "fk" { columns = [table.accounts.column.user_active] ref_columns = [table.users.column.active] on_delete = "SET NULL" } } `) require.NoError(t, err) expected := &db{ Schemas: []*sqlspec.Schema{ {Name: "hi"}, }, Tables: []*sqlspec.Table{ { Name: "users", Schema: 
&schemahcl.Ref{V: "$schema.hi"}, Columns: []*sqlspec.Column{ { Name: "id", Type: &schemahcl.Type{T: "int"}, Null: false, Default: &schemahcl.LiteralValue{V: "123"}, }, { Name: "age", Type: &schemahcl.Type{T: "int"}, Null: false, Default: &schemahcl.LiteralValue{V: "10"}, }, { Name: "active", Type: &schemahcl.Type{T: "boolean"}, Null: false, Default: &schemahcl.LiteralValue{V: "true"}, }, { Name: "account_active", Type: &schemahcl.Type{T: "boolean"}, Null: false, Default: &schemahcl.LiteralValue{V: "true"}, }, }, PrimaryKey: &sqlspec.PrimaryKey{ Columns: []*schemahcl.Ref{ { V: "$table.users.$column.id", }, { V: "$table.users.$column.age", }, }, }, Indexes: []*sqlspec.Index{ { Name: "age", Unique: true, Columns: []*schemahcl.Ref{ { V: "$table.users.$column.age", }, }, }, { Name: "active", Unique: false, Columns: []*schemahcl.Ref{ { V: "$table.users.$column.active", }, }, }, }, ForeignKeys: []*sqlspec.ForeignKey{ { Symbol: "fk", Columns: []*schemahcl.Ref{ { V: "$table.users.$column.account_active", }, }, RefColumns: []*schemahcl.Ref{ { V: "$table.accounts.$column.active", }, }, OnDelete: &schemahcl.Ref{V: string(schema.SetNull)}, }, }, }, { Name: "accounts", Schema: &schemahcl.Ref{V: "$schema.hi"}, Columns: []*sqlspec.Column{ { Name: "id", Type: &schemahcl.Type{T: "int"}, Null: false, Default: &schemahcl.LiteralValue{V: "123"}, }, { Name: "age", Type: &schemahcl.Type{T: "int"}, Null: false, Default: &schemahcl.LiteralValue{V: "10"}, }, { Name: "active", Type: &schemahcl.Type{T: "boolean"}, Null: false, Default: &schemahcl.LiteralValue{V: "true"}, }, { Name: "user_active", Type: &schemahcl.Type{T: "boolean"}, Null: false, Default: &schemahcl.LiteralValue{V: "true"}, }, }, PrimaryKey: &sqlspec.PrimaryKey{ Columns: []*schemahcl.Ref{ { V: "$table.accounts.$column.id", }, }, }, Indexes: []*sqlspec.Index{ { Name: "age", Unique: true, Columns: []*schemahcl.Ref{ { V: "$table.accounts.$column.age", }, }, }, { Name: "active", Unique: false, Columns: []*schemahcl.Ref{ { V: 
"$table.accounts.$column.active", }, }, }, }, ForeignKeys: []*sqlspec.ForeignKey{ { Symbol: "fk", Columns: []*schemahcl.Ref{ { V: "$table.accounts.$column.user_active", }, }, RefColumns: []*schemahcl.Ref{ { V: "$table.users.$column.active", }, }, OnDelete: &schemahcl.Ref{V: string(schema.SetNull)}, }, }, }, }, } require.EqualValues(t, expected, file) } func TestWithRemain(t *testing.T) { file, err := decode(` schema "hi" { x = 1 }`) require.NoError(t, err) require.EqualValues(t, &db{ Schemas: []*sqlspec.Schema{ { Name: "hi", DefaultExtension: schemahcl.DefaultExtension{ Extra: schemahcl.Resource{ Attrs: []*schemahcl.Attr{ {K: "x", V: &schemahcl.LiteralValue{V: "1"}}, }, }, }, }, }, }, file) } func TestMultiTable(t *testing.T) { _, err := decode(` schema "hi" { } table "users" { schema = schema.hi column "id" { type = int unsigned = true null = false default = 123 } } table "accounts" { schema = schema.hi column "id" { type = varchar(255) } index "name" { unique = true } } `) require.NoError(t, err) } var hcl = schemahcl.New(schemahcl.WithTypes(postgres.TypeRegistry.Specs())) func TestMarshalTopLevel(t *testing.T) { c := &sqlspec.Schema{ Name: "schema", } h, err := hcl.MarshalSpec(c) require.NoError(t, err) require.EqualValues(t, `schema "schema" { } `, string(h)) } func TestRealm(t *testing.T) { f := `schema "account_a" { } table "t1" { schema = schema.account_a } schema "account_b" { } table "t2" { schema = schema.account_b } ` for _, tt := range dialects { t.Run(tt.name, func(t *testing.T) { var r schema.Realm err := tt.Eval([]byte(f), &r, nil) require.NoError(t, err) exp := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "account_a", Realm: &r, Tables: []*schema.Table{ {Name: "t1"}, }, }, { Name: "account_b", Realm: &r, Tables: []*schema.Table{ {Name: "t2"}, }, }, }, } exp.Schemas[0].Tables[0].Schema = exp.Schemas[0] exp.Schemas[1].Tables[0].Schema = exp.Schemas[1] require.EqualValues(t, exp, &r) hcl, err := tt.MarshalSpec(&r) require.NoError(t, err) var after 
schema.Realm err = tt.Eval(hcl, &after, nil) require.NoError(t, err) require.EqualValues(t, exp, &after) }) } } func TestUnsignedImmutability(t *testing.T) { f := `table "users" { schema = schema.test column "id" { type = bigint unsigned = true } column "shouldnt" { type = bigint } } schema "test" { }` var s schema.Schema err := mysql.EvalHCLBytes([]byte(f), &s, nil) require.NoError(t, err) tbl := s.Tables[0] require.EqualValues(t, &schema.IntegerType{T: "bigint", Unsigned: true}, tbl.Columns[0].Type.Type) require.EqualValues(t, &schema.IntegerType{T: "bigint", Unsigned: false}, tbl.Columns[1].Type.Type) } func TestTablesWithQualifiers(t *testing.T) { h := ` schema "a" {} schema "b" {} table "a" "users" { schema = schema.a column "id" { type = int } column "friend_id" { type = int } foreign_key "friend_b" { columns = [column.friend_id] ref_columns = [table.b.users.column.id] } } table "b" "users" { schema = schema.b column "id" { type = int } column "friend_id" { type = int } foreign_key "friend_a" { columns = [column.friend_id] ref_columns = [table.a.users.column.id] } } ` var r schema.Realm err := mysql.EvalHCLBytes([]byte(h), &r, nil) require.NoError(t, err) require.EqualValues(t, r.Schemas[0].Tables[0].Columns[0], r.Schemas[1].Tables[0].ForeignKeys[0].RefColumns[0]) require.EqualValues(t, "b", r.Schemas[0].Tables[0].ForeignKeys[0].RefTable.Schema.Name) } func TestQualifyMarshal(t *testing.T) { for _, tt := range dialects { t.Run(tt.name, func(t *testing.T) { r := schema.NewRealm( schema.New("a"). AddTables( schema.NewTable("users"), schema.NewTable("tbl_a"), ), schema.New("b"). AddTables( schema.NewTable("users"), schema.NewTable("tbl_b"), ), schema.New("c"). 
AddTables( schema.NewTable("users"), schema.NewTable("tbl_c"), ), ) h, err := tt.Marshaler.MarshalSpec(r) require.NoError(t, err) expected := `table "a" "users" { schema = schema.a } table "tbl_a" { schema = schema.a } table "b" "users" { schema = schema.b } table "tbl_b" { schema = schema.b } table "c" "users" { schema = schema.c } table "tbl_c" { schema = schema.c } schema "a" { } schema "b" { } schema "c" { } ` require.EqualValues(t, expected, string(h)) }) } } func decode(f string) (*db, error) { d := &db{} if err := hcl.EvalBytes([]byte(f), d, nil); err != nil { return nil, err } return d, nil } type db struct { Schemas []*sqlspec.Schema `spec:"schema"` Tables []*sqlspec.Table `spec:"table"` } atlas-0.7.2/internal/integration/integration_test.go000066400000000000000000000474401431455511600227120ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package integration import ( "bytes" "context" "database/sql" "flag" "fmt" "io" "os" "os/exec" "path/filepath" "strings" "sync" "testing" "text/template" "time" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" entsql "entgo.io/ent/dialect/sql" entschema "entgo.io/ent/dialect/sql/schema" "entgo.io/ent/entc/integration/ent" "github.com/hashicorp/hcl/v2/hclparse" "github.com/stretchr/testify/require" ) var ( dbs []io.Closer flagVersion string ) func TestMain(m *testing.M) { flag.StringVar(&flagVersion, "version", "", "[mysql56, postgres10, tidb5, ...] what version to test") flag.Parse() code := m.Run() for _, db := range dbs { db.Close() } os.Exit(code) } // T holds the elements common between dialect tests. 
type T interface { testing.TB url(string) string driver() migrate.Driver revisionsStorage() migrate.RevisionReadWriter realm() *schema.Realm loadRealm() *schema.Realm users() *schema.Table loadUsers() *schema.Table posts() *schema.Table loadPosts() *schema.Table loadTable(string) *schema.Table dropTables(...string) dropSchemas(...string) migrate(...schema.Change) diff(*schema.Table, *schema.Table) []schema.Change applyHcl(spec string) applyRealmHcl(spec string) } func testAddDrop(t T) { usersT := t.users() postsT := t.posts() petsT := &schema.Table{ Name: "pets", Schema: usersT.Schema, Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, {Name: "owner_id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}, Null: true}}, }, } petsT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: postsT.Columns[0]}}} petsT.ForeignKeys = []*schema.ForeignKey{ {Symbol: "owner_id", Table: petsT, Columns: petsT.Columns[1:], RefTable: usersT, RefColumns: usersT.Columns[:1]}, } t.dropTables(postsT.Name, usersT.Name, petsT.Name) t.migrate(&schema.AddTable{T: petsT}, &schema.AddTable{T: usersT}, &schema.AddTable{T: postsT}) ensureNoChange(t, usersT, petsT, postsT) t.migrate(&schema.DropTable{T: usersT}, &schema.DropTable{T: postsT}, &schema.DropTable{T: petsT}) // Ensure the realm is empty. require.EqualValues(t, t.realm(), t.loadRealm()) } func testRelation(t T) { usersT, postsT := t.users(), t.posts() t.dropTables(postsT.Name, usersT.Name) t.migrate( &schema.AddTable{T: usersT}, &schema.AddTable{T: postsT}, ) ensureNoChange(t, postsT, usersT) } func testEntIntegration(t T, dialect string, db *sql.DB, opts ...entschema.MigrateOption) { ctx := context.Background() drv := entsql.OpenDB(dialect, db) client := ent.NewClient(ent.Driver(drv)) require.NoError(t, client.Schema.Create(ctx, opts...)) sanity(client) realm := t.loadRealm() ensureNoChange(t, realm.Schemas[0].Tables...) // Drop tables. 
changes := make([]schema.Change, len(realm.Schemas[0].Tables)) for i, t := range realm.Schemas[0].Tables { changes[i] = &schema.DropTable{T: t} } t.migrate(changes...) // Add tables. for i, t := range realm.Schemas[0].Tables { changes[i] = &schema.AddTable{T: t} } t.migrate(changes...) ensureNoChange(t, realm.Schemas[0].Tables...) sanity(client) // Drop tables. for i, t := range realm.Schemas[0].Tables { changes[i] = &schema.DropTable{T: t} } t.migrate(changes...) } func testImplicitIndexes(t T, db *sql.DB) { const ( name = "implicit_indexes" ddl = "create table implicit_indexes(c1 int unique, c2 int unique, unique(c1,c2), unique(c2,c1))" ) t.dropTables(name) _, err := db.Exec(ddl) require.NoError(t, err) current := t.loadTable(name) c1, c2 := schema.NewNullIntColumn("c1", "int"), schema.NewNullIntColumn("c2", "int") desired := schema.NewTable(name). AddColumns(c1, c2). AddIndexes( schema.NewUniqueIndex("").AddColumns(c1), schema.NewUniqueIndex("").AddColumns(c2), schema.NewUniqueIndex("").AddColumns(c1, c2), schema.NewUniqueIndex("").AddColumns(c2, c1), ) changes := t.diff(current, desired) require.Empty(t, changes) desired.AddIndexes( schema.NewIndex("c1_key").AddColumns(c1), schema.NewIndex("c2_key").AddColumns(c2), ) changes = t.diff(current, desired) require.NotEmpty(t, changes) t.migrate(&schema.ModifyTable{T: desired, Changes: changes}) ensureNoChange(t, desired) } func testHCLIntegration(t T, full string, empty string) { t.applyHcl(full) users := t.loadUsers() posts := t.loadPosts() t.dropTables(users.Name, posts.Name) column, ok := users.Column("id") require.True(t, ok, "expected id column") require.Equal(t, "users", users.Name) column, ok = posts.Column("author_id") require.Equal(t, "author_id", column.Name) t.applyHcl(empty) require.Empty(t, t.realm().Schemas[0].Tables) } func testCLIMigrateApplyBC(t T, dialect string) { require.NoError(t, initCLI()) ctx := context.Background() t.dropSchemas("bc_test", "bc_test_2", "atlas_schema_revisions") 
t.dropTables("bc_tbl", "atlas_schema_revisions") t.migrate(&schema.AddSchema{S: schema.New("bc_test")}) // Connection to schema with flag will respect flag (also mimics "old" behavior). out, err := exec.Command( "go", "run", "ariga.io/atlas/cmd/atlas", "migrate", "apply", "--allow-dirty", // since database does contain more than one schema "--dir", "file://testdata/migrations/"+dialect, "--url", t.url("bc_test"), "--revisions-schema", "atlas_schema_revisions", ).CombinedOutput() require.NoError(t, err, string(out)) s, err := t.driver().InspectSchema(ctx, "atlas_schema_revisions", nil) require.NoError(t, err) _, ok := s.Table("atlas_schema_revisions") require.True(t, ok) // Connection to realm will see the existing schema and will not attempt to migrate. out, err = exec.Command( "go", "run", "ariga.io/atlas/cmd/atlas", "migrate", "apply", "--dir", "file://testdata/migrations/"+dialect, "--url", t.url(""), ).CombinedOutput() require.NoError(t, err, string(out)) require.Equal(t, "No migration files to execute\n", string(out)) // Connection to schema without flag will error. out, err = exec.Command( "go", "run", "ariga.io/atlas/cmd/atlas", "migrate", "apply", "--dir", "file://testdata/migrations/"+dialect, "--url", t.url("bc_test"), ).CombinedOutput() require.Error(t, err) require.Contains(t, string(out), "We couldn't find a revision table in the connected schema but found one in") // Providing the flag and we are good. out, err = exec.Command( "go", "run", "ariga.io/atlas/cmd/atlas", "migrate", "apply", "--dir", "file://testdata/migrations/"+dialect, "--url", t.url("bc_test"), "--revisions-schema", "atlas_schema_revisions", ).CombinedOutput() require.NoError(t, err) require.Equal(t, "No migration files to execute\n", string(out)) // Providing the flag to the schema instead will work as well. 
t.migrate( &schema.DropSchema{S: schema.New("bc_test")}, &schema.AddSchema{S: schema.New("bc_test")}, ) out, err = exec.Command( "go", "run", "ariga.io/atlas/cmd/atlas", "migrate", "apply", "--dir", "file://testdata/migrations/"+dialect, "--url", t.url("bc_test"), "--revisions-schema", "bc_test", ).CombinedOutput() require.NoError(t, err, string(out)) require.NotContains(t, string(out), "No migration files to execute\n") // Consecutive attempts do not need the flag anymore. out, err = exec.Command( "go", "run", "ariga.io/atlas/cmd/atlas", "migrate", "apply", "--dir", "file://testdata/migrations/"+dialect, "--url", t.url("bc_test"), ).CombinedOutput() require.NoError(t, err) require.Equal(t, "No migration files to execute\n", string(out)) // Last, if bound to schema and no "old" behavior extra schema does // exist, the revision table will be saved in the connected one. t.migrate( &schema.DropSchema{S: schema.New("atlas_schema_revisions")}, &schema.DropSchema{S: schema.New("bc_test")}, &schema.AddSchema{S: schema.New("bc_test_2")}, ) out, err = exec.Command( "go", "run", "ariga.io/atlas/cmd/atlas", "migrate", "apply", "--allow-dirty", // since database does contain more than one schema "--dir", "file://testdata/migrations/"+dialect, "--url", t.url("bc_test_2"), ).CombinedOutput() require.NoError(t, err, string(out)) s, err = t.driver().InspectSchema(ctx, "atlas_schema_revisions", nil) require.True(t, schema.IsNotExistError(err)) s, err = t.driver().InspectSchema(ctx, "bc_test_2", nil) require.NoError(t, err) _, ok = s.Table("atlas_schema_revisions") require.True(t, ok) } func testCLISchemaInspect(t T, h string, dsn string, eval schemahcl.Evaluator, args ...string) { require.NoError(t, initCLI()) t.dropTables("users") var expected schema.Schema err := evalBytes([]byte(h), &expected, eval) require.NoError(t, err) t.applyHcl(h) runArgs := []string{ "run", "ariga.io/atlas/cmd/atlas", "schema", "inspect", "-d", dsn, } runArgs = append(runArgs, args...) 
cmd := exec.Command("go", runArgs...) stdout, stderr := bytes.NewBuffer(nil), bytes.NewBuffer(nil) cmd.Stderr = stderr cmd.Stdout = stdout require.NoError(t, cmd.Run(), stderr.String()) var actual schema.Schema err = evalBytes(stdout.Bytes(), &actual, eval) require.NoError(t, err) require.Empty(t, stderr.String()) require.Equal(t, expected, actual) } func testCLISchemaInspectEnv(t T, h string, env string, eval schemahcl.Evaluator) { err := initCLI() require.NoError(t, err) t.dropTables("users") var expected schema.Schema err = evalBytes([]byte(h), &expected, eval) require.NoError(t, err) t.applyHcl(h) cmd := exec.Command("go", "run", "ariga.io/atlas/cmd/atlas", "schema", "inspect", "--env", env, ) stdout, stderr := bytes.NewBuffer(nil), bytes.NewBuffer(nil) cmd.Stderr = stderr cmd.Stdout = stdout require.NoError(t, cmd.Run(), stderr.String()) var actual schema.Schema err = evalBytes(stdout.Bytes(), &actual, eval) require.NoError(t, err) require.Empty(t, stderr.String()) require.Equal(t, expected, actual) } // initOnce controls that the cli will only be built once. 
var initOnce sync.Once func initCLI() error { var err error initOnce.Do(func() { err = exec.Command("go", "run", "-mod=mod", "ariga.io/atlas/cmd/atlas").Run() }) return err } func testCLIMultiSchemaApply(t T, h string, dsn string, schemas []string, eval schemahcl.Evaluator) { err := initCLI() f := filepath.Join(t.TempDir(), "schema.hcl") err = os.WriteFile(f, []byte(h), 0644) require.NoError(t, err) require.NoError(t, err) var expected schema.Realm err = evalBytes([]byte(h), &expected, eval) require.NoError(t, err) cmd := exec.Command("go", "run", "ariga.io/atlas/cmd/atlas", "schema", "apply", "-f", f, "-d", dsn, "-s", strings.Join(schemas, ","), ) stdout, stderr := bytes.NewBuffer(nil), bytes.NewBuffer(nil) cmd.Stderr = stderr cmd.Stdout = stdout stdin, err := cmd.StdinPipe() require.NoError(t, err) defer stdin.Close() _, err = io.WriteString(stdin, "\n") require.NoError(t, cmd.Run(), stderr.String()) require.Contains(t, stdout.String(), `-- Add new schema named "test2"`) } func testCLIMultiSchemaInspect(t T, h string, dsn string, schemas []string, eval schemahcl.Evaluator) { err := initCLI() require.NoError(t, err) var expected schema.Realm err = evalBytes([]byte(h), &expected, eval) require.NoError(t, err) t.applyRealmHcl(h) cmd := exec.Command("go", "run", "ariga.io/atlas/cmd/atlas", "schema", "inspect", "-d", dsn, "-s", strings.Join(schemas, ","), ) stdout, stderr := bytes.NewBuffer(nil), bytes.NewBuffer(nil) cmd.Stderr = stderr cmd.Stdout = stdout require.NoError(t, cmd.Run(), stderr.String()) var actual schema.Realm err = evalBytes(stdout.Bytes(), &actual, eval) require.NoError(t, err) require.Empty(t, stderr.String()) require.Equal(t, expected, actual) } func testCLISchemaApply(t T, h string, dsn string, args ...string) { err := initCLI() require.NoError(t, err) t.dropTables("users") f := filepath.Join(t.TempDir(), "schema.hcl") err = os.WriteFile(f, []byte(h), 0644) require.NoError(t, err) runArgs := []string{ "run", "ariga.io/atlas/cmd/atlas", "schema", 
"apply", "-u", dsn, "-f", f, "--dev-url", dsn, } runArgs = append(runArgs, args...) cmd := exec.Command("go", runArgs...) stdout, stderr := bytes.NewBuffer(nil), bytes.NewBuffer(nil) cmd.Stderr = stderr cmd.Stdout = stdout stdin, err := cmd.StdinPipe() require.NoError(t, err) defer stdin.Close() _, err = io.WriteString(stdin, "\n") require.NoError(t, err) require.NoError(t, cmd.Run(), stderr.String(), stdout.String()) require.Empty(t, stderr.String(), stderr.String()) require.Contains(t, stdout.String(), "-- Planned") u := t.loadUsers() require.NotNil(t, u) } func testCLISchemaApplyDry(t T, h string, dsn string) { err := initCLI() require.NoError(t, err) t.dropTables("users") f := filepath.Join(t.TempDir(), "schema.hcl") err = os.WriteFile(f, []byte(h), 0644) require.NoError(t, err) cmd := exec.Command("go", "run", "ariga.io/atlas/cmd/atlas", "schema", "apply", "-d", dsn, "-f", f, "--dry-run", ) stdout, stderr := bytes.NewBuffer(nil), bytes.NewBuffer(nil) cmd.Stderr = stderr cmd.Stdout = stdout stdin, err := cmd.StdinPipe() require.NoError(t, err) defer stdin.Close() _, err = io.WriteString(stdin, "\n") require.NoError(t, err) require.NoError(t, cmd.Run(), stderr.String(), stdout.String()) require.Empty(t, stderr.String(), stderr.String()) require.Contains(t, stdout.String(), "-- Planned") require.NotContains(t, stdout.String(), "Are you sure?", "dry run should not prompt") realm := t.loadRealm() _, ok := realm.Schemas[0].Table("users") require.False(t, ok, "expected users table not to be created") } func testCLISchemaApplyAutoApprove(t T, h string, dsn string, args ...string) { err := initCLI() require.NoError(t, err) t.dropTables("users") f := filepath.Join(t.TempDir(), "schema.hcl") err = os.WriteFile(f, []byte(h), 0644) require.NoError(t, err) runArgs := []string{ "run", "ariga.io/atlas/cmd/atlas", "schema", "apply", "-d", dsn, "-f", f, "--auto-approve", } runArgs = append(runArgs, args...) cmd := exec.Command("go", runArgs...) 
stdout, stderr := bytes.NewBuffer(nil), bytes.NewBuffer(nil) cmd.Stderr = stderr cmd.Stdout = stdout require.NoError(t, err) require.NoError(t, cmd.Run(), stderr.String(), stdout.String()) require.Empty(t, stderr.String(), stderr.String()) require.Contains(t, stdout.String(), "-- Planned") u := t.loadUsers() require.NotNil(t, u) } func testCLISchemaDiff(t T, dsn string) { err := initCLI() require.NoError(t, err) t.dropTables("users") cmd := exec.Command("go", "run", "ariga.io/atlas/cmd/atlas", "schema", "diff", "--from", dsn, "--to", dsn, ) stdout, stderr := bytes.NewBuffer(nil), bytes.NewBuffer(nil) cmd.Stderr = stderr cmd.Stdout = stdout require.NoError(t, cmd.Run(), stderr.String(), stdout.String()) require.Empty(t, stderr.String(), stderr.String()) require.Contains(t, stdout.String(), "Schemas are synced, no changes to be made.") } func ensureNoChange(t T, tables ...*schema.Table) { realm := t.loadRealm() require.Equal(t, len(realm.Schemas[0].Tables), len(tables)) for i := range tables { tt, ok := realm.Schemas[0].Table(tables[i].Name) require.True(t, ok) changes := t.diff(tt, tables[i]) require.Emptyf(t, changes, "changes should be empty for table %s, but instead was %#v", tt.Name, changes) } } func sanity(c *ent.Client) { ctx := context.Background() u := c.User.Create(). SetName("foo"). SetAge(20). AddPets( c.Pet.Create().SetName("pedro").SaveX(ctx), c.Pet.Create().SetName("xabi").SaveX(ctx), ). AddFiles( c.File.Create().SetName("a").SetSize(10).SaveX(ctx), c.File.Create().SetName("b").SetSize(20).SaveX(ctx), ). SaveX(ctx) c.Group.Create(). SetName("Github"). SetExpire(time.Now()). AddUsers(u). SetInfo(c.GroupInfo.Create().SetDesc("desc").SaveX(ctx)). 
SaveX(ctx) } func testAdvisoryLock(t *testing.T, l schema.Locker) { t.Run("One", func(t *testing.T) { unlock, err := l.Lock(context.Background(), "migrate", 0) require.NoError(t, err) _, err = l.Lock(context.Background(), "migrate", 0) require.Equal(t, schema.ErrLocked, err) require.NoError(t, unlock()) }) t.Run("Multi", func(t *testing.T) { var unlocks []schema.UnlockFunc for _, name := range []string{"a", "b", "c"} { unlock, err := l.Lock(context.Background(), name, 0) require.NoError(t, err) unlocks = append(unlocks, unlock) } for _, unlock := range unlocks { require.NoError(t, unlock()) } }) } func testExecutor(t T) { usersT, postsT := t.users(), t.posts() petsT := &schema.Table{ Name: "pets", Schema: usersT.Schema, Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, {Name: "owner_id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}, Null: true}}, }, } petsT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: postsT.Columns[0]}}} petsT.ForeignKeys = []*schema.ForeignKey{ {Symbol: "owner_id", Table: petsT, Columns: petsT.Columns[1:], RefTable: usersT, RefColumns: usersT.Columns[:1]}, } t.dropTables(petsT.Name, postsT.Name, usersT.Name) t.Cleanup(func() { t.revisionsStorage().(*rrw).clean() }) dir, err := migrate.NewLocalDir(t.TempDir()) require.NoError(t, err) f, err := migrate.NewTemplateFormatter( template.Must(template.New("").Parse("{{ .Name }}.sql")), template.Must(template.New("").Parse( `{{ range .Changes }}{{ with .Comment }}-- {{ println . 
}}{{ end }}{{ printf "%s;\n" .Cmd }}{{ end }}`, )), ) require.NoError(t, err) pl := migrate.NewPlanner(t.driver(), dir, migrate.PlanFormat(f)) require.NoError(t, err) require.NoError(t, pl.WritePlan(plan(t, "1_users", &schema.AddTable{T: usersT}))) require.NoError(t, pl.WritePlan(plan(t, "2_posts", &schema.AddTable{T: postsT}))) require.NoError(t, pl.WritePlan(plan(t, "3_pets", &schema.AddTable{T: petsT}))) ex, err := migrate.NewExecutor(t.driver(), dir, t.revisionsStorage()) require.NoError(t, err) require.NoError(t, ex.ExecuteN(context.Background(), 2)) // usersT and postsT require.Len(t, *t.revisionsStorage().(*rrw), 2) ensureNoChange(t, postsT, usersT) require.NoError(t, ex.ExecuteN(context.Background(), 1)) // petsT require.Len(t, *t.revisionsStorage().(*rrw), 3) ensureNoChange(t, petsT, postsT, usersT) require.ErrorIs(t, ex.ExecuteN(context.Background(), 1), migrate.ErrNoPendingFiles) } func plan(t T, name string, changes ...schema.Change) *migrate.Plan { p, err := t.driver().PlanChanges(context.Background(), name, changes) require.NoError(t, err) return p } type rrw []*migrate.Revision func (r *rrw) Ident() *migrate.TableIdent { return &migrate.TableIdent{} } func (r *rrw) WriteRevision(_ context.Context, rev *migrate.Revision) error { for i, rev2 := range *r { if rev2.Version == rev.Version { (*r)[i] = rev return nil } } *r = append(*r, rev) return nil } func (r *rrw) ReadRevision(_ context.Context, v string) (*migrate.Revision, error) { for _, rev := range *r { if rev.Version == v { return rev, nil } } return nil, migrate.ErrRevisionNotExist } func (r *rrw) DeleteRevision(_ context.Context, v string) error { i := -1 for j, r := range *r { if r.Version == v { i = j break } } if i == -1 { return nil } copy((*r)[i:], (*r)[i+1:]) *r = (*r)[:len(*r)-1] return nil } func (r *rrw) ReadRevisions(context.Context) ([]*migrate.Revision, error) { return *r, nil } func (r *rrw) clean() { *r = []*migrate.Revision{} } var _ migrate.RevisionReadWriter = (*rrw)(nil) func 
buildCmd(t *testing.T) (string, error) { td := t.TempDir() if b, err := exec.Command("go", "build", "-o", td, "ariga.io/atlas/cmd/atlas").CombinedOutput(); err != nil { return "", fmt.Errorf("%w: %s", err, b) } return filepath.Join(td, "atlas"), nil } func evalBytes(b []byte, v any, ev schemahcl.Evaluator) error { p := hclparse.NewParser() if _, diag := p.ParseHCL(b, ""); diag.HasErrors() { return diag } return ev.Eval(p, v, nil) } atlas-0.7.2/internal/integration/mysql_test.go000066400000000000000000001417071431455511600215350ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package integration import ( "context" "database/sql" "fmt" "log" "strings" "sync" "testing" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/mysql" "ariga.io/atlas/sql/schema" "entgo.io/ent/dialect" _ "github.com/go-sql-driver/mysql" "github.com/stretchr/testify/require" ) type myTest struct { *testing.T db *sql.DB drv migrate.Driver rrw migrate.RevisionReadWriter version string port int once sync.Once } var myTests = map[string]*myTest{ "mysql56": {port: 3306}, "mysql57": {port: 3307}, "mysql8": {port: 3308}, "maria107": {port: 4306}, "maria102": {port: 4307}, "maria103": {port: 4308}, } func myRun(t *testing.T, fn func(*myTest)) { for version, tt := range myTests { if flagVersion == "" || flagVersion == version { t.Run(version, func(t *testing.T) { tt.once.Do(func() { var err error tt.version = version tt.rrw = &rrw{} tt.db, err = sql.Open("mysql", fmt.Sprintf("root:pass@tcp(localhost:%d)/test?parseTime=True", tt.port)) if err != nil { log.Fatalln(err) } dbs = append(dbs, tt.db) // close connection after all tests have been run tt.drv, err = mysql.Open(tt.db) if err != nil { log.Fatalln(err) } }) tt := &myTest{T: t, db: tt.db, drv: tt.drv, version: version, port: tt.port, rrw: tt.rrw} fn(tt) }) } } } func 
TestMySQL_Executor(t *testing.T) { myRun(t, func(t *myTest) { testExecutor(t) }) } func TestMySQL_AddDropTable(t *testing.T) { myRun(t, func(t *myTest) { testAddDrop(t) }) } func TestMySQL_Relation(t *testing.T) { myRun(t, func(t *myTest) { testRelation(t) }) } func TestMySQL_AddIndexedColumns(t *testing.T) { myRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) usersT.Columns = append(usersT.Columns, &schema.Column{ Name: "a", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, Default: &schema.RawExpr{X: "10"}, }, &schema.Column{ Name: "b", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, Default: &schema.RawExpr{X: "10"}, }, &schema.Column{ Name: "c", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, Default: &schema.RawExpr{X: "10"}, }) parts := usersT.Columns[len(usersT.Columns)-3:] usersT.Indexes = append(usersT.Indexes, &schema.Index{ Unique: true, Name: "a_b_c_unique", Parts: []*schema.IndexPart{{C: parts[0]}, {C: parts[1]}, {C: parts[2]}}, }) changes := t.diff(t.loadUsers(), usersT) require.NotEmpty(t, changes, "usersT contains 2 new columns and 1 new index") t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) // In MySQL, dropping a column should remove it from the key. // However, on MariaDB an explicit DROP/ADD INDEX is required. if t.mariadb() { idx, ok := usersT.Index("a_b_c_unique") require.True(t, ok) idx.Parts = idx.Parts[:len(idx.Parts)-1] } usersT.Columns = usersT.Columns[:len(usersT.Columns)-1] changes = t.diff(t.loadUsers(), usersT) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, t.loadUsers()) // Dropping a column from both table and index. 
usersT = t.loadUsers() idx, ok := usersT.Index("a_b_c_unique") require.True(t, ok) require.Len(t, idx.Parts, 2) usersT.Columns = usersT.Columns[:len(usersT.Columns)-1] idx.Parts = idx.Parts[:len(idx.Parts)-1] changes = t.diff(t.loadUsers(), usersT) require.Len(t, changes, 2) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, t.loadUsers()) // Dropping a column should remove // single-column keys as well. usersT = t.loadUsers() idx, ok = usersT.Index("a_b_c_unique") require.True(t, ok) require.Len(t, idx.Parts, 1) usersT.Columns = usersT.Columns[:len(usersT.Columns)-1] changes = t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, t.loadUsers()) idx, ok = t.loadUsers().Index("a_b_c_unique") require.False(t, ok) }) } func TestMySQL_AddColumns(t *testing.T) { myRun(t, func(t *myTest) { usersT := t.users() t.dropTables(usersT.Name) t.migrate(&schema.AddTable{T: usersT}) usersT.Columns = append( usersT.Columns, &schema.Column{Name: "a", Type: &schema.ColumnType{Raw: "tinyblob", Type: &schema.BinaryType{T: "tinyblob"}}}, &schema.Column{Name: "b", Type: &schema.ColumnType{Raw: "mediumblob", Type: &schema.BinaryType{T: "mediumblob"}}}, &schema.Column{Name: "c", Type: &schema.ColumnType{Raw: "blob", Type: &schema.BinaryType{T: "blob"}}}, &schema.Column{Name: "d", Type: &schema.ColumnType{Raw: "longblob", Type: &schema.BinaryType{T: "longblob"}}}, &schema.Column{Name: "e", Type: &schema.ColumnType{Raw: "binary", Type: &schema.BinaryType{T: "binary"}}}, &schema.Column{Name: "f", Type: &schema.ColumnType{Raw: "varbinary(255)", Type: &schema.BinaryType{T: "varbinary(255)"}}, Default: &schema.Literal{V: "foo"}}, &schema.Column{Name: "g", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar", Size: 255}}}, &schema.Column{Name: "h", Type: &schema.ColumnType{Raw: "varchar(255)", Type: &schema.StringType{T: "varchar(255)"}}}, &schema.Column{Name: "i", Type: 
&schema.ColumnType{Raw: "tinytext", Type: &schema.StringType{T: "tinytext"}}}, &schema.Column{Name: "j", Type: &schema.ColumnType{Raw: "mediumtext", Type: &schema.StringType{T: "mediumtext"}}}, &schema.Column{Name: "k", Type: &schema.ColumnType{Raw: "text", Type: &schema.StringType{T: "text"}}}, &schema.Column{Name: "l", Type: &schema.ColumnType{Raw: "longtext", Type: &schema.StringType{T: "longtext"}}}, &schema.Column{Name: "m", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "decimal", Precision: 10, Scale: 6}}}, &schema.Column{Name: "m1", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "decimal"}}}, &schema.Column{Name: "m2", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "decimal", Precision: 2}}}, &schema.Column{Name: "n", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric", Precision: 10, Scale: 2}}}, &schema.Column{Name: "n1", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric"}}}, &schema.Column{Name: "n2", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric", Precision: 2}}}, &schema.Column{Name: "o", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float", Precision: 2}}}, &schema.Column{Name: "p", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double", Precision: 14}}}, &schema.Column{Name: "q", Type: &schema.ColumnType{Type: &schema.FloatType{T: "real", Precision: 14}}}, &schema.Column{Name: "r", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}, &schema.Column{Name: "s", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, &schema.Column{Name: "t", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "smallint"}}}, &schema.Column{Name: "u", Type: &schema.ColumnType{Type: &schema.EnumType{T: "enum", Values: []string{"a", "b", "c"}}}}, &schema.Column{Name: "v", Type: &schema.ColumnType{Type: &schema.StringType{T: "char(36)"}}}, &schema.Column{Name: "x", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "line"}}}, &schema.Column{Name: "y", Type: 
&schema.ColumnType{Type: &schema.SpatialType{T: "point"}}}, &schema.Column{Name: "z", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp"}}, Default: &schema.RawExpr{X: "CURRENT_TIMESTAMP"}}, ) changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 28) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) }) } func TestMySQL_ColumnInt(t *testing.T) { t.Run("ChangeType", func(t *testing.T) { myRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) for _, typ := range []string{"tinyint", "smallint", "mediumint", "bigint"} { usersT.Columns[0].Type.Type = &schema.IntegerType{T: typ} changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) } }) }) t.Run("ChangeDefault", func(t *testing.T) { myRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}, Default: &schema.RawExpr{X: "1"}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) for _, x := range []string{"2", "'3'", "10.1"} { usersT.Columns[0].Default.(*schema.RawExpr).X = x changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) } }) }) } func TestMySQL_ColumnString(t *testing.T) { t.Run("ChangeType", func(t *testing.T) { myRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(20)"}}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) for _, typ := range []string{"varchar(255)", "char(120)", "tinytext", "mediumtext", 
"longtext"} { usersT.Columns[0].Type.Type = &schema.StringType{T: typ} changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) } }) }) t.Run("AddWithDefault", func(t *testing.T) { myRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "a", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(255)"}}, Default: &schema.RawExpr{X: "hello"}}, {Name: "b", Type: &schema.ColumnType{Type: &schema.StringType{T: "char(255)"}}, Default: &schema.RawExpr{X: "'world'"}}, }, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) }) }) t.Run("ChangeDefault", func(t *testing.T) { myRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(255)"}}, Default: &schema.RawExpr{X: "hello"}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) for _, x := range []string{"2", "'3'", "'world'"} { usersT.Columns[0].Default.(*schema.RawExpr).X = x changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) } }) }) } func TestMySQL_ColumnBool(t *testing.T) { t.Run("Add", func(t *testing.T) { myRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "a", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}}, {Name: "b", Type: &schema.ColumnType{Type: &schema.BoolType{T: "boolean"}}}, {Name: "c", Type: &schema.ColumnType{Type: &schema.BoolType{T: "tinyint"}}}, {Name: "d", Type: &schema.ColumnType{Type: &schema.BoolType{T: "tinyint(1)"}}}, }, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) }) }) t.Run("AddWithDefault", func(t *testing.T) { myRun(t, func(t *myTest) { usersT := 
&schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "a", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "1"}}, {Name: "b", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "0"}}, {Name: "c", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "'1'"}}, {Name: "d", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "'0'"}}, {Name: "e", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "true"}}, {Name: "f", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "false"}}, {Name: "g", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "TRUE"}}, {Name: "h", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "FALSE"}}, }, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) }) }) t.Run("ChangeDefault", func(t *testing.T) { myRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "a", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "1"}}, }, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) // Change default from "true" to "false" to "true". 
for _, x := range []string{"false", "true"} { usersT.Columns[0].Default.(*schema.RawExpr).X = x changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) } }) }) t.Run("ChangeNull", func(t *testing.T) { myRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "a", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}, Null: true}}, }, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) usersT.Columns[0].Type.Null = false changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) }) }) } func TestMySQL_ColumnCheck(t *testing.T) { myRun(t, func(t *myTest) { // Checks are not supported in all versions. if t.version == "mysql56" || t.version == "mysql57" { t.Skip() } usersT := &schema.Table{ Name: "users", Attrs: []schema.Attr{schema.NewCheck().SetName("users_c_check").SetExpr("c > 5")}, Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}, {Name: "c", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}, }, } t.dropTables(usersT.Name) t.migrate(&schema.AddTable{T: usersT}) ensureNoChange(t, usersT) }) } func TestMySQL_ForeignKey(t *testing.T) { t.Run("ChangeAction", func(t *testing.T) { myRun(t, func(t *myTest) { usersT, postsT := t.users(), t.posts() t.dropTables(postsT.Name, usersT.Name) t.migrate(&schema.AddTable{T: usersT}, &schema.AddTable{T: postsT}) ensureNoChange(t, postsT, usersT) postsT = t.loadPosts() fk, ok := postsT.ForeignKey("author_id") require.True(t, ok) fk.OnUpdate = schema.SetNull fk.OnDelete = schema.Cascade changes := t.diff(t.loadPosts(), postsT) require.Len(t, changes, 1) modifyF, ok := changes[0].(*schema.ModifyForeignKey) require.True(t, ok) require.True(t, modifyF.Change == 
schema.ChangeUpdateAction|schema.ChangeDeleteAction) t.migrate(&schema.ModifyTable{T: postsT, Changes: changes}) ensureNoChange(t, postsT, usersT) }) }) t.Run("UnsetNull", func(t *testing.T) { myRun(t, func(t *myTest) { usersT, postsT := t.users(), t.posts() t.dropTables(postsT.Name, usersT.Name) fk, ok := postsT.ForeignKey("author_id") require.True(t, ok) fk.OnDelete = schema.SetNull fk.OnUpdate = schema.SetNull t.migrate(&schema.AddTable{T: usersT}, &schema.AddTable{T: postsT}) ensureNoChange(t, postsT, usersT) postsT = t.loadPosts() c, ok := postsT.Column("author_id") require.True(t, ok) c.Type.Null = false fk, ok = postsT.ForeignKey("author_id") require.True(t, ok) fk.OnUpdate = schema.NoAction fk.OnDelete = schema.NoAction changes := t.diff(t.loadPosts(), postsT) require.Len(t, changes, 2) modifyC, ok := changes[0].(*schema.ModifyColumn) require.True(t, ok) require.True(t, modifyC.Change == schema.ChangeNull) modifyF, ok := changes[1].(*schema.ModifyForeignKey) require.True(t, ok) require.True(t, modifyF.Change == schema.ChangeUpdateAction|schema.ChangeDeleteAction) t.migrate(&schema.ModifyTable{T: postsT, Changes: changes}) ensureNoChange(t, postsT, usersT) }) }) t.Run("AddDrop", func(t *testing.T) { myRun(t, func(t *myTest) { usersT := t.users() t.dropTables(usersT.Name) t.migrate(&schema.AddTable{T: usersT}) ensureNoChange(t, usersT) // Add foreign key. 
usersT.Columns = append(usersT.Columns, &schema.Column{ Name: "spouse_id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, }) usersT.ForeignKeys = append(usersT.ForeignKeys, &schema.ForeignKey{ Symbol: "spouse_id", Table: usersT, Columns: usersT.Columns[len(usersT.Columns)-1:], RefTable: usersT, RefColumns: usersT.Columns[:1], OnDelete: schema.NoAction, }) changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 2) addC, ok := changes[0].(*schema.AddColumn) require.True(t, ok) require.Equal(t, "spouse_id", addC.C.Name) addF, ok := changes[1].(*schema.AddForeignKey) require.True(t, ok) require.Equal(t, "spouse_id", addF.F.Symbol) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) // Drop foreign keys. usersT.Columns = usersT.Columns[:len(usersT.Columns)-1] usersT.ForeignKeys = usersT.ForeignKeys[:len(usersT.ForeignKeys)-1] changes = t.diff(t.loadUsers(), usersT) require.Len(t, changes, 2) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) }) }) } func TestMySQL_Ent(t *testing.T) { myRun(t, func(t *myTest) { testEntIntegration(t, dialect.MySQL, t.db) }) } func TestMySQL_AdvisoryLock(t *testing.T) { myRun(t, func(t *myTest) { testAdvisoryLock(t.T, t.drv.(schema.Locker)) }) } func TestMySQL_HCL(t *testing.T) { full := ` schema "test" { } table "users" { schema = schema.test column "id" { type = int } primary_key { columns = [table.users.column.id] } } table "posts" { schema = schema.test column "id" { type = int } column "author_id" { type = int } foreign_key "author" { columns = [ table.posts.column.author_id, ] ref_columns = [ table.users.column.id, ] } primary_key { columns = [table.users.column.id] } } ` empty := ` schema "test" { } ` myRun(t, func(t *myTest) { testHCLIntegration(t, full, empty) }) } func TestMySQL_Snapshot(t *testing.T) { myRun(t, func(t *myTest) { db, err := sql.Open("mysql", fmt.Sprintf("root:pass@tcp(localhost:%d)/", 
t.port)) require.NoError(t, err) t.Cleanup(func() { require.NoError(t, db.Close()) }) drv, err := mysql.Open(db) require.NoError(t, err) _, err = drv.(migrate.Snapshoter).Snapshot(context.Background()) require.ErrorAs(t, err, &migrate.NotCleanError{}) r, err := t.driver().InspectRealm(context.Background(), nil) require.NoError(t, err) restore, err := t.driver().(migrate.Snapshoter).Snapshot(context.Background()) require.NoError(t, err) // connected to test schema t.migrate(&schema.AddTable{T: schema.NewTable("my_table").AddColumns( schema.NewIntColumn("col_1", "integer").SetNull(true), schema.NewIntColumn("col_2", "bigint"), )}) t.Cleanup(func() { t.dropTables("my_table") }) require.NoError(t, restore(context.Background())) r1, err := t.driver().InspectRealm(context.Background(), nil) require.NoError(t, err) diff, err := t.driver().RealmDiff(r1, r) require.NoError(t, err) require.Zero(t, diff) }) } func TestMySQL_CLI_MigrateApplyBC(t *testing.T) { myRun(t, func(t *myTest) { testCLIMigrateApplyBC(t, "mysql") }) } func TestMySQL_CLI(t *testing.T) { h := ` schema "test" { charset = "%s" collation = "%s" } table "users" { schema = schema.test column "id" { type = int } primary_key { columns = [table.users.column.id] } }` t.Run("SchemaInspect", func(t *testing.T) { myRun(t, func(t *myTest) { attrs := t.defaultAttrs() charset, collate := attrs[0].(*schema.Charset), attrs[1].(*schema.Collation) testCLISchemaInspect(t, fmt.Sprintf(h, charset.V, collate.V), t.url("test"), mysql.EvalHCL) }) }) t.Run("SchemaApply", func(t *testing.T) { myRun(t, func(t *myTest) { attrs := t.defaultAttrs() charset, collate := attrs[0].(*schema.Charset), attrs[1].(*schema.Collation) testCLISchemaApply(t, fmt.Sprintf(h, charset.V, collate.V), t.url("test")) }) }) t.Run("SchemaApplyWithVars", func(t *testing.T) { h := ` variable "tenant" { type = string } schema "tenant" { name = var.tenant } table "users" { schema = schema.tenant column "id" { type = int } } ` myRun(t, func(t *myTest) { 
testCLISchemaApply(t, h, t.url("test"), "--var", "tenant=test") }) }) t.Run("SchemaApplyDryRun", func(t *testing.T) { myRun(t, func(t *myTest) { attrs := t.defaultAttrs() charset, collate := attrs[0].(*schema.Charset), attrs[1].(*schema.Collation) testCLISchemaApplyDry(t, fmt.Sprintf(h, charset.V, collate.V), t.url("test")) }) }) t.Run("SchemaDiffRun", func(t *testing.T) { myRun(t, func(t *myTest) { testCLISchemaDiff(t, t.url("test")) }) }) t.Run("SchemaApplyAutoApprove", func(t *testing.T) { myRun(t, func(t *myTest) { attrs := t.defaultAttrs() charset, collate := attrs[0].(*schema.Charset), attrs[1].(*schema.Collation) testCLISchemaApplyAutoApprove(t, fmt.Sprintf(h, charset.V, collate.V), t.url("test")) }) }) } func TestMySQL_CLI_MultiSchema(t *testing.T) { h := ` schema "test" { charset = "%s" collation = "%s" } table "users" { schema = schema.test column "id" { type = int } primary_key { columns = [table.users.column.id] } } schema "test2" { charset = "%s" collation = "%s" } table "users" { schema = schema.test2 column "id" { type = int } primary_key { columns = [table.users.column.id] } }` t.Run("SchemaInspect", func(t *testing.T) { myRun(t, func(t *myTest) { t.dropSchemas("test2") t.dropTables("users") attrs := t.defaultAttrs() charset, collate := attrs[0].(*schema.Charset), attrs[1].(*schema.Collation) testCLIMultiSchemaInspect(t, fmt.Sprintf(h, charset.V, collate.V, charset.V, collate.V), t.url(""), []string{"test", "test2"}, mysql.EvalHCL) }) }) t.Run("SchemaApply", func(t *testing.T) { myRun(t, func(t *myTest) { t.dropSchemas("test2") t.dropTables("users") attrs := t.defaultAttrs() charset, collate := attrs[0].(*schema.Charset), attrs[1].(*schema.Collation) testCLIMultiSchemaApply(t, fmt.Sprintf(h, charset.V, collate.V, charset.V, collate.V), t.url(""), []string{"test", "test2"}, mysql.EvalHCL) }) }) } func TestMySQL_HCL_Realm(t *testing.T) { myRun(t, func(t *myTest) { t.dropSchemas("second") realm := t.loadRealm() hcl, err := mysql.MarshalHCL(realm) 
require.NoError(t, err) wa := string(hcl) + ` schema "second" { } ` t.applyRealmHcl(wa) realm, err = t.drv.InspectRealm(context.Background(), &schema.InspectRealmOption{}) require.NoError(t, err) _, ok := realm.Schema("test") require.True(t, ok) _, ok = realm.Schema("second") require.True(t, ok) }) } func TestMySQL_HCL_ForeignKeyCrossSchema(t *testing.T) { const expected = `table "credit_cards" { schema = schema.financial column "id" { null = false type = int } column "user_id" { null = false type = int } primary_key { columns = [column.id] } foreign_key "user_id_fkey" { columns = [column.user_id] ref_columns = [table.users.users.column.id] on_update = NO_ACTION on_delete = NO_ACTION } index "user_id_fkey" { columns = [column.user_id] } } table "financial" "users" { schema = schema.financial column "id" { null = false type = int } } table "users" "users" { schema = schema.users column "id" { null = false type = int } column "email" { null = false type = varchar(255) } primary_key { columns = [column.id] } } schema "financial" { charset = "utf8mb4" collate = "utf8mb4_general_ci" } schema "users" { charset = "utf8mb4" collate = "utf8mb4_general_ci" } ` myRun(t, func(t *myTest) { t.dropSchemas("financial", "users") realm := t.loadRealm() hcl, err := mysql.MarshalHCL(realm) require.NoError(t, err) t.applyRealmHcl(string(hcl) + "\n" + expected) realm, err = t.drv.InspectRealm(context.Background(), &schema.InspectRealmOption{Schemas: []string{"users", "financial"}}) require.NoError(t, err) actual, err := mysql.MarshalHCL(realm) require.NoError(t, err) require.Equal(t, expected, string(actual)) }) } func TestMySQL_DefaultsHCL(t *testing.T) { n := "atlas_defaults" myRun(t, func(t *myTest) { ddl := ` create table atlas_defaults ( string varchar(255) default "hello_world", quoted varchar(100) default 'never say "never"', tBit bit(10) default b'10101', ts timestamp default CURRENT_TIMESTAMP, number int default 42 ) ` t.dropTables(n) _, err := t.db.Exec(ddl) require.NoError(t, 
err) realm := t.loadRealm() spec, err := mysql.MarshalHCL(realm.Schemas[0]) require.NoError(t, err) var s schema.Realm err = mysql.EvalHCLBytes(spec, &s, nil) require.NoError(t, err) t.dropTables(n) t.applyHcl(string(spec)) ensureNoChange(t, realm.Schemas[0].Tables[0]) }) } func TestMySQL_Sanity(t *testing.T) { n := "atlas_types_sanity" t.Run("Common", func(t *testing.T) { ddl := ` create table atlas_types_sanity ( tBit bit(10) default b'100' null, tInt int(10) default 4 not null, tTinyInt tinyint(10) default 8 null, tSmallInt smallint(10) default 2 null, tMediumInt mediumint(10) default 11 null, tBigInt bigint(10) default 4 null, tDecimal decimal default 4 null, tNumeric numeric default 4 not null, tFloat float(10, 0) default 4 null, tDouble double(10, 0) default 4 null, tReal double(10, 0) default 4 null, tTimestamp timestamp default CURRENT_TIMESTAMP null, tTimestampFraction timestamp(6) default CURRENT_TIMESTAMP(6) null, tTimestampOnUpdate timestamp default CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP null, tTimestampFractionOnUpdate timestamp(6) default CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6) null, tDate date null, tTime time null, tDateTime datetime null, tYear year null, tVarchar varchar(10) default 'Titan' null, tChar char(25) default 'Olimpia' not null, tVarBinary varbinary(30) default 'Titan' null, tBinary binary(5) default 'Titan' null, tBlob blob(5) default null, tTinyBlob tinyblob null, tMediumBlob mediumblob default null, tLongBlob longblob default null, tText text(13) default null, tTinyText tinytext default null, tMediumText mediumtext default null, tLongText longtext default null, tEnum enum('a','b') default null, tSet set('a','b') default null, tGeometry geometry default null, tPoint point default null, tMultiPoint multipoint default null, tLineString linestring default null, tMultiLineString multilinestring default null, tPolygon polygon default null, tMultiPolygon multipolygon default null, tGeometryCollection geometrycollection 
default null ) CHARSET = latin1 COLLATE latin1_swedish_ci; ` myRun(t, func(t *myTest) { t.dropTables(n) _, err := t.db.Exec(ddl) require.NoError(t, err) realm := t.loadRealm() require.Len(t, realm.Schemas, 1) ts, ok := realm.Schemas[0].Table(n) require.True(t, ok) expected := schema.Table{ Name: n, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, }, Schema: realm.Schemas[0], Columns: []*schema.Column{ { Name: "tBit", Type: &schema.ColumnType{Type: &mysql.BitType{T: "bit", Size: 10}, Raw: "bit(10)", Null: true}, Default: &schema.Literal{V: "b'100'"}, }, { Name: "tInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int", Unsigned: false}, Raw: t.valueByVersion(map[string]string{"mysql8": "int"}, "int(10)"), Null: false}, Default: &schema.Literal{V: "4"}, }, { Name: "tTinyInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "tinyint", Unsigned: false}, Raw: t.valueByVersion(map[string]string{"mysql8": "tinyint"}, "tinyint(10)"), Null: true}, Default: &schema.Literal{V: "8"}, }, { Name: "tSmallInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "smallint", Unsigned: false}, Raw: t.valueByVersion(map[string]string{"mysql8": "smallint"}, "smallint(10)"), Null: true}, Default: &schema.Literal{V: "2"}, }, { Name: "tMediumInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "mediumint", Unsigned: false}, Raw: t.valueByVersion(map[string]string{"mysql8": "mediumint"}, "mediumint(10)"), Null: true}, Default: &schema.Literal{V: "11"}, }, { Name: "tBigInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint", Unsigned: false}, Raw: t.valueByVersion(map[string]string{"mysql8": "bigint"}, "bigint(10)"), Null: true}, Default: &schema.Literal{V: "4"}, }, { Name: "tDecimal", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "decimal", Precision: 10}, Raw: "decimal(10,0)", Null: true}, Default: &schema.Literal{V: "4"}, }, { Name: "tNumeric", Type: &schema.ColumnType{Type: 
&schema.DecimalType{T: "decimal", Precision: 10}, Raw: "decimal(10,0)", Null: false}, Default: &schema.Literal{V: "4"}, }, { Name: "tFloat", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float", Precision: 10}, Raw: "float(10,0)", Null: true}, Default: &schema.Literal{V: "4"}, }, { Name: "tDouble", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double", Precision: 10}, Raw: "double(10,0)", Null: true}, Default: &schema.Literal{V: "4"}, }, { Name: "tReal", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double", Precision: 10}, Raw: "double(10,0)", Null: true}, Default: &schema.Literal{V: "4"}, }, { Name: "tTimestamp", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp"}, Raw: "timestamp", Null: true}, Default: &schema.RawExpr{ X: func() string { if t.mariadb() { return "(current_timestamp())" } return "CURRENT_TIMESTAMP" }(), }, }, { Name: "tTimestampFraction", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp", Precision: intp(6)}, Raw: "timestamp(6)", Null: true}, Default: &schema.RawExpr{ X: func() string { if t.mariadb() { return "(current_timestamp(6))" } return "CURRENT_TIMESTAMP(6)" }(), }, }, { Name: "tTimestampOnUpdate", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp"}, Raw: "timestamp", Null: true}, Default: &schema.RawExpr{ X: func() string { if t.mariadb() { return "(current_timestamp())" } return "CURRENT_TIMESTAMP" }(), }, Attrs: []schema.Attr{ &mysql.OnUpdate{ A: func() string { if t.mariadb() { return "current_timestamp()" } return "CURRENT_TIMESTAMP" }(), }, }, }, { Name: "tTimestampFractionOnUpdate", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp", Precision: intp(6)}, Raw: "timestamp(6)", Null: true}, Default: &schema.RawExpr{ X: func() string { if t.mariadb() { return "(current_timestamp(6))" } return "CURRENT_TIMESTAMP(6)" }(), }, Attrs: []schema.Attr{ &mysql.OnUpdate{ A: func() string { if t.mariadb() { return "current_timestamp(6)" } return "CURRENT_TIMESTAMP(6)" }(), }, 
}, }, { Name: "tDate", Type: &schema.ColumnType{Type: &schema.TimeType{T: "date"}, Raw: "date", Null: true}, }, { Name: "tTime", Type: &schema.ColumnType{Type: &schema.TimeType{T: "time"}, Raw: "time", Null: true}, }, { Name: "tDateTime", Type: &schema.ColumnType{Type: &schema.TimeType{T: "datetime"}, Raw: "datetime", Null: true}, }, { Name: "tYear", Type: &schema.ColumnType{ Type: &schema.TimeType{ T: "year", Precision: func() *int { // From MySQL 8.0.19, display width is deprecated in YEAR types. if t.version == "mysql8" { return nil } p := 4 return &p }(), }, Raw: t.valueByVersion(map[string]string{"mysql8": "year"}, "year(4)"), Null: true}, }, { Name: "tVarchar", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar", Size: 10}, Raw: "varchar(10)", Null: true}, Default: &schema.Literal{V: t.quoted("Titan")}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, }, }, { Name: "tChar", Type: &schema.ColumnType{Type: &schema.StringType{T: "char", Size: 25}, Raw: "char(25)", Null: false}, Default: &schema.Literal{V: t.quoted("Olimpia")}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, }, }, { Name: "tVarBinary", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "varbinary", Size: intp(30)}, Raw: "varbinary(30)", Null: true}, Default: &schema.Literal{V: t.valueByVersion(map[string]string{"mysql8": "0x546974616E"}, t.quoted("Titan"))}, }, { Name: "tBinary", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "binary", Size: intp(5)}, Raw: "binary(5)", Null: true}, Default: &schema.Literal{V: t.valueByVersion(map[string]string{"mysql8": "0x546974616E"}, t.quoted("Titan"))}, }, { Name: "tBlob", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "tinyblob"}, Raw: "tinyblob", Null: true}, }, { Name: "tTinyBlob", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "tinyblob"}, Raw: "tinyblob", Null: true}, }, { Name: "tMediumBlob", Type: &schema.ColumnType{Type: 
&schema.BinaryType{T: "mediumblob"}, Raw: "mediumblob", Null: true}, }, { Name: "tLongBlob", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "longblob"}, Raw: "longblob", Null: true}, }, { Name: "tText", Type: &schema.ColumnType{Type: &schema.StringType{T: "tinytext", Size: 0}, Raw: "tinytext", Null: true}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, }, }, { Name: "tTinyText", Type: &schema.ColumnType{Type: &schema.StringType{T: "tinytext", Size: 0}, Raw: "tinytext", Null: true}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, }, }, { Name: "tMediumText", Type: &schema.ColumnType{Type: &schema.StringType{T: "mediumtext", Size: 0}, Raw: "mediumtext", Null: true}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, }, }, { Name: "tLongText", Type: &schema.ColumnType{Type: &schema.StringType{T: "longtext", Size: 0}, Raw: "longtext", Null: true}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, }, }, { Name: "tEnum", Type: &schema.ColumnType{Type: &schema.EnumType{T: "enum", Values: []string{"a", "b"}}, Raw: "enum('a','b')", Null: true}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, }, }, { Name: "tSet", Type: &schema.ColumnType{Type: &mysql.SetType{Values: []string{"a", "b"}}, Raw: "set('a','b')", Null: true}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, }, }, { Name: "tGeometry", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "geometry"}, Raw: "geometry", Null: true}, }, { Name: "tPoint", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "point"}, Raw: "point", Null: true}, }, { Name: "tMultiPoint", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "multipoint"}, Raw: "multipoint", Null: true}, }, { Name: "tLineString", Type: &schema.ColumnType{Type: 
&schema.SpatialType{T: "linestring"}, Raw: "linestring", Null: true}, }, { Name: "tMultiLineString", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "multilinestring"}, Raw: "multilinestring", Null: true}, }, { Name: "tPolygon", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "polygon"}, Raw: "polygon", Null: true}, }, { Name: "tMultiPolygon", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "multipolygon"}, Raw: "multipolygon", Null: true}, }, { Name: "tGeometryCollection", Type: &schema.ColumnType{Type: &schema.SpatialType{T: t.valueByVersion( map[string]string{"mysql8": "geomcollection"}, "geometrycollection")}, Raw: t.valueByVersion(map[string]string{"mysql8": "geomcollection"}, "geometrycollection"), Null: true}, }, }, } rmCreateStmt(ts) require.EqualValues(t, &expected, ts) t.hclDriftTest(n, realm, expected) }) }) t.Run("JSON", func(t *testing.T) { ddl := ` create table atlas_types_sanity ( tJSON json default null ) CHARSET = latin1 COLLATE latin1_swedish_ci; ` myRun(t, func(t *myTest) { if t.version == "mysql56" { return } t.dropTables(n) _, err := t.db.Exec(ddl) require.NoError(t, err) realm := t.loadRealm() require.Len(t, realm.Schemas, 1) ts, ok := realm.Schemas[0].Table(n) require.True(t, ok) expected := schema.Table{ Name: n, Attrs: func() []schema.Attr { if t.version == "maria107" { return []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, &schema.Check{Name: "tJSON", Expr: "json_valid(`tJSON`)"}, } } return []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, } }(), Schema: realm.Schemas[0], Columns: []*schema.Column{ func() *schema.Column { c := &schema.Column{Name: "tJSON", Type: &schema.ColumnType{Type: &schema.JSONType{T: "json"}, Raw: "json", Null: true}} switch t.version { case "maria107": c.Attrs = []schema.Attr{} case "maria102", "maria103": c.Type.Raw = "longtext" c.Type.Type = &schema.StringType{T: "longtext"} c.Attrs = []schema.Attr{ &schema.Charset{V: 
"utf8mb4"}, &schema.Collation{V: "utf8mb4_bin"}, } } return c }(), }, } rmCreateStmt(ts) require.EqualValues(t, &expected, ts) }) }) t.Run("ImplicitIndexes", func(t *testing.T) { myRun(t, func(t *myTest) { testImplicitIndexes(t, t.db) }) }) } func (t *myTest) url(dbname string) string { d := "mysql" pass := ":pass" if t.tidb() { pass = "" } if t.mariadb() { d = "mariadb" } return fmt.Sprintf("%s://root%s@localhost:%d/%s?parseTime=true", d, pass, t.port, dbname) } func (t *myTest) driver() migrate.Driver { return t.drv } func (t *myTest) revisionsStorage() migrate.RevisionReadWriter { return t.rrw } func (t *myTest) applyHcl(spec string) { realm := t.loadRealm() var desired schema.Schema err := mysql.EvalHCLBytes([]byte(spec), &desired, nil) require.NoError(t, err) existing := realm.Schemas[0] require.NoError(t, err) diff, err := t.drv.SchemaDiff(existing, &desired) require.NoError(t, err) err = t.drv.ApplyChanges(context.Background(), diff) require.NoError(t, err) } func (t *myTest) applyRealmHcl(spec string) { realm := t.loadRealm() var desired schema.Realm err := mysql.EvalHCLBytes([]byte(spec), &desired, nil) require.NoError(t, err) diff, err := t.drv.RealmDiff(realm, &desired) require.NoError(t, err) err = t.drv.ApplyChanges(context.Background(), diff) require.NoError(t, err) } func (t *myTest) diff(t1, t2 *schema.Table) []schema.Change { changes, err := t.drv.TableDiff(t1, t2) require.NoError(t, err) return changes } func (t *myTest) migrate(changes ...schema.Change) { err := t.drv.ApplyChanges(context.Background(), changes) require.NoError(t, err) } func (t *myTest) dropTables(names ...string) { t.Cleanup(func() { _, err := t.db.Exec("DROP TABLE IF EXISTS " + strings.Join(names, ", ")) require.NoError(t.T, err, "drop tables %q", names) }) } func (t *myTest) dropSchemas(names ...string) { t.Cleanup(func() { for _, n := range names { _, err := t.db.Exec("DROP DATABASE IF EXISTS " + n) require.NoError(t.T, err, "drop db %q", names) } }) } func (t *myTest) 
realm() *schema.Realm { r := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "test", Attrs: t.defaultAttrs(), }, }, Attrs: t.defaultAttrs(), } r.Schemas[0].Realm = r return r } func (t *myTest) users() *schema.Table { usersT := &schema.Table{ Name: "users", Schema: t.realm().Schemas[0], Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&mysql.AutoIncrement{}}, }, { Name: "x", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}, }, }, Attrs: t.defaultAttrs(), } usersT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: usersT.Columns[0]}}} return usersT } func (t *myTest) posts() *schema.Table { usersT := t.users() postsT := &schema.Table{ Name: "posts", Schema: t.realm().Schemas[0], Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&mysql.AutoIncrement{}}, }, { Name: "author_id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, Default: &schema.RawExpr{X: "10"}, }, { Name: "ctime", Type: &schema.ColumnType{Raw: "timestamp", Type: &schema.TimeType{T: "timestamp"}}, Default: &schema.RawExpr{ X: "CURRENT_TIMESTAMP", }, Attrs: []schema.Attr{ &mysql.OnUpdate{ A: "CURRENT_TIMESTAMP", }, }, }, }, Attrs: t.defaultAttrs(), } postsT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: postsT.Columns[0]}}} postsT.Indexes = []*schema.Index{ {Name: "author_id", Parts: []*schema.IndexPart{{C: postsT.Columns[1]}}}, {Name: "id_author_id_unique", Unique: true, Parts: []*schema.IndexPart{{C: postsT.Columns[1]}, {C: postsT.Columns[0]}}}, } postsT.ForeignKeys = []*schema.ForeignKey{ {Symbol: "author_id", Table: postsT, Columns: postsT.Columns[1:2], RefTable: usersT, RefColumns: usersT.Columns[:1], OnDelete: schema.NoAction}, } return postsT } func (t *myTest) valueByVersion(values map[string]string, defaults string) 
string { if v, ok := values[t.version]; ok { return v } return defaults } func (t *myTest) intByVersion(values map[string]int, defaults int) int { if v, ok := values[t.version]; ok { return v } return defaults } func (t *myTest) quoted(s string) string { c := "\"" if t.mariadb() { c = "'" } return c + s + c } func (t *myTest) loadRealm() *schema.Realm { r, err := t.drv.InspectRealm(context.Background(), &schema.InspectRealmOption{ Schemas: []string{"test"}, }) require.NoError(t, err) return r } func (t *myTest) loadUsers() *schema.Table { return t.loadTable("users") } func (t *myTest) loadPosts() *schema.Table { return t.loadTable("posts") } func (t *myTest) loadTable(name string) *schema.Table { realm := t.loadRealm() require.Len(t, realm.Schemas, 1) table, ok := realm.Schemas[0].Table(name) require.True(t, ok) return table } func (t *myTest) mariadb() bool { return strings.HasPrefix(t.version, "maria") } func (t *myTest) tidb() bool { return strings.HasPrefix(t.version, "tidb") } // defaultConfig returns the default charset and // collation configuration based on the MySQL version. 
func (t *myTest) defaultAttrs() []schema.Attr { var ( charset = "latin1" collation = "latin1_swedish_ci" ) switch { case strings.Contains(t.version, "tidb"): charset = "utf8mb4" collation = "utf8mb4_bin" case t.version == "mysql8": charset = "utf8mb4" collation = "utf8mb4_0900_ai_ci" case t.version == "maria107": charset = "utf8mb4" collation = "utf8mb4_general_ci" } return []schema.Attr{ &schema.Charset{ V: charset, }, &schema.Collation{ V: collation, }, } } func (t *myTest) hclDriftTest(n string, realm *schema.Realm, expected schema.Table) { spec, err := mysql.MarshalHCL(realm.Schemas[0]) require.NoError(t, err) t.dropTables(n) t.applyHcl(string(spec)) realm = t.loadRealm() require.Len(t, realm.Schemas, 1) ts, ok := realm.Schemas[0].Table(n) require.True(t, ok) rmCreateStmt(ts) require.EqualValues(t, &expected, ts) } func rmCreateStmt(t *schema.Table) { for i := range t.Attrs { if _, ok := t.Attrs[i].(*mysql.CreateStmt); ok { t.Attrs = append(t.Attrs[:i], t.Attrs[i+1:]...) return } } } func intp(i int) *int { return &i } atlas-0.7.2/internal/integration/postgres_test.go000066400000000000000000001447301431455511600222350ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package integration

import (
	"context"
	"database/sql"
	"fmt"
	"log"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
	"sync"
	"testing"

	"ariga.io/atlas/sql/migrate"
	"ariga.io/atlas/sql/postgres"
	"ariga.io/atlas/sql/schema"
	"ariga.io/atlas/sql/sqlclient"
	"entgo.io/ent/dialect"
	_ "github.com/lib/pq"
	"github.com/stretchr/testify/require"
)

// pgTest carries the per-version state shared by all PostgreSQL
// integration tests: the open connection, the Atlas driver, and the
// local port the corresponding database container listens on.
type pgTest struct {
	*testing.T
	db      *sql.DB
	drv     migrate.Driver
	rrw     migrate.RevisionReadWriter
	version string
	port    int
	once    sync.Once // guards one-time connection/driver setup
}

// pgTests maps each PostgreSQL version under test to its local port.
var pgTests = map[string]*pgTest{
	"postgres10": {port: 5430},
	"postgres11": {port: 5431},
	"postgres12": {port: 5432},
	"postgres13": {port: 5433},
	"postgres14": {port: 5434},
}

// pgRun runs fn as a subtest against every configured PostgreSQL version,
// or only the one selected via flagVersion. The connection and driver of
// each version are initialized lazily, exactly once.
func pgRun(t *testing.T, fn func(*pgTest)) {
	for version, tt := range pgTests {
		if flagVersion == "" || flagVersion == version {
			t.Run(version, func(t *testing.T) {
				tt.once.Do(func() {
					var err error
					tt.version = version
					tt.rrw = &rrw{}
					tt.db, err = sql.Open("postgres", fmt.Sprintf("host=localhost port=%d user=postgres dbname=test password=pass sslmode=disable", tt.port))
					if err != nil {
						log.Fatalln(err)
					}
					dbs = append(dbs, tt.db) // close connection after all tests have been run
					tt.drv, err = postgres.Open(tt.db)
					if err != nil {
						log.Fatalln(err)
					}
				})
				tt := &pgTest{T: t, db: tt.db, drv: tt.drv, version: version, port: tt.port, rrw: tt.rrw}
				fn(tt)
			})
		}
	}
}

func TestPostgres_Executor(t *testing.T) {
	pgRun(t, func(t *pgTest) {
		testExecutor(t)
	})
}

func TestPostgres_AddDropTable(t *testing.T) {
	pgRun(t, func(t *pgTest) {
		testAddDrop(t)
	})
}

func TestPostgres_Relation(t *testing.T) {
	pgRun(t, func(t *pgTest) {
		testRelation(t)
	})
}

// TestPostgres_NoSchema verifies that inspecting a database that has no
// schemas returns a realm with a nil Schemas slice. The "public" schema
// is restored on cleanup for subsequent tests.
func TestPostgres_NoSchema(t *testing.T) {
	pgRun(t, func(t *pgTest) {
		t.Cleanup(func() {
			_, err := t.db.Exec("CREATE SCHEMA IF NOT EXISTS public")
			require.NoError(t, err)
		})
		_, err := t.db.Exec("DROP SCHEMA IF EXISTS public CASCADE")
		require.NoError(t, err)
		r, err := t.drv.InspectRealm(context.Background(), nil)
		require.NoError(t, err)
		require.Nil(t, r.Schemas)
	})
}

// TestPostgres_AddIndexedColumns adds three columns covered by one unique
// index, then verifies that dropping one of the indexed columns drops the
// multi-column index as well.
func TestPostgres_AddIndexedColumns(t *testing.T) {
	pgRun(t, func(t *pgTest) {
		usersT := &schema.Table{
			Name:    "users",
			Columns: []*schema.Column{{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}},
		}
		t.migrate(&schema.AddTable{T: usersT})
		t.dropTables(usersT.Name)
		usersT.Columns = append(usersT.Columns, &schema.Column{
			Name:    "a",
			Type:    &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true},
			Default: &schema.Literal{V: "10"},
		}, &schema.Column{
			Name:    "b",
			Type:    &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true},
			Default: &schema.Literal{V: "10"},
		}, &schema.Column{
			Name:    "c",
			Type:    &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true},
			Default: &schema.Literal{V: "10"},
		})
		parts := usersT.Columns[len(usersT.Columns)-3:]
		usersT.Indexes = append(usersT.Indexes, &schema.Index{
			Unique: true,
			Name:   "a_b_c_unique",
			Parts:  []*schema.IndexPart{{C: parts[0]}, {C: parts[1]}, {C: parts[2]}},
		})
		changes := t.diff(t.loadUsers(), usersT)
		require.NotEmpty(t, changes, "usersT contains 3 new columns and 1 new index")
		t.migrate(&schema.ModifyTable{T: usersT, Changes: changes})
		ensureNoChange(t, usersT)

		// Dropping a column involves in a multi-column
		// index causes the index to be dropped as well.
		usersT.Columns = usersT.Columns[:len(usersT.Columns)-1]
		changes = t.diff(t.loadUsers(), usersT)
		t.migrate(&schema.ModifyTable{T: usersT, Changes: changes})
		ensureNoChange(t, t.loadUsers())
		usersT = t.loadUsers()
		_, ok := usersT.Index("a_b_c_unique")
		require.False(t, ok)
	})
}

// TestPostgres_ColumnCheck creates a table carrying a CHECK constraint and
// verifies it round-trips through migrate/inspect with no diff.
func TestPostgres_ColumnCheck(t *testing.T) {
	pgRun(t, func(t *pgTest) {
		usersT := &schema.Table{
			Name:  "users",
			Attrs: []schema.Attr{schema.NewCheck().SetName("users_c_check").SetExpr("c > 5")},
			Columns: []*schema.Column{
				{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}},
				{Name: "c", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}},
			},
		}
		t.dropTables(usersT.Name)
		t.migrate(&schema.AddTable{T: usersT})
		ensureNoChange(t, usersT)
	})
}

// TestPostgres_AddColumns adds one column of (almost) every supported
// Postgres type to an existing table and expects exactly 17 add-column
// changes, none of which drift after migration.
func TestPostgres_AddColumns(t *testing.T) {
	pgRun(t, func(t *pgTest) {
		usersT := t.users()
		t.dropTables(usersT.Name)
		t.migrate(&schema.AddTable{T: usersT})
		// The "p" column below uses the hstore user-defined type.
		_, err := t.db.Exec("CREATE EXTENSION IF NOT EXISTS hstore")
		require.NoError(t, err)
		usersT.Columns = append(
			usersT.Columns,
			&schema.Column{Name: "a", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "bytea"}}},
			&schema.Column{Name: "b", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double precision", Precision: 10}}, Default: &schema.Literal{V: "10.1"}},
			&schema.Column{Name: "c", Type: &schema.ColumnType{Type: &schema.StringType{T: "character"}}, Default: &schema.Literal{V: "'y'"}},
			&schema.Column{Name: "d", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric", Precision: 10, Scale: 2}}, Default: &schema.Literal{V: "0.99"}},
			&schema.Column{Name: "e", Type: &schema.ColumnType{Type: &schema.JSONType{T: "json"}}, Default: &schema.Literal{V: "'{}'"}},
			&schema.Column{Name: "f", Type: &schema.ColumnType{Type: &schema.JSONType{T: "jsonb"}}, Default: &schema.Literal{V: "'1'"}},
			&schema.Column{Name: "g", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float", Precision: 10}}, Default: &schema.Literal{V: "'1'"}},
			&schema.Column{Name: "h", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float", Precision: 30}}, Default: &schema.Literal{V: "'1'"}},
			&schema.Column{Name: "i", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float", Precision: 53}}, Default: &schema.Literal{V: "1"}},
			&schema.Column{Name: "j", Type: &schema.ColumnType{Type: &postgres.SerialType{T: "serial"}}},
			&schema.Column{Name: "k", Type: &schema.ColumnType{Type: &postgres.CurrencyType{T: "money"}}, Default: &schema.Literal{V: "'100'"}},
			&schema.Column{Name: "l", Type: &schema.ColumnType{Type: &postgres.CurrencyType{T: "money"}, Null: true}, Default: &schema.RawExpr{X: "'52093.89'::money"}},
			&schema.Column{Name: "m", Type: &schema.ColumnType{Type: &schema.BoolType{T: "boolean"}, Null: true}, Default: &schema.Literal{V: "false"}},
			&schema.Column{Name: "n", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "point"}, Null: true}, Default: &schema.Literal{V: "'(1,2)'"}},
			&schema.Column{Name: "o", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "line"}, Null: true}, Default: &schema.Literal{V: "'{1,2,3}'"}},
			&schema.Column{Name: "p", Type: &schema.ColumnType{Type: &postgres.UserDefinedType{T: "hstore"}, Null: true}, Default: &schema.RawExpr{X: "'a => 1'"}},
			&schema.Column{Name: "q", Type: &schema.ColumnType{Type: &postgres.ArrayType{Type: &schema.StringType{T: "text"}, T: "text[]"}, Null: true}, Default: &schema.Literal{V: "'{}'"}},
		)
		changes := t.diff(t.loadUsers(), usersT)
		require.Len(t, changes, 17)
		t.migrate(&schema.ModifyTable{T: usersT, Changes: changes})
		ensureNoChange(t, usersT)
	})
}

// TestPostgres_ColumnInt exercises single-change modifications of an int
// column: nullability, type widening/narrowing, and default value.
func TestPostgres_ColumnInt(t *testing.T) {
	ctx := context.Background()
	// run creates a fresh one-column table, applies the given mutation to
	// the column, and expects exactly one diff change that migrates cleanly.
	run := func(t *testing.T, change func(*schema.Column)) {
		pgRun(t, func(t *pgTest) {
			usersT := &schema.Table{
				Name:    "users",
				Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}},
			}
			err := t.drv.ApplyChanges(ctx, []schema.Change{&schema.AddTable{T: usersT}})
			require.NoError(t, err)
			t.dropTables(usersT.Name)
			change(usersT.Columns[0])
			changes := t.diff(t.loadUsers(), usersT)
			require.Len(t, changes, 1)
			t.migrate(&schema.ModifyTable{T: usersT, Changes: changes})
			ensureNoChange(t, usersT)
		})
	}

	t.Run("ChangeNull", func(t *testing.T) {
		run(t, func(c *schema.Column) {
			c.Type.Null = true
		})
	})
	t.Run("ChangeType", func(t *testing.T) {
		run(t, func(c *schema.Column) {
			c.Type.Type.(*schema.IntegerType).T = "integer"
		})
	})
	t.Run("ChangeDefault", func(t *testing.T) {
		run(t, func(c *schema.Column) {
			c.Default = &schema.RawExpr{X: "0"}
		})
	})
}

// TestPostgres_ColumnArray verifies add/modify of an int[] column,
// including equivalence of '{1}' and ARRAY[1] default expressions.
func TestPostgres_ColumnArray(t *testing.T) {
	pgRun(t, func(t *pgTest) {
		usersT := t.users()
		t.dropTables(usersT.Name)
		t.migrate(&schema.AddTable{T: usersT})

		// Add column.
		usersT.Columns = append(
			usersT.Columns,
			&schema.Column{Name: "a", Type: &schema.ColumnType{Raw: "int[]", Type: &postgres.ArrayType{Type: &schema.IntegerType{T: "int"}, T: "int[]"}}, Default: &schema.Literal{V: "'{1}'"}},
		)
		changes := t.diff(t.loadUsers(), usersT)
		require.Len(t, changes, 1)
		t.migrate(&schema.ModifyTable{T: usersT, Changes: changes})
		ensureNoChange(t, usersT)

		// Check default.
		usersT.Columns[2].Default = &schema.RawExpr{X: "ARRAY[1]"}
		ensureNoChange(t, usersT)

		// Change default.
		usersT.Columns[2].Default = &schema.RawExpr{X: "ARRAY[1,2]"}
		changes = t.diff(t.loadUsers(), usersT)
		require.Len(t, changes, 1)
		t.migrate(&schema.ModifyTable{T: usersT, Changes: changes})
		ensureNoChange(t, usersT)
	})
}

// TestPostgres_Enums covers enum columns: creating the type with a table,
// adding a second enum column, and appending values to an existing enum.
func TestPostgres_Enums(t *testing.T) {
	pgRun(t, func(t *pgTest) {
		ctx := context.Background()
		usersT := &schema.Table{
			Name:   "users",
			Schema: t.realm().Schemas[0],
			Columns: []*schema.Column{
				{Name: "state", Type: &schema.ColumnType{Type: &schema.EnumType{T: "state", Values: []string{"on", "off"}}}},
			},
		}
		t.Cleanup(func() {
			_, err := t.drv.ExecContext(ctx, "DROP TYPE IF EXISTS state, day")
			require.NoError(t, err)
		})

		// Create table with an enum column.
		err := t.drv.ApplyChanges(ctx, []schema.Change{&schema.AddTable{T: usersT}})
		require.NoError(t, err, "create a new table with an enum column")
		t.dropTables(usersT.Name)
		ensureNoChange(t, usersT)

		// Add another enum column.
		usersT.Columns = append(
			usersT.Columns,
			&schema.Column{Name: "day", Type: &schema.ColumnType{Type: &schema.EnumType{T: "day", Values: []string{"sunday", "monday"}}}},
		)
		changes := t.diff(t.loadUsers(), usersT)
		require.Len(t, changes, 1)
		err = t.drv.ApplyChanges(ctx, []schema.Change{&schema.ModifyTable{T: usersT, Changes: changes}})
		require.NoError(t, err, "add a new enum column to existing table")
		ensureNoChange(t, usersT)

		// Add a new value to an existing enum.
		e := usersT.Columns[1].Type.Type.(*schema.EnumType)
		e.Values = append(e.Values, "tuesday")
		changes = t.diff(t.loadUsers(), usersT)
		require.Len(t, changes, 1)
		err = t.drv.ApplyChanges(ctx, []schema.Change{&schema.ModifyTable{T: usersT, Changes: changes}})
		require.NoError(t, err, "append a value to existing enum")
		ensureNoChange(t, usersT)

		// Add multiple new values to an existing enum.
		e = usersT.Columns[1].Type.Type.(*schema.EnumType)
		e.Values = append(e.Values, "wednesday", "thursday", "friday", "saturday")
		changes = t.diff(t.loadUsers(), usersT)
		require.Len(t, changes, 1)
		err = t.drv.ApplyChanges(ctx, []schema.Change{&schema.ModifyTable{T: usersT, Changes: changes}})
		require.NoError(t, err, "append multiple values to existing enum")
		ensureNoChange(t, usersT)
	})
}

// TestPostgres_ForeignKey covers modifying FK referential actions,
// unsetting nullability of an FK column, and adding/dropping an FK.
func TestPostgres_ForeignKey(t *testing.T) {
	t.Run("ChangeAction", func(t *testing.T) {
		pgRun(t, func(t *pgTest) {
			usersT, postsT := t.users(), t.posts()
			t.dropTables(postsT.Name, usersT.Name)
			t.migrate(&schema.AddTable{T: usersT}, &schema.AddTable{T: postsT})
			ensureNoChange(t, postsT, usersT)
			postsT = t.loadPosts()
			fk, ok := postsT.ForeignKey("author_id")
			require.True(t, ok)
			fk.OnUpdate = schema.SetNull
			fk.OnDelete = schema.Cascade
			changes := t.diff(t.loadPosts(), postsT)
			require.Len(t, changes, 1)
			modifyF, ok := changes[0].(*schema.ModifyForeignKey)
			require.True(t, ok)
			// Both referential actions changed; the diff reports them
			// as one ModifyForeignKey with a combined change mask.
			require.True(t, modifyF.Change == schema.ChangeUpdateAction|schema.ChangeDeleteAction)
			t.migrate(&schema.ModifyTable{T: postsT, Changes: changes})
			ensureNoChange(t, postsT, usersT)
		})
	})
	t.Run("UnsetNull", func(t *testing.T) {
		pgRun(t, func(t *pgTest) {
			usersT, postsT := t.users(), t.posts()
			t.dropTables(postsT.Name, usersT.Name)
			fk, ok := postsT.ForeignKey("author_id")
			require.True(t, ok)
			fk.OnDelete = schema.SetNull
			fk.OnUpdate = schema.SetNull
			t.migrate(&schema.AddTable{T: usersT}, &schema.AddTable{T: postsT})
			ensureNoChange(t, postsT, usersT)
			postsT = t.loadPosts()
			c, ok := postsT.Column("author_id")
			require.True(t, ok)
			// SET NULL actions are incompatible with a NOT NULL column,
			// so both the column and the FK actions must change together.
			c.Type.Null = false
			fk, ok = postsT.ForeignKey("author_id")
			require.True(t, ok)
			fk.OnUpdate = schema.NoAction
			fk.OnDelete = schema.NoAction
			changes := t.diff(t.loadPosts(), postsT)
			require.Len(t, changes, 2)
			modifyC, ok := changes[0].(*schema.ModifyColumn)
			require.True(t, ok)
			require.True(t, modifyC.Change == schema.ChangeNull)
			modifyF, ok := changes[1].(*schema.ModifyForeignKey)
			require.True(t, ok)
			require.True(t, modifyF.Change == schema.ChangeUpdateAction|schema.ChangeDeleteAction)
			t.migrate(&schema.ModifyTable{T: postsT, Changes: changes})
			ensureNoChange(t, postsT, usersT)
		})
	})
	t.Run("AddDrop", func(t *testing.T) {
		pgRun(t, func(t *pgTest) {
			usersT := t.users()
			t.dropTables(usersT.Name)
			t.migrate(&schema.AddTable{T: usersT})
			ensureNoChange(t, usersT)

			// Add foreign key.
			usersT.Columns = append(usersT.Columns, &schema.Column{
				Name: "spouse_id",
				Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true},
			})
			usersT.ForeignKeys = append(usersT.ForeignKeys, &schema.ForeignKey{
				Symbol:     "spouse_id",
				Table:      usersT,
				Columns:    usersT.Columns[len(usersT.Columns)-1:],
				RefTable:   usersT,
				RefColumns: usersT.Columns[:1],
				OnDelete:   schema.NoAction,
			})
			changes := t.diff(t.loadUsers(), usersT)
			require.Len(t, changes, 2)
			addC, ok := changes[0].(*schema.AddColumn)
			require.True(t, ok)
			require.Equal(t, "spouse_id", addC.C.Name)
			addF, ok := changes[1].(*schema.AddForeignKey)
			require.True(t, ok)
			require.Equal(t, "spouse_id", addF.F.Symbol)
			t.migrate(&schema.ModifyTable{T: usersT, Changes: changes})
			ensureNoChange(t, usersT)

			// Drop foreign keys.
			usersT.Columns = usersT.Columns[:len(usersT.Columns)-1]
			usersT.ForeignKeys = usersT.ForeignKeys[:len(usersT.ForeignKeys)-1]
			changes = t.diff(t.loadUsers(), usersT)
			require.Len(t, changes, 2)
			t.migrate(&schema.ModifyTable{T: usersT, Changes: changes})
			ensureNoChange(t, usersT)
		})
	})
}

func TestPostgres_Ent(t *testing.T) {
	pgRun(t, func(t *pgTest) {
		testEntIntegration(t, dialect.Postgres, t.db)
	})
	// Migration to global unique identifiers.
t.Run("GlobalUniqueID", func(t *testing.T) { pgRun(t, func(t *pgTest) { ctx := context.Background() t.dropTables("global_id") _, err := t.driver().ExecContext(ctx, "CREATE TABLE global_id (id int NOT NULL GENERATED BY DEFAULT AS IDENTITY, PRIMARY KEY(id))") require.NoError(t, err) _, err = t.driver().ExecContext(ctx, "ALTER TABLE global_id ALTER COLUMN id RESTART WITH 1024") require.NoError(t, err) _, err = t.driver().ExecContext(ctx, "INSERT INTO global_id VALUES (default), (default)") require.NoError(t, err) var id int require.NoError(t, t.db.QueryRow("SELECT id FROM global_id").Scan(&id)) require.Equal(t, 1024, id) _, err = t.driver().ExecContext(ctx, "DELETE FROM global_id WHERE id = 1024") require.NoError(t, err) globalT := t.loadTable("global_id") c, ok := globalT.Column("id") require.True(t, ok) require.EqualValues(t, 1, c.Attrs[0].(*postgres.Identity).Sequence.Start) t.migrate(&schema.ModifyTable{ T: globalT, Changes: []schema.Change{ &schema.ModifyColumn{ From: globalT.Columns[0], To: schema.NewIntColumn("id", "int"). 
AddAttrs(&postgres.Identity{ Generation: "BY DEFAULT", Sequence: &postgres.Sequence{ Start: 1024, }, }), Change: schema.ChangeAttr, }, }, }) _, err = t.driver().ExecContext(ctx, "INSERT INTO global_id VALUES (default), (default)") require.NoError(t, err) globalT = t.loadTable("global_id") c, ok = globalT.Column("id") require.True(t, ok) require.EqualValues(t, 1024, c.Attrs[0].(*postgres.Identity).Sequence.Start) }) }) } func TestPostgres_AdvisoryLock(t *testing.T) { pgRun(t, func(t *pgTest) { testAdvisoryLock(t.T, t.drv.(schema.Locker)) }) } func TestPostgres_HCL(t *testing.T) { full := ` schema "public" { } table "users" { schema = schema.public column "id" { type = int } primary_key { columns = [table.users.column.id] } } table "posts" { schema = schema.public column "id" { type = int } column "tags" { type = sql("text[]") } column "author_id" { type = int } foreign_key "author" { columns = [ table.posts.column.author_id, ] ref_columns = [ table.users.column.id, ] } primary_key { columns = [table.users.column.id] } } ` empty := ` schema "public" { } ` pgRun(t, func(t *pgTest) { testHCLIntegration(t, full, empty) }) } func TestPostgres_HCL_Realm(t *testing.T) { pgRun(t, func(t *pgTest) { t.dropSchemas("second") realm := t.loadRealm() hcl, err := postgres.MarshalHCL(realm) require.NoError(t, err) wa := string(hcl) + ` schema "second" { } ` t.applyRealmHcl(wa) realm, err = t.drv.InspectRealm(context.Background(), &schema.InspectRealmOption{}) require.NoError(t, err) _, ok := realm.Schema("public") require.True(t, ok) _, ok = realm.Schema("second") require.True(t, ok) }) } func TestPostgres_HCL_ForeignKeyCrossSchema(t *testing.T) { const expected = `table "credit_cards" { schema = schema.financial column "id" { null = false type = serial } column "user_id" { null = false type = integer } primary_key { columns = [column.id] } foreign_key "user_id_fkey" { columns = [column.user_id] ref_columns = [table.users.users.column.id] on_update = NO_ACTION on_delete = NO_ACTION 
} } table "financial" "users" { schema = schema.financial column "id" { null = false type = serial } } table "users" "users" { schema = schema.users column "id" { null = false type = bigserial } column "email" { null = false type = character_varying } primary_key { columns = [column.id] } } schema "financial" { } schema "users" { } ` pgRun(t, func(t *pgTest) { t.dropSchemas("financial", "users") realm := t.loadRealm() hcl, err := postgres.MarshalHCL(realm) require.NoError(t, err) t.applyRealmHcl(string(hcl) + "\n" + expected) realm, err = t.drv.InspectRealm(context.Background(), &schema.InspectRealmOption{Schemas: []string{"users", "financial"}}) require.NoError(t, err) actual, err := postgres.MarshalHCL(realm) require.NoError(t, err) require.Equal(t, expected, string(actual)) }) } func (t *pgTest) applyRealmHcl(spec string) { realm := t.loadRealm() var desired schema.Realm err := postgres.EvalHCLBytes([]byte(spec), &desired, nil) require.NoError(t, err) diff, err := t.drv.RealmDiff(realm, &desired) require.NoError(t, err) err = t.drv.ApplyChanges(context.Background(), diff) require.NoError(t, err) } func TestPostgres_Snapshot(t *testing.T) { pgRun(t, func(t *pgTest) { client, err := sqlclient.Open(context.Background(), fmt.Sprintf("postgres://postgres:pass@localhost:%d/test?sslmode=disable&search_path=another", t.port)) require.NoError(t, err) _, err = client.ExecContext(context.Background(), "CREATE SCHEMA another") require.NoError(t, err) t.Cleanup(func() { _, err = client.ExecContext(context.Background(), "DROP SCHEMA IF EXISTS another") require.NoError(t, client.Close()) }) drv := client.Driver _, err = t.driver().(migrate.Snapshoter).Snapshot(context.Background()) require.ErrorAs(t, err, &migrate.NotCleanError{}) r, err := drv.InspectRealm(context.Background(), nil) require.NoError(t, err) restore, err := drv.(migrate.Snapshoter).Snapshot(context.Background()) require.NoError(t, err) // connected to test schema require.NoError(t, 
drv.ApplyChanges(context.Background(), []schema.Change{ &schema.AddTable{T: schema.NewTable("my_table"). AddColumns( schema.NewIntColumn("col_1", "integer").SetNull(true), schema.NewIntColumn("col_2", "bigint"), ), }, })) t.Cleanup(func() { t.dropTables("my_table") }) require.NoError(t, restore(context.Background())) r1, err := drv.InspectRealm(context.Background(), nil) require.NoError(t, err) diff, err := drv.RealmDiff(r1, r) require.NoError(t, err) require.Zero(t, diff) }) } func TestPostgres_CLI_MigrateApplyBC(t *testing.T) { pgRun(t, func(t *pgTest) { testCLIMigrateApplyBC(t, "postgres") }) } func TestPostgres_CLI(t *testing.T) { h := ` schema "public" { } table "users" { schema = schema.public column "id" { type = integer } primary_key { columns = [table.users.column.id] } }` t.Run("SchemaInspect", func(t *testing.T) { pgRun(t, func(t *pgTest) { testCLISchemaInspect(t, h, t.url(""), postgres.EvalHCL) }) }) t.Run("SchemaApply", func(t *testing.T) { pgRun(t, func(t *pgTest) { testCLISchemaApply(t, h, t.url("")) }) }) t.Run("SchemaApplyDryRun", func(t *testing.T) { pgRun(t, func(t *pgTest) { testCLISchemaApplyDry(t, h, t.url("")) }) }) t.Run("SchemaApplyWithVars", func(t *testing.T) { h := ` variable "tenant" { type = string } schema "tenant" { name = var.tenant } table "users" { schema = schema.tenant column "id" { type = int } } ` pgRun(t, func(t *pgTest) { testCLISchemaApply(t, h, t.url(""), "--var", "tenant=public") }) }) t.Run("SchemaDiffRun", func(t *testing.T) { pgRun(t, func(t *pgTest) { testCLISchemaDiff(t, t.url("")) }) }) t.Run("SchemaApplyAutoApprove", func(t *testing.T) { pgRun(t, func(t *pgTest) { testCLISchemaApplyAutoApprove(t, h, t.url("")) }) }) } func TestPostgres_CLI_MultiSchema(t *testing.T) { h := ` schema "public" { } table "users" { schema = schema.public column "id" { type = integer } primary_key { columns = [table.users.column.id] } } schema "test2" { } table "users" { schema = schema.test2 column "id" { type = integer } primary_key { 
columns = [table.users.column.id] } }` t.Run("SchemaInspect", func(t *testing.T) { pgRun(t, func(t *pgTest) { t.dropSchemas("test2") t.dropTables("users") testCLIMultiSchemaInspect(t, h, t.url(""), []string{"public", "test2"}, postgres.EvalHCL) }) }) t.Run("SchemaApply", func(t *testing.T) { pgRun(t, func(t *pgTest) { t.dropSchemas("test2") t.dropTables("users") testCLIMultiSchemaApply(t, h, t.url(""), []string{"public", "test2"}, postgres.EvalHCL) }) }) } func TestPostgres_MigrateDiffRealm(t *testing.T) { bin, err := buildCmd(t) require.NoError(t, err) pgRun(t, func(t *pgTest) { dir := t.TempDir() _, err := t.db.Exec("CREATE DATABASE migrate_diff") require.NoError(t, err) defer t.db.Exec("DROP DATABASE IF EXISTS migrate_diff") hcl := ` schema "public" {} table "users" { schema = schema.public column "id" { type = integer } } schema "other" {} table "posts" { schema = schema.other column "id" { type = integer } } ` err = os.WriteFile(filepath.Join(dir, "schema.hcl"), []byte(hcl), 0600) diff := func(name string) string { out, err := exec.Command( bin, "migrate", "diff", name, "--dir", fmt.Sprintf("file://%s", filepath.Join(dir, "migrations")), "--to", fmt.Sprintf("file://%s", filepath.Join(dir, "schema.hcl")), "--dev-url", fmt.Sprintf("postgres://postgres:pass@localhost:%d/migrate_diff?sslmode=disable", t.port), ).CombinedOutput() require.NoError(t, err, string(out)) return strings.TrimSpace(string(out)) } require.Empty(t, diff("initial")) // Expect one file and read its contents. 
files, err := os.ReadDir(filepath.Join(dir, "migrations")) require.NoError(t, err) require.Equal(t, 2, len(files)) require.Equal(t, "atlas.sum", files[1].Name()) b, err := os.ReadFile(filepath.Join(dir, "migrations", files[0].Name())) require.NoError(t, err) require.Equal(t, `-- Add new schema named "other" CREATE SCHEMA "other"; -- create "users" table CREATE TABLE "public"."users" ("id" integer NOT NULL); -- create "posts" table CREATE TABLE "other"."posts" ("id" integer NOT NULL); `, string(b)) require.Equal(t, "The migration directory is synced with the desired state, no changes to be made", diff("no_change")) // Append a change to the schema and expect a migration to be created. hcl += ` table "other" "users" { schema = schema.other column "id" { type = integer } }` err = os.WriteFile(filepath.Join(dir, "schema.hcl"), []byte(hcl), 0600) require.Empty(t, diff("second")) require.Equal(t, "The migration directory is synced with the desired state, no changes to be made", diff("no_change")) files, err = os.ReadDir(filepath.Join(dir, "migrations")) require.NoError(t, err) require.Equal(t, 3, len(files), dir) b, err = os.ReadFile(filepath.Join(dir, "migrations", files[1].Name())) require.NoError(t, err) require.Equal(t, `-- create "users" table CREATE TABLE "other"."users" ("id" integer NOT NULL); `, string(b)) }) } func TestPostgres_SchemaDiff(t *testing.T) { bin, err := buildCmd(t) require.NoError(t, err) pgRun(t, func(t *pgTest) { dir := t.TempDir() _, err = t.db.Exec("CREATE DATABASE test1") require.NoError(t, err) t.Cleanup(func() { _, err := t.db.Exec("DROP DATABASE IF EXISTS test1") require.NoError(t, err) }) _, err = t.db.Exec("CREATE DATABASE test2") require.NoError(t, err) t.Cleanup(func() { _, err = t.db.Exec("DROP DATABASE IF EXISTS test2") require.NoError(t, err) }) diff := func(db1, db2 string) string { out, err := exec.Command( bin, "schema", "diff", "--from", fmt.Sprintf("postgres://postgres:pass@localhost:%d/%s", t.port, db1), "--to", 
fmt.Sprintf("postgres://postgres:pass@localhost:%d/%s", t.port, db2), ).CombinedOutput() require.NoError(t, err, string(out)) return strings.TrimSpace(string(out)) } // Diff a database with itself. require.Equal(t, "Schemas are synced, no changes to be made.", diff("test1?sslmode=disable", "test2?sslmode=disable")) // Create schemas on test2 database. hcl := ` schema "public" {} table "users" { schema = schema.public column "id" { type = integer } } schema "other" {} table "posts" { schema = schema.other column "id" { type = integer } } ` err = os.WriteFile(filepath.Join(dir, "schema.hcl"), []byte(hcl), 0600) require.NoError(t, err) out, err := exec.Command( bin, "schema", "apply", "-u", fmt.Sprintf("postgres://postgres:pass@localhost:%d/test2?sslmode=disable", t.port), "-f", fmt.Sprintf(filepath.Join(dir, "schema.hcl")), "--auto-approve", ).CombinedOutput() require.NoError(t, err, string(out)) // Diff a database with different one. require.Equal(t, `-- Add new schema named "other" CREATE SCHEMA "other" -- Create "users" table CREATE TABLE "public"."users" ("id" integer NOT NULL) -- Create "posts" table CREATE TABLE "other"."posts" ("id" integer NOT NULL)`, diff("test1?sslmode=disable", "test2?sslmode=disable")) // diff schemas require.Equal(t, `-- Drop "posts" table DROP TABLE "posts" -- Create "users" table CREATE TABLE "users" ("id" integer NOT NULL)`, diff("test2?sslmode=disable&search_path=other", "test2?sslmode=disable&search_path=public")) // diff between schema and database out, err = exec.Command( bin, "schema", "diff", "--from", fmt.Sprintf("postgres://postgres:pass@localhost:%d/test2?sslmode=disable", t.port), "--to", fmt.Sprintf("postgres://postgres:pass@localhost:%d/test2?sslmode=disable&search_path=public", t.port), ).CombinedOutput() require.Error(t, err, string(out)) require.Equal(t, "Error: cannot diff schema \"\" with a database connection\n", string(out)) }) } func TestPostgres_DefaultsHCL(t *testing.T) { n := "atlas_defaults" pgRun(t, func(t 
*pgTest) {
		ddl := `
create table atlas_defaults
(
	string varchar(255) default 'hello_world',
	quoted varchar(100) default 'never say "never"',
	tBit bit(10) default b'10101',
	ts timestamp default CURRENT_TIMESTAMP,
	tstz timestamp with time zone default CURRENT_TIMESTAMP,
	number int default 42
)
`
		t.dropTables(n)
		_, err := t.db.Exec(ddl)
		require.NoError(t, err)
		realm := t.loadRealm()
		spec, err := postgres.MarshalHCL(realm.Schemas[0])
		require.NoError(t, err)
		// Evaluate the marshaled HCL back into a schema to ensure it parses,
		// then re-apply it and verify there is no drift against the original.
		var s schema.Schema
		err = postgres.EvalHCLBytes(spec, &s, nil)
		require.NoError(t, err)
		t.dropTables(n)
		t.applyHcl(string(spec))
		ensureNoChange(t, realm.Schemas[0].Tables[0])
	})
}

// TestPostgres_Sanity creates a table with one column of every supported
// type (including a user-defined composite type) and compares inspection
// output against the expected schema graph.
func TestPostgres_Sanity(t *testing.T) {
	n := "atlas_types_sanity"
	ddl := `
DROP TYPE IF EXISTS address;
CREATE TYPE address AS (city VARCHAR(90), street VARCHAR(90));
create table atlas_types_sanity
(
	"tBit" bit(10) default b'100' null,
	"tBitVar" bit varying(10) default b'100' null,
	"tBoolean" boolean default false not null,
	"tBool" bool default false not null,
	"tBytea" bytea default E'\\001' not null,
	"tCharacter" character(10) default 'atlas' null,
	"tChar" char(10) default 'atlas' null,
	"tCharVar" character varying(10) default 'atlas' null,
	"tVarChar" varchar(10) default 'atlas' null,
	"tText" text default 'atlas' null,
	"tSmallInt" smallint default '10' null,
	"tInteger" integer default '10' null,
	"tBigInt" bigint default '10' null,
	"tInt" int default '10' null,
	"tInt2" int2 default '10' null,
	"tInt4" int4 default '10' null,
	"tInt8" int8 default '10' null,
	"tCIDR" cidr default '127.0.0.1' null,
	"tInet" inet default '127.0.0.1' null,
	"tMACAddr" macaddr default '08:00:2b:01:02:03' null,
	"tMACAddr8" macaddr8 default '08:00:2b:01:02:03:04:05' null,
	"tCircle" circle default null,
	"tLine" line default null,
	"tLseg" lseg default null,
	"tBox" box default null,
	"tPath" path default null,
	"tPoint" point default null,
	"tDate" date default current_date null,
	"tTime" time default current_time null,
	"tTimeWTZ" time with time zone default current_time
null, "tTimeWOTZ" time without time zone default current_time null, "tTimestamp" timestamp default now() null, "tTimestampTZ" timestamptz default now() null, "tTimestampWTZ" timestamp with time zone default now() null, "tTimestampWOTZ" timestamp without time zone default now() null, "tTimestampPrec" timestamp(4) default now() null, "tDouble" double precision default 0 null, "tReal" real default 0 null, "tFloat8" float8 default 0 null, "tFloat4" float4 default 0 null, "tNumeric" numeric default 0 null, "tDecimal" decimal default 0 null, "tSmallSerial" smallserial , "tSerial" serial , "tBigSerial" bigserial , "tSerial2" serial2 , "tSerial4" serial4 , "tSerial8" serial8 , "tArray" text[10][10] default '{}' null, "tXML" xml default 'foo' null, "tJSON" json default '{"key":"value"}' null, "tJSONB" jsonb default '{"key":"value"}' null, "tUUID" uuid default 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11' null, "tMoney" money default 18 null, "tInterval" interval default '4 hours' null, "tUserDefined" address default '("ab","cd")' null ); ` pgRun(t, func(t *pgTest) { t.dropTables(n) _, err := t.db.Exec(ddl) require.NoError(t, err) realm := t.loadRealm() require.Len(t, realm.Schemas, 1) ts, ok := realm.Schemas[0].Table(n) require.True(t, ok) expected := schema.Table{ Name: n, Schema: realm.Schemas[0], Columns: []*schema.Column{ { Name: "tBit", Type: &schema.ColumnType{Type: &postgres.BitType{T: "bit", Len: 10}, Raw: "bit", Null: true}, Default: &schema.RawExpr{X: t.valueByVersion(map[string]string{"postgres10": "B'100'::\"bit\""}, "'100'::\"bit\"")}, }, { Name: "tBitVar", Type: &schema.ColumnType{Type: &postgres.BitType{T: "bit varying", Len: 10}, Raw: "bit varying", Null: true}, Default: &schema.RawExpr{X: t.valueByVersion(map[string]string{"postgres10": "B'100'::\"bit\""}, "'100'::\"bit\"")}, }, { Name: "tBoolean", Type: &schema.ColumnType{Type: &schema.BoolType{T: "boolean"}, Raw: "boolean", Null: false}, Default: &schema.Literal{V: "false"}, }, { Name: "tBool", Type: 
&schema.ColumnType{Type: &schema.BoolType{T: "boolean"}, Raw: "boolean", Null: false}, Default: &schema.Literal{V: "false"}, }, { Name: "tBytea", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "bytea"}, Raw: "bytea", Null: false}, Default: &schema.Literal{V: "'\\x01'"}, }, { Name: "tCharacter", Type: &schema.ColumnType{Type: &schema.StringType{T: "character", Size: 10}, Raw: "character", Null: true}, Default: &schema.Literal{V: "'atlas'"}, }, { Name: "tChar", Type: &schema.ColumnType{Type: &schema.StringType{T: "character", Size: 10}, Raw: "character", Null: true}, Default: &schema.Literal{V: "'atlas'"}, }, { Name: "tCharVar", Type: &schema.ColumnType{Type: &schema.StringType{T: "character varying", Size: 10}, Raw: "character varying", Null: true}, Default: &schema.Literal{V: "'atlas'"}, }, { Name: "tVarChar", Type: &schema.ColumnType{Type: &schema.StringType{T: "character varying", Size: 10}, Raw: "character varying", Null: true}, Default: &schema.Literal{V: "'atlas'"}, }, { Name: "tText", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}, Raw: "text", Null: true}, Default: &schema.Literal{V: "'atlas'"}, }, { Name: "tSmallInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "smallint"}, Raw: "smallint", Null: true}, Default: &schema.Literal{V: "10"}, }, { Name: "tInteger", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Raw: "integer", Null: true}, Default: &schema.Literal{V: "10"}, }, { Name: "tBigInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}, Raw: "bigint", Null: true}, Default: &schema.Literal{V: "10"}, }, { Name: "tInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Raw: "integer", Null: true}, Default: &schema.Literal{V: "10"}, }, { Name: "tInt2", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "smallint"}, Raw: "smallint", Null: true}, Default: &schema.Literal{V: "10"}, }, { Name: "tInt4", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Raw: 
"integer", Null: true}, Default: &schema.Literal{V: "10"}, }, { Name: "tInt8", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}, Raw: "bigint", Null: true}, Default: &schema.Literal{V: "10"}, }, { Name: "tCIDR", Type: &schema.ColumnType{Type: &postgres.NetworkType{T: "cidr"}, Raw: "cidr", Null: true}, Default: &schema.Literal{V: "'127.0.0.1/32'"}, }, { Name: "tInet", Type: &schema.ColumnType{Type: &postgres.NetworkType{T: "inet"}, Raw: "inet", Null: true}, Default: &schema.Literal{V: "'127.0.0.1'"}, }, { Name: "tMACAddr", Type: &schema.ColumnType{Type: &postgres.NetworkType{T: "macaddr"}, Raw: "macaddr", Null: true}, Default: &schema.Literal{V: "'08:00:2b:01:02:03'"}, }, { Name: "tMACAddr8", Type: &schema.ColumnType{Type: &postgres.NetworkType{T: "macaddr8"}, Raw: "macaddr8", Null: true}, Default: &schema.Literal{V: "'08:00:2b:01:02:03:04:05'"}, }, { Name: "tCircle", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "circle"}, Raw: "circle", Null: true}, }, { Name: "tLine", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "line"}, Raw: "line", Null: true}, }, { Name: "tLseg", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "lseg"}, Raw: "lseg", Null: true}, }, { Name: "tBox", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "box"}, Raw: "box", Null: true}, }, { Name: "tPath", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "path"}, Raw: "path", Null: true}, }, { Name: "tPoint", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "point"}, Raw: "point", Null: true}, }, { Name: "tDate", Type: &schema.ColumnType{Type: &schema.TimeType{T: "date"}, Raw: "date", Null: true}, Default: &schema.RawExpr{X: "CURRENT_DATE"}, }, { Name: "tTime", Type: &schema.ColumnType{Type: &schema.TimeType{T: "time without time zone", Precision: intp(6)}, Raw: "time without time zone", Null: true}, Default: &schema.RawExpr{X: "CURRENT_TIME"}, }, { Name: "tTimeWTZ", Type: &schema.ColumnType{Type: &schema.TimeType{T: "time with time zone", Precision: 
intp(6)}, Raw: "time with time zone", Null: true}, Default: &schema.RawExpr{X: "CURRENT_TIME"}, }, { Name: "tTimeWOTZ", Type: &schema.ColumnType{Type: &schema.TimeType{T: "time without time zone", Precision: intp(6)}, Raw: "time without time zone", Null: true}, Default: &schema.RawExpr{X: "CURRENT_TIME"}, }, { Name: "tTimestamp", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp without time zone", Precision: intp(6)}, Raw: "timestamp without time zone", Null: true}, Default: &schema.RawExpr{X: "now()"}, }, { Name: "tTimestampTZ", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp with time zone", Precision: intp(6)}, Raw: "timestamp with time zone", Null: true}, Default: &schema.RawExpr{X: "now()"}, }, { Name: "tTimestampWTZ", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp with time zone", Precision: intp(6)}, Raw: "timestamp with time zone", Null: true}, Default: &schema.RawExpr{X: "now()"}, }, { Name: "tTimestampWOTZ", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp without time zone", Precision: intp(6)}, Raw: "timestamp without time zone", Null: true}, Default: &schema.RawExpr{X: "now()"}, }, { Name: "tTimestampPrec", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp without time zone", Precision: intp(4)}, Raw: "timestamp without time zone", Null: true}, Default: &schema.RawExpr{X: "now()"}, }, { Name: "tDouble", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double precision", Precision: 53}, Raw: "double precision", Null: true}, Default: &schema.Literal{V: "0"}, }, { Name: "tReal", Type: &schema.ColumnType{Type: &schema.FloatType{T: "real", Precision: 24}, Raw: "real", Null: true}, Default: &schema.Literal{V: "0"}, }, { Name: "tFloat8", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double precision", Precision: 53}, Raw: "double precision", Null: true}, Default: &schema.Literal{V: "0"}, }, { Name: "tFloat4", Type: &schema.ColumnType{Type: &schema.FloatType{T: "real", Precision: 
24}, Raw: "real", Null: true}, Default: &schema.Literal{V: "0"}, }, { Name: "tNumeric", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric", Precision: 0}, Raw: "numeric", Null: true}, Default: &schema.Literal{V: "0"}, }, { Name: "tDecimal", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric", Precision: 0}, Raw: "numeric", Null: true}, Default: &schema.Literal{V: "0"}, }, { Name: "tSmallSerial", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "smallint", Unsigned: false}, Raw: "smallint", Null: false}, Default: &schema.RawExpr{ X: "nextval('\"atlas_types_sanity_tSmallSerial_seq\"'::regclass)", }, }, { Name: "tSerial", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer", Unsigned: false}, Raw: "integer", Null: false}, Default: &schema.RawExpr{ X: "nextval('\"atlas_types_sanity_tSerial_seq\"'::regclass)", }, }, { Name: "tBigSerial", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint", Unsigned: false}, Raw: "bigint", Null: false}, Default: &schema.RawExpr{ X: "nextval('\"atlas_types_sanity_tBigSerial_seq\"'::regclass)", }, }, { Name: "tSerial2", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "smallint", Unsigned: false}, Raw: "smallint", Null: false}, Default: &schema.RawExpr{ X: "nextval('\"atlas_types_sanity_tSerial2_seq\"'::regclass)", }, }, { Name: "tSerial4", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer", Unsigned: false}, Raw: "integer", Null: false}, Default: &schema.RawExpr{ X: "nextval('\"atlas_types_sanity_tSerial4_seq\"'::regclass)", }, }, { Name: "tSerial8", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint", Unsigned: false}, Raw: "bigint", Null: false}, Default: &schema.RawExpr{ X: "nextval('\"atlas_types_sanity_tSerial8_seq\"'::regclass)", }, }, { Name: "tArray", Type: &schema.ColumnType{Type: &postgres.ArrayType{Type: &schema.StringType{T: "text"}, T: "text[]"}, Raw: "ARRAY", Null: true}, Default: &schema.Literal{ V: "'{}'", }, }, { Name: "tXML", Type: 
&schema.ColumnType{Type: &postgres.XMLType{T: "xml"}, Raw: "xml", Null: true}, Default: &schema.Literal{ V: "'foo'", }, }, { Name: "tJSON", Type: &schema.ColumnType{Type: &schema.JSONType{T: "json"}, Raw: "json", Null: true}, Default: &schema.Literal{ V: "'{\"key\":\"value\"}'", }, }, { Name: "tJSONB", Type: &schema.ColumnType{Type: &schema.JSONType{T: "jsonb"}, Raw: "jsonb", Null: true}, Default: &schema.Literal{ V: "'{\"key\": \"value\"}'", }, }, { Name: "tUUID", Type: &schema.ColumnType{Type: &postgres.UUIDType{T: "uuid"}, Raw: "uuid", Null: true}, Default: &schema.Literal{ V: "'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'", }, }, { Name: "tMoney", Type: &schema.ColumnType{Type: &postgres.CurrencyType{T: "money"}, Raw: "money", Null: true}, Default: &schema.Literal{ V: "18", }, }, { Name: "tInterval", Type: &schema.ColumnType{Type: &postgres.IntervalType{T: "interval", Precision: intp(6)}, Raw: "interval", Null: true}, Default: &schema.RawExpr{ X: "'04:00:00'::interval", }, }, { Name: "tUserDefined", Type: &schema.ColumnType{Type: &postgres.UserDefinedType{T: "address"}, Raw: "USER-DEFINED", Null: true}, Default: &schema.RawExpr{ X: "'(ab,cd)'::address", }, }, }, } require.EqualValues(t, &expected, ts) }) t.Run("ImplicitIndexes", func(t *testing.T) { pgRun(t, func(t *pgTest) { testImplicitIndexes(t, t.db) }) }) } func (t *pgTest) url(schema string) string { var ( format = "postgres://postgres:pass@localhost:%d/test?sslmode=disable" args = []any{t.port} ) if schema != "" { format += "&search_path=%s" args = append(args, schema) } return fmt.Sprintf(format, args...) 
} func (t *pgTest) driver() migrate.Driver { return t.drv } func (t *pgTest) revisionsStorage() migrate.RevisionReadWriter { return t.rrw } func (t *pgTest) applyHcl(spec string) { realm := t.loadRealm() var desired schema.Schema err := postgres.EvalHCLBytes([]byte(spec), &desired, nil) require.NoError(t, err) existing := realm.Schemas[0] diff, err := t.drv.SchemaDiff(existing, &desired) require.NoError(t, err) err = t.drv.ApplyChanges(context.Background(), diff) require.NoError(t, err) } func (t *pgTest) valueByVersion(values map[string]string, defaults string) string { if v, ok := values[t.version]; ok { return v } return defaults } func (t *pgTest) loadRealm() *schema.Realm { r, err := t.drv.InspectRealm(context.Background(), &schema.InspectRealmOption{ Schemas: []string{"public"}, }) require.NoError(t, err) return r } func (t *pgTest) loadUsers() *schema.Table { return t.loadTable("users") } func (t *pgTest) loadPosts() *schema.Table { return t.loadTable("posts") } func (t *pgTest) loadTable(name string) *schema.Table { realm := t.loadRealm() require.Len(t, realm.Schemas, 1) table, ok := realm.Schemas[0].Table(name) require.True(t, ok) return table } func (t *pgTest) users() *schema.Table { usersT := &schema.Table{ Name: "users", Schema: t.realm().Schemas[0], Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&postgres.Identity{}}, }, { Name: "x", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}, }, }, } usersT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: usersT.Columns[0]}}} return usersT } func (t *pgTest) posts() *schema.Table { usersT := t.users() postsT := &schema.Table{ Name: "posts", Schema: t.realm().Schemas[0], Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&postgres.Identity{}}, }, { Name: "author_id", Type: 
&schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, Default: &schema.Literal{V: "10"}, }, { Name: "ctime", Type: &schema.ColumnType{Raw: "timestamp", Type: &schema.TimeType{T: "timestamp"}}, Default: &schema.RawExpr{ X: "CURRENT_TIMESTAMP", }, }, }, Attrs: []schema.Attr{ &schema.Comment{Text: "posts comment"}, }, } postsT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: postsT.Columns[0]}}} postsT.Indexes = []*schema.Index{ {Name: "author_id", Parts: []*schema.IndexPart{{C: postsT.Columns[1]}}}, {Name: "id_author_id_unique", Unique: true, Parts: []*schema.IndexPart{{C: postsT.Columns[1]}, {C: postsT.Columns[0]}}}, } postsT.ForeignKeys = []*schema.ForeignKey{ {Symbol: "author_id", Table: postsT, Columns: postsT.Columns[1:2], RefTable: usersT, RefColumns: usersT.Columns[:1], OnDelete: schema.NoAction}, } return postsT } func (t *pgTest) realm() *schema.Realm { r := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "public", }, }, Attrs: []schema.Attr{ &schema.Collation{V: "en_US.utf8"}, &postgres.CType{V: "en_US.utf8"}, }, } r.Schemas[0].Realm = r return r } func (t *pgTest) diff(t1, t2 *schema.Table) []schema.Change { changes, err := t.drv.TableDiff(t1, t2) require.NoError(t, err) return changes } func (t *pgTest) migrate(changes ...schema.Change) { err := t.drv.ApplyChanges(context.Background(), changes) require.NoError(t, err) } func (t *pgTest) dropTables(names ...string) { t.Cleanup(func() { _, err := t.db.Exec("DROP TABLE IF EXISTS " + strings.Join(names, ", ")) require.NoError(t.T, err, "drop tables %q", names) }) } func (t *pgTest) dropSchemas(names ...string) { t.Cleanup(func() { _, err := t.db.Exec("DROP SCHEMA IF EXISTS " + strings.Join(names, ", ") + " CASCADE") require.NoError(t.T, err, "drop schema %q", names) }) } atlas-0.7.2/internal/integration/script_test.go000066400000000000000000000522711431455511600216710ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. 
// This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package integration import ( "bytes" "context" "database/sql" "fmt" "os" "os/exec" "path/filepath" "regexp" "strconv" "strings" "testing" "unicode" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/mysql" "ariga.io/atlas/sql/postgres" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlite" "github.com/pkg/diff" "github.com/rogpeppe/go-internal/testscript" "github.com/stretchr/testify/require" ) func TestMySQL_Script(t *testing.T) { myRun(t, func(t *myTest) { testscript.Run(t.T, testscript.Params{ Dir: "testdata/mysql", Setup: t.setupScript, Cmds: map[string]func(ts *testscript.TestScript, neg bool, args []string){ "only": cmdOnly, "apply": t.cmdApply, "exist": t.cmdExist, "synced": t.cmdSynced, "cmphcl": t.cmdCmpHCL, "cmpshow": t.cmdCmpShow, "cmpmig": t.cmdCmpMig, "execsql": t.cmdExec, "atlas": t.cmdCLI, "clearSchema": t.clearSchema, }, }) }) } func TestPostgres_Script(t *testing.T) { pgRun(t, func(t *pgTest) { testscript.Run(t.T, testscript.Params{ Dir: "testdata/postgres", Setup: t.setupScript, Cmds: map[string]func(ts *testscript.TestScript, neg bool, args []string){ "only": cmdOnly, "apply": t.cmdApply, "exist": t.cmdExist, "synced": t.cmdSynced, "cmphcl": t.cmdCmpHCL, "cmpshow": t.cmdCmpShow, "cmpmig": t.cmdCmpMig, "execsql": t.cmdExec, "atlas": t.cmdCLI, "clearSchema": t.clearSchema, }, }) }) } func TestSQLite_Script(t *testing.T) { tt := &liteTest{T: t} testscript.Run(t, testscript.Params{ Dir: "testdata/sqlite", Setup: tt.setupScript, Cmds: map[string]func(ts *testscript.TestScript, neg bool, args []string){ "apply": tt.cmdApply, "exist": tt.cmdExist, "synced": tt.cmdSynced, "cmpshow": tt.cmdCmpShow, "cmpmig": tt.cmdCmpMig, "execsql": tt.cmdExec, "atlas": tt.cmdCLI, "clearSchema": tt.clearSchema, }, }) } var keyT struct{} func (t *myTest) setupScript(env *testscript.Env) error { attrs := t.defaultAttrs() env.Setenv("version", 
t.version) env.Setenv("charset", attrs[0].(*schema.Charset).V) env.Setenv("collate", attrs[1].(*schema.Collation).V) if err := replaceDBURL(env, t.url("")); err != nil { return err } return setupScript(t.T, env, t.db, "DROP SCHEMA IF EXISTS %s") } func replaceDBURL(env *testscript.Env, url string) error { // Set the workdir in the test atlas.hcl file. projectFile := filepath.Join(env.WorkDir, "atlas.hcl") if b, err := os.ReadFile(projectFile); err == nil { rep := strings.ReplaceAll(string(b), "URL", url) return os.WriteFile(projectFile, []byte(rep), 0600) } return nil } func (t *pgTest) setupScript(env *testscript.Env) error { env.Setenv("version", t.version) u := strings.ReplaceAll(t.url(""), "/test", "/") if err := replaceDBURL(env, u); err != nil { return err } return setupScript(t.T, env, t.db, "DROP SCHEMA IF EXISTS %s CASCADE") } func setupScript(t *testing.T, env *testscript.Env, db *sql.DB, dropCmd string) error { ctx := context.Background() conn, err := db.Conn(ctx) if err != nil { return err } name := strings.ReplaceAll(filepath.Base(env.WorkDir), "-", "_") env.Setenv("db", name) if _, err := conn.ExecContext(ctx, fmt.Sprintf("CREATE SCHEMA IF NOT EXISTS %s", name)); err != nil { return err } env.Defer(func() { if _, err := conn.ExecContext(ctx, fmt.Sprintf(dropCmd, name)); err != nil { t.Fatal(err) } if err := conn.Close(); err != nil { t.Fatal(err) } }) // Store the testscript.T for later use. // See "only" function below. 
env.Values[keyT] = env.T() if err := setupCLITest(t, env); err != nil { return err } return nil } var ( keyDB *sql.DB keyDrv *sqlite.Driver ) const atlasPathKey = "cli.atlas" func (t *liteTest) setupScript(env *testscript.Env) error { db, err := sql.Open("sqlite3", fmt.Sprintf("file:%s?cache=shared&_fk=1", filepath.Join(env.WorkDir, "atlas.sqlite"))) require.NoError(t, err) env.Defer(func() { require.NoError(t, db.Close()) }) drv, err := sqlite.Open(db) require.NoError(t, err) env.Setenv("db", "main") // Attach connection and driver to the // environment as tests run in parallel. env.Values[keyDB] = db env.Values[keyDrv] = drv if err := setupCLITest(t.T, env); err != nil { return err } // Set the workdir in the test atlas.hcl file. projectFile := filepath.Join(env.WorkDir, "atlas.hcl") if b, err := os.ReadFile(projectFile); err == nil { rep := strings.ReplaceAll(string(b), "URL", fmt.Sprintf("sqlite://file:%s/atlas.sqlite?cache=shared&_fk=1", env.WorkDir)) return os.WriteFile(projectFile, []byte(rep), 0600) } return nil } func setupCLITest(t *testing.T, env *testscript.Env) error { path, err := buildCmd(t) if err != nil { return err } env.Setenv(atlasPathKey, path) return nil } // cmdOnly executes only tests that their driver version matches the given pattern. // For example, "only 8" or "only 8 maria*" func cmdOnly(ts *testscript.TestScript, neg bool, args []string) { ver := ts.Getenv("version") for i := range args { re, rerr := regexp.Compile(`(?mi)` + args[i]) ts.Check(rerr) if !neg == re.MatchString(ver) { return } } // This is not an elegant way to get the created testing.T for the script, // but we need some workaround to get it in order to skip specific tests. 
ts.Value(keyT).(testscript.T).Skip("skip version", ver) } func (t *myTest) cmdCmpShow(ts *testscript.TestScript, _ bool, args []string) { cmdCmpShow(ts, args, func(schema, name string) (string, error) { var create string if err := t.db.QueryRow(fmt.Sprintf("SHOW CREATE TABLE `%s`.`%s`", schema, name)).Scan(&name, &create); err != nil { return "", err } i := strings.LastIndexByte(create, ')') create, opts := create[:i+1], strings.Fields(create[i+1:]) for _, opt := range opts { switch strings.Split(opt, "=")[0] { // Keep only options that are relevant for the tests. case "AUTO_INCREMENT", "COMMENT": create += " " + opt } } return create, nil }) } func (t *pgTest) cmdCmpShow(ts *testscript.TestScript, _ bool, args []string) { cmdCmpShow(ts, args, func(schema, name string) (string, error) { buf, err := exec.Command("docker", "ps", "-qa", "-f", fmt.Sprintf("publish=%d", t.port)).CombinedOutput() if err != nil { return "", fmt.Errorf("get container id %q: %v", buf, err) } buf = bytes.TrimSpace(buf) if len(bytes.Split(buf, []byte("\n"))) > 1 { return "", fmt.Errorf("multiple container ids found: %q", buf) } cmd := exec.Command("docker", "exec", string(buf), "psql", "-U", "postgres", "-d", "test", "-c", fmt.Sprintf(`\d %s.%s`, schema, name)) // Use "cmd.String" to debug command. 
buf, err = cmd.CombinedOutput() if err != nil { return "", err } lines := strings.Split(string(buf), "\n") for i := range lines { lines[i] = strings.TrimRightFunc(lines[i], unicode.IsSpace) } return strings.Join(lines, "\n"), err }) } func (t *liteTest) cmdCmpShow(ts *testscript.TestScript, _ bool, args []string) { cmdCmpShow(ts, args, func(_, name string) (string, error) { var ( stmts []string db = ts.Value(keyDB).(*sql.DB) ) rows, err := db.Query("SELECT sql FROM sqlite_schema where tbl_name = ?", name) if err != nil { return "", fmt.Errorf("querying schema") } defer rows.Close() for rows.Next() { var s string if err := rows.Scan(&s); err != nil { return "", err } stmts = append(stmts, s) } return strings.Join(stmts, "\n"), nil }) } func cmdCmpShow(ts *testscript.TestScript, args []string, show func(schema, name string) (string, error)) { if len(args) < 2 { ts.Fatalf("invalid number of args to 'cmpshow': %d", len(args)) } var ( ver = ts.Getenv("version") fname = args[len(args)-1] stmts = make([]string, 0, len(args)-1) ) for _, name := range args[:len(args)-1] { create, err := show(ts.Getenv("db"), name) if err != nil { ts.Fatalf("show table %q: %v", name, err) } stmts = append(stmts, create) } // Check if there is a file prefixed by database version (1.sql and /1.sql). 
if _, err := os.Stat(ts.MkAbs(filepath.Join(ver, fname))); err == nil { fname = filepath.Join(ver, fname) } t1, t2 := strings.Join(stmts, "\n"), ts.ReadFile(fname) if strings.TrimSpace(t1) == strings.TrimSpace(t2) { return } var sb strings.Builder ts.Check(diff.Text("show", fname, t1, t2, &sb)) ts.Fatalf(sb.String()) } func (t *myTest) cmdCmpHCL(ts *testscript.TestScript, _ bool, args []string) { r := strings.NewReplacer("$charset", ts.Getenv("charset"), "$collate", ts.Getenv("collate"), "$db", ts.Getenv("db")) cmdCmpHCL(ts, args, func(name string) (string, error) { s, err := t.drv.InspectSchema(context.Background(), name, nil) ts.Check(err) buf, err := mysql.MarshalHCL(s) require.NoError(t, err) return string(buf), nil }, func(s string) string { return r.Replace(ts.ReadFile(s)) }) } func (t *pgTest) cmdCmpHCL(ts *testscript.TestScript, _ bool, args []string) { cmdCmpHCL(ts, args, func(name string) (string, error) { s, err := t.drv.InspectSchema(context.Background(), name, nil) ts.Check(err) buf, err := postgres.MarshalHCL(s) require.NoError(t, err) return string(buf), nil }, func(s string) string { return strings.ReplaceAll(ts.ReadFile(s), "$db", ts.Getenv("db")) }) } func cmdCmpHCL(ts *testscript.TestScript, args []string, inspect func(schema string) (string, error), read ...func(string) string) { if len(args) != 1 { ts.Fatalf("invalid number of args to 'cmpinspect': %d", len(args)) } if len(read) == 0 { read = append(read, ts.ReadFile) } var ( fname = args[0] ver = ts.Getenv("version") ) f1, err := inspect(ts.Getenv("db")) if err != nil { ts.Fatalf("inspect schema %q: %v", ts.Getenv("db"), err) } // Check if there is a file prefixed by database version (1.sql and /1.sql). 
if _, err := os.Stat(ts.MkAbs(filepath.Join(ver, fname))); err == nil { fname = filepath.Join(ver, fname) } f2 := read[0](fname) if strings.TrimSpace(f1) == strings.TrimSpace(f2) { return } var sb strings.Builder ts.Check(diff.Text("inspect", fname, f1, f2, &sb)) ts.Fatalf(sb.String()) } func (t *myTest) cmdExec(ts *testscript.TestScript, _ bool, args []string) { cmdExec(ts, args, t.db) } func (t *pgTest) cmdExec(ts *testscript.TestScript, _ bool, args []string) { cmdExec(ts, args, t.db) } func (t *liteTest) cmdExec(ts *testscript.TestScript, _ bool, args []string) { cmdExec(ts, args, ts.Value(keyDB).(*sql.DB)) } func (t *myTest) cmdCLI(ts *testscript.TestScript, neg bool, args []string) { cmdCLI(ts, neg, args, t.url(ts.Getenv("db")), ts.Getenv(atlasPathKey)) } func (t *pgTest) cmdCLI(ts *testscript.TestScript, neg bool, args []string) { cmdCLI(ts, neg, args, t.url(ts.Getenv("db")), ts.Getenv(atlasPathKey)) } func (t *liteTest) cmdCLI(ts *testscript.TestScript, neg bool, args []string) { dbURL := fmt.Sprintf("sqlite://file:%s/atlas.sqlite?cache=shared&_fk=1", ts.Getenv("WORK")) cmdCLI(ts, neg, args, dbURL, ts.Getenv(atlasPathKey)) } func cmdCLI(ts *testscript.TestScript, neg bool, args []string, dbURL, cliPath string) { var ( workDir = ts.Getenv("WORK") r = strings.NewReplacer("URL", dbURL, "$db", ts.Getenv("db")) ) for i, arg := range args { args[i] = r.Replace(arg) } switch l := len(args); { // If command was run with a unix redirect-like suffix. case l > 1 && args[l-2] == ">": outPath := filepath.Join(workDir, args[l-1]) f, err := os.Create(outPath) ts.Check(err) defer f.Close() cmd := exec.Command(cliPath, args[0:l-2]...) cmd.Stdout = f stderr := &bytes.Buffer{} cmd.Stderr = stderr cmd.Dir = workDir if err := cmd.Run(); err != nil && !neg { ts.Fatalf("\n[stderr]\n%s", stderr) } default: err := ts.Exec(cliPath, args...) 
if !neg { ts.Check(err) } if neg && err == nil { ts.Fatalf("expected fail") } } } func (t *myTest) cmdCmpMig(ts *testscript.TestScript, neg bool, args []string) { cmdCmpMig(ts, neg, args) } func (t *pgTest) cmdCmpMig(ts *testscript.TestScript, neg bool, args []string) { cmdCmpMig(ts, neg, args) } func (t *liteTest) cmdCmpMig(ts *testscript.TestScript, neg bool, args []string) { cmdCmpMig(ts, neg, args) } var reLiquibaseChangeset = regexp.MustCompile("--changeset atlas:[0-9]+-[0-9]+") // cmdCmpMig compares a migration file under migrations with a provided file. // If the first argument is a filename that does exist, that file is used for comparison. // If there is no file with that name, the argument is parsed to an integer n and the // nth sql file is used for comparison. Lexicographic order of // the files in the directory is used to access the file of interest. func cmdCmpMig(ts *testscript.TestScript, _ bool, args []string) { if len(args) < 2 { ts.Fatalf("invalid number of args to 'cmpmig': %d", len(args)) } // Check if there is a file prefixed by database version (1.sql and /1.sql). var ( ver = ts.Getenv("version") fname = args[1] ) if _, err := os.Stat(ts.MkAbs(filepath.Join(ver, fname))); err == nil { fname = filepath.Join(ver, fname) } expected := strings.TrimSpace(ts.ReadFile(fname)) dir, err := os.ReadDir(ts.MkAbs("migrations")) ts.Check(err) idx, err := strconv.Atoi(args[0]) ts.Check(err) current := 0 for _, f := range dir { if f.IsDir() || !strings.HasSuffix(f.Name(), ".sql") { continue } if current == idx { actual := strings.TrimSpace(ts.ReadFile(filepath.Join("migrations", f.Name()))) exLines, acLines := strings.Split(actual, "\n"), strings.Split(expected, "\n") if len(exLines) != len(acLines) { var sb strings.Builder ts.Check(diff.Text(f.Name(), args[1], expected, actual, &sb)) ts.Fatalf(sb.String()) } for i := range exLines { // Skip liquibase changeset comments since they contain a timestamp. 
if reLiquibaseChangeset.MatchString(acLines[i]) { continue } if exLines[i] != acLines[i] { var sb strings.Builder ts.Check(diff.Text(f.Name(), args[1], expected, actual, &sb)) ts.Fatalf(sb.String()) } } return } current++ } ts.Fatalf("could not find the #%d migration", idx) } func cmdExec(ts *testscript.TestScript, args []string, db *sql.DB) { if len(args) == 0 { ts.Fatalf("missing statements for 'execsql'") } for i := range args { s := strings.ReplaceAll(args[i], "$db", ts.Getenv("db")) _, err := db.Exec(s) ts.Check(err) } } func (t *myTest) cmdExist(ts *testscript.TestScript, neg bool, args []string) { cmdExist(ts, neg, args, func(schema, name string) (bool, error) { var b bool if err := t.db.QueryRow("SELECT COUNT(*) FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ?", schema, name).Scan(&b); err != nil { return false, err } return b, nil }) } func (t *pgTest) cmdExist(ts *testscript.TestScript, neg bool, args []string) { cmdExist(ts, neg, args, func(schema, name string) (bool, error) { var b bool if err := t.db.QueryRow("SELECT COUNT(*) FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = $1 AND TABLE_NAME = $2", schema, name).Scan(&b); err != nil { return false, err } return b, nil }) } func (t *liteTest) cmdExist(ts *testscript.TestScript, neg bool, args []string) { cmdExist(ts, neg, args, func(_, name string) (bool, error) { var ( b bool db = ts.Value(keyDB).(*sql.DB) ) if err := db.QueryRow("SELECT COUNT(*) FROM sqlite_master WHERE `type`='table' AND `name` = ?", name).Scan(&b); err != nil { return false, err } return b, nil }) } func cmdExist(ts *testscript.TestScript, neg bool, args []string, exists func(schema, name string) (bool, error)) { for _, name := range args { b, err := exists(ts.Getenv("db"), name) if err != nil { ts.Fatalf("failed query table existence %q: %v", name, err) } if !b != neg { ts.Fatalf("table %q existence failed", name) } } } func (t *myTest) cmdSynced(ts *testscript.TestScript, neg bool, args []string) { 
cmdSynced(ts, neg, args, t.hclDiff) } func (t *myTest) cmdApply(ts *testscript.TestScript, neg bool, args []string) { cmdApply(ts, neg, args, t.drv.ApplyChanges, t.hclDiff) } func (t *myTest) hclDiff(ts *testscript.TestScript, name string) ([]schema.Change, error) { var ( desired = &schema.Schema{} f = ts.ReadFile(name) ctx = context.Background() r = strings.NewReplacer("$charset", ts.Getenv("charset"), "$collate", ts.Getenv("collate"), "$db", ts.Getenv("db")) ) ts.Check(mysql.EvalHCLBytes([]byte(r.Replace(f)), desired, nil)) current, err := t.drv.InspectSchema(ctx, desired.Name, nil) ts.Check(err) desired, err = t.drv.(schema.Normalizer).NormalizeSchema(ctx, desired) // Normalization and diffing errors should // be returned to the caller. if err != nil { return nil, err } changes, err := t.drv.SchemaDiff(current, desired) if err != nil { return nil, err } return changes, nil } func (t *pgTest) cmdSynced(ts *testscript.TestScript, neg bool, args []string) { cmdSynced(ts, neg, args, t.hclDiff) } func (t *pgTest) cmdApply(ts *testscript.TestScript, neg bool, args []string) { cmdApply(ts, neg, args, t.drv.ApplyChanges, t.hclDiff) } func (t *pgTest) hclDiff(ts *testscript.TestScript, name string) ([]schema.Change, error) { var ( desired = &schema.Schema{} ctx = context.Background() f = strings.ReplaceAll(ts.ReadFile(name), "$db", ts.Getenv("db")) ) ts.Check(postgres.EvalHCLBytes([]byte(f), desired, nil)) current, err := t.drv.InspectSchema(ctx, desired.Name, nil) ts.Check(err) desired, err = t.drv.(schema.Normalizer).NormalizeSchema(ctx, desired) // Normalization and diffing errors should // be returned to the caller. 
if err != nil { return nil, err } changes, err := t.drv.SchemaDiff(current, desired) if err != nil { return nil, err } return changes, nil } func (t *liteTest) cmdSynced(ts *testscript.TestScript, neg bool, args []string) { cmdSynced(ts, neg, args, t.hclDiff) } func (t *liteTest) cmdApply(ts *testscript.TestScript, neg bool, args []string) { cmdApply(ts, neg, args, ts.Value(keyDrv).(*sqlite.Driver).ApplyChanges, t.hclDiff) } func (t *liteTest) hclDiff(ts *testscript.TestScript, name string) ([]schema.Change, error) { var ( desired = &schema.Schema{} f = ts.ReadFile(name) drv = ts.Value(keyDrv).(*sqlite.Driver) ) ts.Check(sqlite.EvalHCLBytes([]byte(f), desired, nil)) current, err := drv.InspectSchema(context.Background(), desired.Name, nil) ts.Check(err) changes, err := drv.SchemaDiff(current, desired) // Diff errors should return to the caller. if err != nil { return nil, err } return changes, nil } func (t *myTest) clearSchema(ts *testscript.TestScript, _ bool, args []string) { if len(args) == 0 { args = append(args, ts.Getenv("db")) } _, err := t.db.Exec("DROP DATABASE IF EXISTS " + args[0]) ts.Check(err) _, err = t.db.Exec("CREATE DATABASE IF NOT EXISTS " + args[0]) ts.Check(err) } func (t *pgTest) clearSchema(ts *testscript.TestScript, _ bool, args []string) { if len(args) == 0 { args = append(args, ts.Getenv("db")) } _, err := t.db.Exec(fmt.Sprintf("DROP SCHEMA IF EXISTS %s CASCADE", args[0])) ts.Check(err) _, err = t.db.Exec("CREATE SCHEMA IF NOT EXISTS " + args[0]) ts.Check(err) } func (t *liteTest) clearSchema(ts *testscript.TestScript, _ bool, _ []string) { for _, stmt := range []string{ "PRAGMA writable_schema = 1;", "DELETE FROM sqlite_master WHERE type IN ('table', 'index', 'trigger');", "PRAGMA writable_schema = 0;", "VACUUM;", } { _, err := ts.Value(keyDB).(*sql.DB).Exec(stmt) ts.Check(err) } } func cmdSynced(ts *testscript.TestScript, neg bool, args []string, diff func(*testscript.TestScript, string) ([]schema.Change, error)) { if len(args) != 1 { 
ts.Fatalf("unexpected number of args to synced command: %d", len(args)) } switch changes, err := diff(ts, args[0]); { case err != nil: ts.Fatalf("unexpected diff failure on synced: %v", err) case len(changes) > 0 && !neg: ts.Fatalf("expect no schema changes, but got: %d", len(changes)) case len(changes) == 0 && neg: ts.Fatalf("expect schema changes, but there are none") } } func cmdApply(ts *testscript.TestScript, neg bool, args []string, apply func(context.Context, []schema.Change, ...migrate.PlanOption) error, diff func(*testscript.TestScript, string) ([]schema.Change, error)) { changes, err := diff(ts, args[0]) switch { case err != nil && !neg: ts.Fatalf("diff states: %v", err) // If we expect to fail, and there's a specific error to compare. case err != nil && len(args) == 2: matchErr(ts, err, args[1]) return } switch err := apply(context.Background(), changes); { case err != nil && !neg: ts.Fatalf("apply changes: %v", err) case err == nil && neg: ts.Fatalf("unexpected apply success") // If we expect to fail, and there's a specific error to compare. case err != nil && len(args) == 2: matchErr(ts, err, args[1]) // Apply passed. Make sure there is no drift. case !neg: changes, err := diff(ts, args[0]) ts.Check(err) if len(changes) > 0 { ts.Fatalf("unexpected schema changes: %d", len(changes)) } } } func matchErr(ts *testscript.TestScript, err error, p string) { re, rerr := regexp.Compile(`(?m)` + regexp.QuoteMeta(p)) ts.Check(rerr) if !re.MatchString(err.Error()) { ts.Fatalf("mismatched errors: %v != %s", err, p) } } atlas-0.7.2/internal/integration/sqlite_test.go000066400000000000000000000726541431455511600216750ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package integration import ( "context" "database/sql" "database/sql/driver" "fmt" "os" "path" "path/filepath" "strings" "testing" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/postgres" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlite" "entgo.io/ent/dialect" _ "github.com/mattn/go-sqlite3" "github.com/stretchr/testify/require" ) type liteTest struct { *testing.T db *sql.DB drv migrate.Driver rrw migrate.RevisionReadWriter file string } func liteRun(t *testing.T, fn func(test *liteTest)) { t.Parallel() f := path.Join(t.TempDir(), strings.ReplaceAll(t.Name(), "/", "_")) db, err := sql.Open("sqlite3", fmt.Sprintf("file:%s?cache=shared&_fk=1", f)) require.NoError(t, err) t.Cleanup(func() { require.NoError(t, db.Close()) }) drv, err := sqlite.Open(db) require.NoError(t, err) tt := &liteTest{T: t, db: db, drv: drv, file: f, rrw: &rrw{}} fn(tt) } func TestSQLite_Executor(t *testing.T) { liteRun(t, func(t *liteTest) { testExecutor(t) }) } func TestSQLite_AddDropTable(t *testing.T) { liteRun(t, func(t *liteTest) { testAddDrop(t) }) } func TestSQLite_Relation(t *testing.T) { liteRun(t, func(t *liteTest) { testRelation(t) }) } func TestSQLite_Ent(t *testing.T) { liteRun(t, func(t *liteTest) { testEntIntegration(t, dialect.SQLite, t.db) }) } func TestSQLite_ColumnCheck(t *testing.T) { liteRun(t, func(t *liteTest) { usersT := &schema.Table{ Name: "users", Attrs: []schema.Attr{schema.NewCheck().SetName("users_c_check").SetExpr("c > 5")}, Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}, {Name: "c", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}, }, } t.dropTables(usersT.Name) t.migrate(&schema.AddTable{T: usersT}) ensureNoChange(t, usersT) }) } func TestSQLite_AddIndexedColumns(t *testing.T) { liteRun(t, func(t *liteTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}}, } t.migrate(&schema.AddTable{T: 
usersT}) t.dropTables(usersT.Name) // Insert 2 records to the users table, and make sure they are there // after executing migration. _, err := t.db.Exec("INSERT INTO users (id) VALUES (1), (2)") require.NoError(t, err) usersT.Columns = append(usersT.Columns, &schema.Column{ Name: "a", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Null: true}, Default: &schema.Literal{V: "10"}, }, &schema.Column{ Name: "b", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Null: true}, Default: &schema.Literal{V: "20"}, }, &schema.Column{ Name: "c", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Null: true}, Default: &schema.Literal{V: "30"}, }) usersT.Indexes = append(usersT.Indexes, &schema.Index{ Unique: true, Name: "id_a_b_c_unique", Parts: []*schema.IndexPart{{C: usersT.Columns[0]}, {C: usersT.Columns[1]}, {C: usersT.Columns[2]}, {C: usersT.Columns[3]}}, }) changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 4, "usersT contains 3 new columns and 1 new index") t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) // Scan records from the table to ensure correctness of // the rows transferring. rows, err := t.db.Query("SELECT * FROM users") require.NoError(t, err) require.True(t, rows.Next()) var v [4]int require.NoError(t, rows.Scan(&v[0], &v[1], &v[2], &v[3])) require.Equal(t, [4]int{1, 10, 20, 30}, v) require.True(t, rows.Next()) require.NoError(t, rows.Scan(&v[0], &v[1], &v[2], &v[3])) require.Equal(t, [4]int{2, 10, 20, 30}, v) require.False(t, rows.Next()) require.NoError(t, rows.Close()) // Dropping a column from both table and index. 
usersT = t.loadUsers() idx, ok := usersT.Index("id_a_b_c_unique") require.True(t, ok) require.Len(t, idx.Parts, 4) usersT.Columns = usersT.Columns[:len(usersT.Columns)-1] idx.Parts = idx.Parts[:len(idx.Parts)-1] changes = t.diff(t.loadUsers(), usersT) require.Len(t, changes, 2) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, t.loadUsers()) // Scan records from the table to ensure correctness of // the rows transferring. rows, err = t.db.Query("SELECT * FROM users") require.NoError(t, err) require.True(t, rows.Next()) var u [3]int require.NoError(t, rows.Scan(&u[0], &u[1], &u[2])) require.Equal(t, [3]int{1, 10, 20}, u) require.True(t, rows.Next()) require.NoError(t, rows.Scan(&u[0], &u[1], &u[2])) require.Equal(t, [3]int{2, 10, 20}, u) require.False(t, rows.Next()) require.NoError(t, rows.Close()) }) } func TestSQLite_AutoIncrement(t *testing.T) { liteRun(t, func(t *liteTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}, Attrs: []schema.Attr{sqlite.AutoIncrement{}}}, }, } usersT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: usersT.Columns[0]}}} t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) _, err := t.db.Exec("INSERT INTO users DEFAULT VALUES") require.NoError(t, err) var id int err = t.db.QueryRow("SELECT id FROM users").Scan(&id) require.NoError(t, err) require.Equal(t, 1, id) }) } func TestSQLite_AutoIncrementSequence(t *testing.T) { // This test shows a bug detected in Ent when working with pre-defined auto-increment start values. // If there is a change somewhere to create an auto-increment with a start value, Atlas must make sure to create // an entry in the 'sqlite_sequence' table (and also ensure the table exists before attempting to create the entry). 
liteRun(t, func(t *liteTest) { t1 := &schema.Table{ Name: "users", Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}, Attrs: []schema.Attr{&sqlite.AutoIncrement{Seq: 10}}, }, }, Attrs: []schema.Attr{&sqlite.AutoIncrement{}}, } t1.PrimaryKey = &schema.Index{Table: t1, Parts: []*schema.IndexPart{{C: t1.Columns[0]}}} t1.Columns[0].Indexes = append(t1.Columns[0].Indexes, t1.PrimaryKey) // Planning the changes should not result in an error. _ = plan(t, "col_seq", &schema.AddTable{T: t1}) }) } func TestSQLite_AddColumns(t *testing.T) { liteRun(t, func(t *liteTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}, Attrs: []schema.Attr{sqlite.AutoIncrement{}}}, }, } usersT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: usersT.Columns[0]}}} t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) _, err := t.db.Exec("INSERT INTO users (id) VALUES (1), (2)") require.NoError(t, err) usersT.Columns = append( usersT.Columns, &schema.Column{Name: "null_int", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Null: true}}, &schema.Column{Name: "notnull_int", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}, Default: &schema.Literal{V: "1"}}, &schema.Column{Name: "null_real", Type: &schema.ColumnType{Type: &schema.FloatType{T: "real"}, Null: true}}, &schema.Column{Name: "notnull_real", Type: &schema.ColumnType{Type: &schema.FloatType{T: "real"}}, Default: &schema.Literal{V: "1.0"}}, &schema.Column{Name: "null_text", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}, Null: true}}, &schema.Column{Name: "notnull_text1", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}, Default: &schema.RawExpr{X: "hello"}}, &schema.Column{Name: "notnull_text2", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}, Default: &schema.Literal{V: "'hello'"}}, 
&schema.Column{Name: "null_blob", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "blob"}, Null: true}}, &schema.Column{Name: "notnull_blob", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "blob"}}, Default: &schema.Literal{V: "'blob'"}}, ) changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 9) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) // Scan records from the table to ensure correctness of // the rows transferring. rows, err := t.db.Query("SELECT id, notnull_int FROM users") require.NoError(t, err) require.True(t, rows.Next()) var v [2]int require.NoError(t, rows.Scan(&v[0], &v[1])) require.Equal(t, [2]int{1, 1}, v) require.True(t, rows.Next()) require.NoError(t, rows.Scan(&v[0], &v[1])) require.Equal(t, [2]int{2, 1}, v) require.False(t, rows.Next()) require.NoError(t, rows.Close()) }) } func TestSQLite_ColumnInt(t *testing.T) { t.Run("ChangeTypeNull", func(t *testing.T) { liteRun(t, func(t *liteTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) usersT.Columns[0].Type.Null = true usersT.Columns[0].Type.Type = &schema.FloatType{T: "real"} changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) require.Equal(t, schema.ChangeNull|schema.ChangeType, changes[0].(*schema.ModifyColumn).Change) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) }) }) t.Run("ChangeDefault", func(t *testing.T) { liteRun(t, func(t *liteTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}, Default: &schema.Literal{V: "1"}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) for _, x := range []string{"2", "'3'", "10.1"} { usersT.Columns[0].Default.(*schema.Literal).V = x 
changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) _, err := t.db.Exec("INSERT INTO users DEFAULT VALUES") require.NoError(t, err) } rows, err := t.db.Query("SELECT a FROM users") require.NoError(t, err) for _, e := range []driver.Value{2, 3, 10.1} { var v driver.Value require.True(t, rows.Next()) require.NoError(t, rows.Scan(&v)) require.EqualValues(t, e, v) } require.False(t, rows.Next()) require.NoError(t, rows.Close()) }) }) } func TestSQLite_ForeignKey(t *testing.T) { t.Run("ChangeAction", func(t *testing.T) { liteRun(t, func(t *liteTest) { usersT, postsT := t.users(), t.posts() t.dropTables(postsT.Name, usersT.Name) t.migrate(&schema.AddTable{T: usersT}, &schema.AddTable{T: postsT}) ensureNoChange(t, postsT, usersT) postsT = t.loadPosts() // The "author_id" constraint. SQLite does not support // getting the foreign-key constraint names at the moment. fk := postsT.ForeignKeys[0] fk.OnUpdate = schema.SetNull fk.OnDelete = schema.Cascade changes := t.diff(t.loadPosts(), postsT) require.Len(t, changes, 1) modifyF, ok := changes[0].(*schema.ModifyForeignKey) require.True(t, ok) require.True(t, modifyF.Change == schema.ChangeUpdateAction|schema.ChangeDeleteAction) t.migrate(&schema.ModifyTable{T: postsT, Changes: changes}) ensureNoChange(t, postsT, usersT) }) }) t.Run("UnsetNull", func(t *testing.T) { liteRun(t, func(t *liteTest) { usersT, postsT := t.users(), t.posts() t.dropTables(postsT.Name, usersT.Name) fk := postsT.ForeignKeys[0] fk.OnDelete = schema.SetNull fk.OnUpdate = schema.SetNull t.migrate(&schema.AddTable{T: usersT}, &schema.AddTable{T: postsT}) ensureNoChange(t, postsT, usersT) postsT = t.loadPosts() c, ok := postsT.Column("author_id") require.True(t, ok) c.Type.Null = false fk = postsT.ForeignKeys[0] fk.OnUpdate = schema.NoAction fk.OnDelete = schema.NoAction changes := t.diff(t.loadPosts(), postsT) require.Len(t, changes, 2) modifyC, ok := 
changes[0].(*schema.ModifyColumn) require.True(t, ok) require.True(t, modifyC.Change == schema.ChangeNull) modifyF, ok := changes[1].(*schema.ModifyForeignKey) require.True(t, ok) require.True(t, modifyF.Change == schema.ChangeUpdateAction|schema.ChangeDeleteAction) t.migrate(&schema.ModifyTable{T: postsT, Changes: changes}) ensureNoChange(t, postsT, usersT) }) }) t.Run("AddDrop", func(t *testing.T) { liteRun(t, func(t *liteTest) { usersT := t.users() t.dropTables(usersT.Name) t.migrate(&schema.AddTable{T: usersT}) ensureNoChange(t, usersT) // Add foreign key. usersT.Columns = append(usersT.Columns, &schema.Column{ Name: "spouse_id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, }) usersT.ForeignKeys = append(usersT.ForeignKeys, &schema.ForeignKey{ Symbol: "spouse_id", Table: usersT, Columns: usersT.Columns[len(usersT.Columns)-1:], RefTable: usersT, RefColumns: usersT.Columns[:1], OnDelete: schema.NoAction, }) changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 2) addC, ok := changes[0].(*schema.AddColumn) require.True(t, ok) require.Equal(t, "spouse_id", addC.C.Name) addF, ok := changes[1].(*schema.AddForeignKey) require.True(t, ok) require.Equal(t, "spouse_id", addF.F.Symbol) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) // Drop foreign keys. 
usersT.Columns = usersT.Columns[:len(usersT.Columns)-1] usersT.ForeignKeys = usersT.ForeignKeys[:len(usersT.ForeignKeys)-1] changes = t.diff(t.loadUsers(), usersT) require.Len(t, changes, 2) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) }) }) } func TestSQLite_HCL(t *testing.T) { full := ` schema "main" { } table "users" { schema = schema.main column "id" { type = int } primary_key { columns = [table.users.column.id] } } table "posts" { schema = schema.main column "id" { type = int } column "author_id" { type = int } foreign_key "author" { columns = [ table.posts.column.author_id, ] ref_columns = [ table.users.column.id, ] } primary_key { columns = [table.users.column.id] } } ` empty := ` schema "main" { } ` liteRun(t, func(t *liteTest) { testHCLIntegration(t, full, empty) }) } func TestSQLite_DefaultsHCL(t *testing.T) { n := "atlas_defaults" liteRun(t, func(t *liteTest) { ddl := ` create table atlas_defaults ( string varchar(255) default "hello_world", quoted varchar(100) default 'never say "never"', d date default current_timestamp, n integer default 0x100 ) ` t.dropTables(n) _, err := t.db.Exec(ddl) require.NoError(t, err) realm := t.loadRealm() spec, err := sqlite.MarshalHCL(realm.Schemas[0]) require.NoError(t, err) var s schema.Schema err = sqlite.EvalHCLBytes(spec, &s, nil) require.NoError(t, err) t.dropTables(n) t.applyHcl(string(spec)) ensureNoChange(t, realm.Schemas[0].Tables[0]) }) } func TestSQLite_CLI(t *testing.T) { h := ` schema "main" { } table "users" { schema = schema.main column "id" { type = int } }` t.Run("InspectFromEnv", func(t *testing.T) { liteRun(t, func(t *liteTest) { env := fmt.Sprintf(` env "hello" { url = "%s" src = "./schema.hcl" } `, t.url("")) wd, _ := os.Getwd() envfile := filepath.Join(wd, "atlas.hcl") err := os.WriteFile(envfile, []byte(env), 0600) t.Cleanup(func() { os.Remove(envfile) }) require.NoError(t, err) testCLISchemaInspectEnv(t, h, "hello", sqlite.EvalHCL) }) }) 
t.Run("SchemaInspect", func(t *testing.T) { liteRun(t, func(t *liteTest) { testCLISchemaInspect(t, h, t.url(""), sqlite.EvalHCL) }) }) t.Run("SchemaApply", func(t *testing.T) { liteRun(t, func(t *liteTest) { testCLISchemaApply(t, h, t.url("")) }) }) t.Run("SchemaApplyWithVars", func(t *testing.T) { h := ` variable "tenant" { type = string } schema "tenant" { name = var.tenant } table "users" { schema = schema.tenant column "id" { type = int } } ` liteRun(t, func(t *liteTest) { testCLISchemaApply(t, h, t.url(""), "--var", "tenant=main") }) }) t.Run("SchemaApplyDryRun", func(t *testing.T) { liteRun(t, func(t *liteTest) { testCLISchemaApplyDry(t, h, t.url("")) }) }) t.Run("SchemaDiffRun", func(t *testing.T) { liteRun(t, func(t *liteTest) { testCLISchemaDiff(t, t.url("")) }) }) t.Run("SchemaApplyAutoApprove", func(t *testing.T) { liteRun(t, func(t *liteTest) { testCLISchemaApplyAutoApprove(t, h, t.url("")) }) }) } func TestSQLite_Sanity(t *testing.T) { n := "atlas_types_sanity" ddl := ` create table atlas_types_sanity ( "tInteger" integer(10) default 100 null, "tInt" int(10) default 100 null, "tTinyIny" tinyint(10) default 100 null, "tSmallInt" smallint(10) default 100 null, "tMediumInt" mediumint(10) default 100 null, "tIntegerBigInt" bigint(10) default 100 null, "tUnsignedBigInt" unsigned big int(10) default 100 null, "tInt2" int2(10) default 100 null, "tInt8" int8(10) default 100 null, "tReal" real(10) default 100 null, "tDouble" double(10) default 100 null, "tDoublePrecision" double precision(10) default 100 null, "tFloat" float(10) default 100 null, "tText" text(10) default 'I am Text' not null, "tCharacter" character(10) default 'I am Text' not null, "tVarchar" varchar(10) default 'I am Text' not null, "tVaryingCharacter" varying character(10) default 'I am Text' not null, "tNchar" nchar(10) default 'I am Text' not null, "tNativeCharacter" native character(10) default 'I am Text' not null, "tNVarChar" nvarchar(10) default 'I am Text' not null, "tClob" clob(10) 
default 'I am Text' not null, "tBlob" blob(10) default 'A' not null, "tNumeric" numeric(10) default 100 not null, "tDecimal" decimal(10,5) default 100 not null, "tBoolean" boolean default false not null, "tDate" date default 'now()' not null , "tDatetime" datetime default 'now()' not null ); ` liteRun(t, func(t *liteTest) { t.dropTables(n) _, err := t.db.Exec(ddl) require.NoError(t, err) realm := t.loadRealm() require.Len(t, realm.Schemas, 1) ts, ok := realm.Schemas[0].Table(n) require.True(t, ok) expected := schema.Table{ Name: n, Schema: realm.Schemas[0], Attrs: ts.Attrs, Columns: []*schema.Column{ { Name: "tInteger", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer", Unsigned: false}, Raw: "integer(10)", Null: true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int", Unsigned: false}, Raw: "int(10)", Null: true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tTinyIny", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "tinyint", Unsigned: false}, Raw: "tinyint(10)", Null: true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tSmallInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "smallint", Unsigned: false}, Raw: "smallint(10)", Null: true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tMediumInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "mediumint", Unsigned: false}, Raw: "mediumint(10)", Null: true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tIntegerBigInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint", Unsigned: false}, Raw: "bigint(10)", Null: true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tUnsignedBigInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "unsigned big int", Unsigned: false}, Raw: "unsigned big int(10)", Null: true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tInt2", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int2", Unsigned: false}, Raw: "int2(10)", Null: 
true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tInt8", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int8", Unsigned: false}, Raw: "int8(10)", Null: true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tReal", Type: &schema.ColumnType{Type: &schema.FloatType{T: "real", Precision: 0}, Raw: "real(10)", Null: true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tDouble", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double", Precision: 0}, Raw: "double(10)", Null: true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tDoublePrecision", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double precision", Precision: 0}, Raw: "double precision(10)", Null: true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tFloat", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float", Precision: 0}, Raw: "float(10)", Null: true}, Default: &schema.Literal{ V: "100", }, }, { Name: "tText", Type: &schema.ColumnType{Type: &schema.StringType{T: "text", Size: 10}, Raw: "text(10)", Null: false}, Default: &schema.Literal{ V: "'I am Text'", }, }, { Name: "tCharacter", Type: &schema.ColumnType{Type: &schema.StringType{T: "character", Size: 10}, Raw: "character(10)", Null: false}, Default: &schema.Literal{ V: "'I am Text'", }, }, { Name: "tVarchar", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar", Size: 10}, Raw: "varchar(10)", Null: false}, Default: &schema.Literal{ V: "'I am Text'", }, }, { Name: "tVaryingCharacter", Type: &schema.ColumnType{Type: &schema.StringType{T: "varying character", Size: 10}, Raw: "varying character(10)", Null: false}, Default: &schema.Literal{ V: "'I am Text'", }, }, { Name: "tNchar", Type: &schema.ColumnType{Type: &schema.StringType{T: "nchar", Size: 10}, Raw: "nchar(10)", Null: false}, Default: &schema.Literal{ V: "'I am Text'", }, }, { Name: "tNativeCharacter", Type: &schema.ColumnType{Type: &schema.StringType{T: "native character", Size: 10}, Raw: "native character(10)", Null: false}, Default: 
&schema.Literal{ V: "'I am Text'", }, }, { Name: "tNVarChar", Type: &schema.ColumnType{Type: &schema.StringType{T: "nvarchar", Size: 10}, Raw: "nvarchar(10)", Null: false}, Default: &schema.Literal{ V: "'I am Text'", }, }, { Name: "tClob", Type: &schema.ColumnType{Type: &schema.StringType{T: "clob", Size: 10}, Raw: "clob(10)", Null: false}, Default: &schema.Literal{ V: "'I am Text'", }, }, { Name: "tBlob", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "blob"}, Raw: "blob(10)", Null: false}, Default: &schema.Literal{ V: "'A'", }, }, { Name: "tNumeric", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric", Precision: 10}, Raw: "numeric(10)", Null: false}, Default: &schema.Literal{ V: "100", }, }, { Name: "tDecimal", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "decimal", Precision: 10, Scale: 5}, Raw: "decimal(10,5)", Null: false}, Default: &schema.Literal{ V: "100", }, }, { Name: "tBoolean", Type: &schema.ColumnType{Type: &schema.BoolType{T: "boolean"}, Raw: "boolean", Null: false}, Default: &schema.Literal{ V: "false", }, }, { Name: "tDate", Type: &schema.ColumnType{Type: &schema.TimeType{T: "date"}, Raw: "date", Null: false}, Default: &schema.Literal{ V: "'now()'", }, }, { Name: "tDatetime", Type: &schema.ColumnType{Type: &schema.TimeType{T: "datetime"}, Raw: "datetime", Null: false}, Default: &schema.Literal{ V: "'now()'", }, }, }, } require.EqualValues(t, &expected, ts) }) t.Run("ImplicitIndexes", func(t *testing.T) { liteRun(t, func(t *liteTest) { testImplicitIndexes(t, t.db) }) }) } func (t *liteTest) driver() migrate.Driver { return t.drv } func (t *liteTest) revisionsStorage() migrate.RevisionReadWriter { return t.rrw } func (t *liteTest) dropSchemas(...string) {} func (t *liteTest) applyHcl(spec string) { realm := t.loadRealm() var desired schema.Schema err := sqlite.EvalHCLBytes([]byte(spec), &desired, nil) require.NoError(t, err) existing := realm.Schemas[0] diff, err := t.drv.SchemaDiff(existing, &desired) require.NoError(t, 
err) err = t.drv.ApplyChanges(context.Background(), diff) require.NoError(t, err) } func (t *liteTest) loadRealm() *schema.Realm { r, err := t.drv.InspectRealm(context.Background(), &schema.InspectRealmOption{ Schemas: []string{"main"}, }) require.NoError(t, err) return r } func (t *liteTest) loadUsers() *schema.Table { return t.loadTable("users") } func (t *liteTest) loadPosts() *schema.Table { return t.loadTable("posts") } func (t *liteTest) loadTable(name string) *schema.Table { realm := t.loadRealm() require.Len(t, realm.Schemas, 1) table, ok := realm.Schemas[0].Table(name) require.True(t, ok) return table } func (t *liteTest) users() *schema.Table { usersT := &schema.Table{ Name: "users", Schema: t.realm().Schemas[0], Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}, }, { Name: "x", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}, }, }, } usersT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: usersT.Columns[0]}}} return usersT } func (t *liteTest) posts() *schema.Table { usersT := t.users() postsT := &schema.Table{ Name: "posts", Schema: t.realm().Schemas[0], Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&postgres.Identity{}}, }, { Name: "author_id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Null: true}, Default: &schema.RawExpr{X: "10"}, }, { Name: "ctime", Type: &schema.ColumnType{Raw: "timestamp", Type: &schema.TimeType{T: "timestamp"}}, Default: &schema.RawExpr{ X: "CURRENT_TIMESTAMP", }, }, }, Attrs: []schema.Attr{ &schema.Comment{Text: "posts comment"}, }, } postsT.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: postsT.Columns[0]}}} postsT.Indexes = []*schema.Index{ {Name: "author_id", Parts: []*schema.IndexPart{{C: postsT.Columns[1]}}}, {Name: "id_author_id_unique", Unique: true, Parts: []*schema.IndexPart{{C: postsT.Columns[1]}, {C: postsT.Columns[0]}}}, 
} postsT.ForeignKeys = []*schema.ForeignKey{ {Symbol: "author_id", Table: postsT, Columns: postsT.Columns[1:2], RefTable: usersT, RefColumns: usersT.Columns[:1], OnDelete: schema.NoAction}, } return postsT } func (t *liteTest) realm() *schema.Realm { r := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "main", Attrs: []schema.Attr{ &sqlite.File{Name: t.file}, }, }, }, } r.Schemas[0].Realm = r return r } func (t *liteTest) diff(t1, t2 *schema.Table) []schema.Change { changes, err := t.drv.TableDiff(t1, t2) require.NoError(t, err) return changes } func (t *liteTest) migrate(changes ...schema.Change) { err := t.drv.ApplyChanges(context.Background(), changes) require.NoError(t, err) } func (t *liteTest) dropTables(names ...string) { t.Cleanup(func() { for i := range names { _, err := t.db.Exec("DROP TABLE IF EXISTS " + names[i]) require.NoError(t.T, err, "drop tables %q", names[i]) } }) } func (t *liteTest) url(_ string) string { return fmt.Sprintf("sqlite://file:%s?cache=shared&_fk=1", t.file) } func (t *liteTest) applyRealmHcl(spec string) { t.applyHcl(spec) } atlas-0.7.2/internal/integration/testdata/000077500000000000000000000000001431455511600206015ustar00rootroot00000000000000atlas-0.7.2/internal/integration/testdata/migrations/000077500000000000000000000000001431455511600227555ustar00rootroot00000000000000atlas-0.7.2/internal/integration/testdata/migrations/mysql/000077500000000000000000000000001431455511600241225ustar00rootroot00000000000000atlas-0.7.2/internal/integration/testdata/migrations/mysql/1_initial.sql000066400000000000000000000001341431455511600265120ustar00rootroot00000000000000CREATE SCHEMA IF NOT EXISTS `bc_test`; CREATE TABLE `bc_test`.`bc_tbl` (`col` INTEGER NULL);atlas-0.7.2/internal/integration/testdata/migrations/mysql/atlas.sum000066400000000000000000000001561431455511600257560ustar00rootroot00000000000000h1:FT0VjrL64KJmuOe1Dq4dpbG/50Kwn0lZqfopa6BhJM8= 1_initial.sql h1:bWUYLjb0oiGQHf45Q08aKFKxVZ3pZBArJnSmuGBw9X4= 
atlas-0.7.2/internal/integration/testdata/migrations/postgres/000077500000000000000000000000001431455511600246235ustar00rootroot00000000000000atlas-0.7.2/internal/integration/testdata/migrations/postgres/1_initial.sql000066400000000000000000000001341431455511600272130ustar00rootroot00000000000000CREATE SCHEMA IF NOT EXISTS "bc_test"; CREATE TABLE "bc_test"."bc_tbl" ("col" INTEGER NULL);atlas-0.7.2/internal/integration/testdata/migrations/postgres/atlas.sum000066400000000000000000000001561431455511600264570ustar00rootroot00000000000000h1:80V3wCzovMg2ot2hR0arbEjEfMfKWDeQNrXZJbFPF10= 1_initial.sql h1:53poyM34ShPWCVU41ldi4d9LUrzqPiQfBdEq//yH4Jo= atlas-0.7.2/internal/integration/testdata/mysql/000077500000000000000000000000001431455511600217465ustar00rootroot00000000000000atlas-0.7.2/internal/integration/testdata/mysql/autoincrement.txt000066400000000000000000000027641431455511600253750ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql # Setup a custom AUTO_INCREMENT initial value. apply 2.hcl cmpshow users 2.sql # Increase the AUTO_INCREMENT value. 
apply 3.hcl cmpshow users 3.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "id" { null = false type = bigint auto_increment = true } primary_key { columns = [column.id] } } -- 1.sql -- CREATE TABLE `users` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`) ) -- mysql8/1.sql -- CREATE TABLE `users` ( `id` bigint NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`) ) -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "id" { null = false type = bigint auto_increment = true } primary_key { columns = [column.id] } auto_increment = 1000 } -- 2.sql -- CREATE TABLE `users` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`) ) AUTO_INCREMENT=1000 -- mysql8/2.sql -- CREATE TABLE `users` ( `id` bigint NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`) ) AUTO_INCREMENT=1000 -- 3.hcl -- schema "$db" {} table "users" { schema = schema.$db column "id" { null = false type = bigint auto_increment = true } primary_key { columns = [column.id] } auto_increment = 2000 } -- 3.sql -- CREATE TABLE `users` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`) ) AUTO_INCREMENT=2000 -- mysql8/3.sql -- CREATE TABLE `users` ( `id` bigint NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`) ) AUTO_INCREMENT=2000atlas-0.7.2/internal/integration/testdata/mysql/cli-migrate-apply.txt000066400000000000000000000061601431455511600260320ustar00rootroot00000000000000only mysql ! atlas migrate apply stderr 'Error: checksum file not found' stdout 'You have a checksum error in your migration directory.' 
stdout 'atlas migrate hash' atlas migrate hash # Apply all of them atlas migrate apply --url URL --revisions-schema $db stdout 'Migrating to version 3 \(3 migrations in total\):' stdout '-- migrating version 1' stdout '-> CREATE TABLE `users` \(`id` bigint NOT NULL AUTO_INCREMENT, `age` bigint NOT NULL, `name` varchar\(255\) NOT NULL, PRIMARY KEY \(`id`\)\) CHARSET utf8mb4 COLLATE utf8mb4_bin;' stdout '-- migrating version 2' stdout '-> ALTER TABLE `users` ADD UNIQUE INDEX `age` \(`age`\);' stdout '-- migrating version 3' stdout '-> CREATE TABLE `pets` \(`id` bigint NOT NULL AUTO_INCREMENT, `name` varchar\(255\) NOT NULL, PRIMARY KEY \(`id`\)\) CHARSET utf8mb4 COLLATE utf8mb4_bin;' stdout '-- 3 migrations' stdout '-- 3 sql statements' cmpshow users users.sql cmpshow pets pets.sql atlas migrate apply --url URL --revisions-schema $db stdout 'No migration files to execute' clearSchema # Apply one by one atlas migrate apply --url URL --revisions-schema $db 1 stdout 'Migrating to version 1 \(1 migrations in total\):' cmpshow users users_1.sql atlas migrate apply --url URL --revisions-schema $db 1 stdout 'Migrating to version 2 from 1 \(1 migrations in total\):' cmpshow users users.sql atlas migrate apply --url URL --revisions-schema $db 1 stdout 'Migrating to version 3 from 2 \(1 migrations in total\):' cmpshow users users.sql cmpshow pets pets.sql atlas migrate apply --url URL --revisions-schema $db 1 stdout 'No migration files to execute' -- migrations/1_first.sql -- CREATE TABLE `users` (`id` bigint NOT NULL AUTO_INCREMENT, `age` bigint NOT NULL, `name` varchar(255) NOT NULL, PRIMARY KEY (`id`)) CHARSET utf8mb4 COLLATE utf8mb4_bin; -- migrations/2_second.sql -- ALTER TABLE `users` ADD UNIQUE INDEX `age` (`age`); -- migrations/3_third.sql -- CREATE TABLE `pets` (`id` bigint NOT NULL AUTO_INCREMENT, `name` varchar(255) NOT NULL, PRIMARY KEY (`id`)) CHARSET utf8mb4 COLLATE utf8mb4_bin; -- users.sql -- CREATE TABLE `users` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `age` 
bigint(20) NOT NULL, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `age` (`age`) ) -- mysql8/users.sql -- CREATE TABLE `users` ( `id` bigint NOT NULL AUTO_INCREMENT, `age` bigint NOT NULL, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `age` (`age`) ) -- users_1.sql -- CREATE TABLE `users` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `age` bigint(20) NOT NULL, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`) ) -- mysql8/users_1.sql -- CREATE TABLE `users` ( `id` bigint NOT NULL AUTO_INCREMENT, `age` bigint NOT NULL, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`) ) -- pets.sql -- CREATE TABLE `pets` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`) ) -- mysql8/pets.sql -- CREATE TABLE `pets` ( `id` bigint NOT NULL AUTO_INCREMENT, `name` varchar(255) COLLATE utf8mb4_bin NOT NULL, PRIMARY KEY (`id`) ) atlas-0.7.2/internal/integration/testdata/mysql/cli-migrate-diff-format.txt000066400000000000000000000103271431455511600271030ustar00rootroot00000000000000only maria103 mkdir migrations atlas migrate diff --dev-url URL --to file://1.hcl --dir-format golang-migrate first cmpmig 0 golang-migrate/1.down.sql cmpmig 1 golang-migrate/1.up.sql atlas migrate diff --dev-url URL --to file://2.hcl --dir-format golang-migrate second cmpmig 2 golang-migrate/2.down.sql cmpmig 3 golang-migrate/2.up.sql rm migrations mkdir migrations atlas migrate diff --dev-url URL --to file://1.hcl --dir-format goose first cmpmig 0 goose/1.sql atlas migrate diff --dev-url URL --to file://2.hcl --dir-format goose second cmpmig 1 goose/2.sql rm migrations mkdir migrations atlas migrate diff --dev-url URL --to file://1.hcl --dir-format dbmate first cmpmig 0 dbmate/1.sql atlas migrate diff --dev-url URL --to file://2.hcl --dir-format dbmate second cmpmig 1 dbmate/2.sql rm migrations mkdir migrations atlas migrate diff --dev-url URL --to 
file://1.hcl --dir-format flyway first cmpmig 0 flyway/U1.sql cmpmig 1 flyway/V1.sql atlas migrate diff --dev-url URL --to file://2.hcl --dir-format flyway second cmpmig 1 flyway/U2.sql cmpmig 3 flyway/V2.sql rm migrations mkdir migrations atlas migrate diff --dev-url URL --to file://1.hcl --dir-format liquibase first cmpmig 0 liquibase/1.sql atlas migrate diff --dev-url URL --to file://2.hcl --dir-format liquibase second cmpmig 1 liquibase/2.sql -- golang-migrate/1.up.sql -- -- create "users" table CREATE TABLE `users` (`id` bigint NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`)) CHARSET utf8mb4 COLLATE utf8mb4_general_ci; -- golang-migrate/1.down.sql -- -- reverse: create "users" table DROP TABLE `users`; -- golang-migrate/2.up.sql -- -- modify "users" table ALTER TABLE `users` ADD COLUMN `email` varchar(100) NULL; -- golang-migrate/2.down.sql -- -- reverse: modify "users" table ALTER TABLE `users` DROP COLUMN `email`; -- goose/1.sql -- -- +goose Up -- create "users" table CREATE TABLE `users` (`id` bigint NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`)) CHARSET utf8mb4 COLLATE utf8mb4_general_ci; -- +goose Down -- reverse: create "users" table DROP TABLE `users`; -- goose/2.sql -- -- +goose Up -- modify "users" table ALTER TABLE `users` ADD COLUMN `email` varchar(100) NULL; -- +goose Down -- reverse: modify "users" table ALTER TABLE `users` DROP COLUMN `email`; -- dbmate/1.sql -- -- migrate:up -- create "users" table CREATE TABLE `users` (`id` bigint NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`)) CHARSET utf8mb4 COLLATE utf8mb4_general_ci; -- migrate:down -- reverse: create "users" table DROP TABLE `users`; -- dbmate/2.sql -- -- migrate:up -- modify "users" table ALTER TABLE `users` ADD COLUMN `email` varchar(100) NULL; -- migrate:down -- reverse: modify "users" table ALTER TABLE `users` DROP COLUMN `email`; -- flyway/V1.sql -- -- create "users" table CREATE TABLE `users` (`id` bigint NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`)) CHARSET utf8mb4 COLLATE utf8mb4_general_ci; 
-- flyway/U1.sql -- -- reverse: create "users" table DROP TABLE `users`; -- flyway/V2.sql -- -- modify "users" table ALTER TABLE `users` ADD COLUMN `email` varchar(100) NULL; -- flyway/U2.sql -- -- reverse: modify "users" table ALTER TABLE `users` DROP COLUMN `email`; -- liquibase/1.sql -- --liquibase formatted sql --changeset atlas:0-0 --comment: create "users" table CREATE TABLE `users` (`id` bigint NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`)) CHARSET utf8mb4 COLLATE utf8mb4_general_ci; --rollback: DROP TABLE `users`; -- liquibase/2.sql -- --liquibase formatted sql --changeset atlas:0-0 --comment: modify "users" table ALTER TABLE `users` ADD COLUMN `email` varchar(100) NULL; --rollback: ALTER TABLE `users` DROP COLUMN `email`; -- 1.hcl -- schema "script_cli_migrate_diff_format" {} table "users" { schema = schema.script_cli_migrate_diff_format column "id" { null = false type = bigint auto_increment = true } primary_key { columns = [column.id] } charset = "utf8mb4" collate = "utf8mb4_general_ci" } -- 2.hcl -- schema "script_cli_migrate_diff_format" {} table "users" { schema = schema.script_cli_migrate_diff_format column "id" { null = false type = bigint auto_increment = true } column "email" { null = true type = varchar(100) } primary_key { columns = [column.id] } charset = "utf8mb4" collate = "utf8mb4_general_ci" }atlas-0.7.2/internal/integration/testdata/mysql/cli-migrate-diff.txt000066400000000000000000000037641431455511600256240ustar00rootroot00000000000000only maria107 maria102 exec mkdir migrations ! atlas migrate diff --to file://1.hcl --dir file://migrations stderr '"dev-url" not set' ! atlas migrate diff --dev-url mysql://devdb --dir file://migrations stderr '"to" not set' atlas migrate diff --dev-url URL --to file://./1.hcl first cmpmig 0 1.sql atlas migrate diff --dev-url URL --to file://./2.hcl second cmpmig 1 2.sql # Clean migration directory and run diff with a custom qualifier. 
exec rm -rf migrations atlas migrate diff --dev-url URL --to file://./1.hcl --qualifier test first cmpmig 0 1-qualifier.sql -- 1.hcl -- schema "script_cli_migrate_diff" {} table "users" { schema = schema.script_cli_migrate_diff column "id" { null = false type = bigint auto_increment = true } primary_key { columns = [column.id] } charset = "utf8mb4" collate = "utf8mb4_general_ci" } -- 1.sql -- -- create "users" table CREATE TABLE `users` (`id` bigint NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`)) CHARSET utf8mb4 COLLATE utf8mb4_general_ci; -- 1-qualifier.sql -- -- create "users" table CREATE TABLE `test`.`users` (`id` bigint NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`)) CHARSET utf8mb4 COLLATE utf8mb4_general_ci; -- 2.hcl -- schema "script_cli_migrate_diff" {} table "users" { schema = schema.script_cli_migrate_diff column "id" { null = false type = bigint auto_increment = true } column "create_time" { null = false type = timestamp(6) default = sql("CURRENT_TIMESTAMP(6)") } primary_key { columns = [column.id] } charset = "utf8mb4" collate = "utf8mb4_general_ci" } -- 2.sql -- -- modify "users" table ALTER TABLE `users` ADD COLUMN `create_time` timestamp(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6); -- maria102/2.sql -- -- modify "users" table ALTER TABLE `users` ADD COLUMN `create_time` timestamp(6) NOT NULL DEFAULT (current_timestamp(6)); -- maria107/2.sql -- -- modify "users" table ALTER TABLE `users` ADD COLUMN `create_time` timestamp(6) NOT NULL DEFAULT (current_timestamp(6)); atlas-0.7.2/internal/integration/testdata/mysql/cli-migrate-lint-destructive.txt000066400000000000000000000035011431455511600302060ustar00rootroot00000000000000only mysql8 atlas migrate lint --dir file://migrations1 --dev-url URL --latest=1 > got.txt cmp got.txt empty.txt atlas migrate lint --dir file://migrations2 --dev-url URL --latest=1 > got.txt cmp got.txt empty.txt # Expect the command to fail; exit code 1. ! 
atlas migrate lint --dir file://migrations3 --dev-url URL --latest=1 > got.txt cmp got.txt expected3.txt # Expect to log only the connected schema. ! atlas migrate lint --dir file://migrations3 --dev-url URL --log='{{println "current:"}}{{.Schema.Current}}{{println "desired:"}}{{.Schema.Desired}}' --latest=1 > got.txt cmp got.txt schema3.txt -- empty.txt -- -- migrations1/1.sql -- CREATE TABLE users (id int); /* Rename column is not a destructive change. */ ALTER TABLE users RENAME COLUMN id TO oid; -- migrations2/1.sql -- CREATE TABLE users (id int); -- migrations2/2.sql -- /* Rename table mixed with rename column is not a destructive change. */ ALTER TABLE users RENAME TO user, RENAME COLUMN id TO oid; -- migrations3/1.sql -- CREATE TABLE users (id int); -- migrations3/2.sql -- /* Rename table mixed with rename column is not a destructive change. */ ALTER TABLE users RENAME TO user, RENAME COLUMN id TO oid; -- migrations3/3.sql -- ALTER TABLE user ADD COLUMN id int, DROP COLUMN oid; -- expected3.txt -- 3.sql: destructive changes detected: L1: Dropping non-virtual column "oid" -- schema3.txt -- current: table "user" { schema = schema.script_cli_migrate_lint_destructive column "oid" { null = true type = int } } schema "script_cli_migrate_lint_destructive" { charset = "utf8mb4" collate = "utf8mb4_0900_ai_ci" } desired: table "user" { schema = schema.script_cli_migrate_lint_destructive column "id" { null = true type = int } } schema "script_cli_migrate_lint_destructive" { charset = "utf8mb4" collate = "utf8mb4_0900_ai_ci" } atlas-0.7.2/internal/integration/testdata/mysql/cli-migrate-lint-ignore.txt000066400000000000000000000040611431455511600271320ustar00rootroot00000000000000only mysql8 # Without ignoring. ! atlas migrate lint --dir file://migrations1 --dev-url URL --latest=1 > got.txt cmp got.txt expected1.txt # Ignore destructive. atlas migrate lint --dir file://migrations2 --dev-url URL --latest=1 > got.txt cmp got.txt expected2.txt # Ignore data_depend. ! 
atlas migrate lint --dir file://migrations3 --dev-url URL --latest=1 > got.txt cmp got.txt expected3.txt # Ignore all. ! atlas migrate lint --dir file://migrations4 --dev-url URL --latest=1 > got.txt cmp got.txt empty.txt # Ignore all. ! atlas migrate lint --dir file://migrations5 --dev-url URL --latest=1 > got.txt cmp got.txt empty.txt -- migrations1/1.sql -- CREATE TABLE users (id int, name text); -- migrations1/2.sql -- ALTER TABLE users ADD COLUMN age int NOT NULL, DROP COLUMN name; -- expected1.txt -- 2.sql: destructive changes detected: L1: Dropping non-virtual column "name" 2.sql: data dependent changes detected: L1: Adding a non-nullable "int" column "age" on table "users" without a default value implicitly sets existing rows with 0 -- migrations2/1.sql -- CREATE TABLE users (id int, name text); -- migrations2/2.sql -- -- atlas:nolint destructive ALTER TABLE users ADD COLUMN age int NOT NULL, DROP COLUMN name; -- expected2.txt -- 2.sql: data dependent changes detected: L2: Adding a non-nullable "int" column "age" on table "users" without a default value implicitly sets existing rows with 0 -- migrations3/1.sql -- CREATE TABLE users (id int, name text); -- migrations3/2.sql -- -- atlas:nolint data_depend ALTER TABLE users ADD COLUMN age int NOT NULL, DROP COLUMN name; -- expected3.txt -- 2.sql: destructive changes detected: L2: Dropping non-virtual column "name" -- migrations4/1.sql -- CREATE TABLE users (id int, name text); -- migrations4/2.sql -- -- atlas:nolint ALTER TABLE users ADD COLUMN age int NOT NULL, DROP COLUMN name; -- migrations5/1.sql -- CREATE TABLE users (id int, name text); -- migrations5/2.sql -- -- atlas:nolint destructive data_depend anything ALTER TABLE users ADD COLUMN age int NOT NULL, DROP COLUMN name; -- empty.txt -- atlas-0.7.2/internal/integration/testdata/mysql/cli-migrate-lint-modify-nullability.txt000066400000000000000000000044661431455511600314750ustar00rootroot00000000000000only mysql8 atlas migrate lint --dir 
file://migrations1 --dev-url URL --latest=1 > got.txt cmp got.txt expected1.txt atlas migrate lint --dir file://migrations2 --dev-url URL --latest=1 > got.txt cmp got.txt expected2.txt atlas migrate lint --dir file://migrations3 --dev-url URL --latest=1 > got.txt cmp got.txt empty.txt atlas migrate lint --dir file://migrations4 --dev-url URL --latest=1 > got.txt cmp got.txt expected4.txt -- empty.txt -- -- migrations1/1.sql -- CREATE TABLE users (id int); -- migrations1/2.sql -- ALTER TABLE users ADD COLUMN `rank` int NOT NULL DEFAULT 1; ALTER TABLE users MODIFY COLUMN id int NOT NULL; -- expected1.txt -- 2.sql: data dependent changes detected: L2: Modifying nullable column "id" to non-nullable might fail in case it contains NULL values -- migrations2/1.sql -- CREATE TABLE users (id int); -- migrations2/2.sql -- -- Add (a, b, c), backfill (a, b) and then modify all to not-null. ALTER TABLE users ADD COLUMN `a` int, ADD COLUMN `b` int, ADD COLUMN `c` int; UPDATE users SET `a` = 1; UPDATE users SET `b` = 1 WHERE `b` IS NULL; ALTER TABLE users MODIFY COLUMN `a` int NOT NULL, MODIFY COLUMN `b` int NOT NULL, MODIFY COLUMN `c` int NOT NULL; -- expected2.txt -- 2.sql: data dependent changes detected: L5: Modifying nullable column "c" to non-nullable might fail in case it contains NULL values -- migrations3/1.sql -- CREATE TABLE users (id int); ALTER TABLE users MODIFY COLUMN id int NOT NULL; -- migrations4/1.sql -- CREATE TABLE users (id int); CREATE TABLE pets (id int); -- migrations4/2.sql -- ALTER TABLE users ADD COLUMN name varchar(255), ADD COLUMN age float; UPDATE users SET name = 'Unknown', age = 0; -- No diagnostics. ALTER TABLE users MODIFY COLUMN name varchar(255) NOT NULL, MODIFY COLUMN age float NOT NULL; ALTER TABLE pets ADD COLUMN name varchar(255), ADD COLUMN age float; UPDATE pets SET name = 'Unknown', age = 0 WHERE RAND() > 0.5; -- With diagnostics as statement above cannot be sure NULL values are back-filled. 
ALTER TABLE pets MODIFY COLUMN name varchar(255) NOT NULL, MODIFY COLUMN age float NOT NULL; -- expected4.txt -- 2.sql: data dependent changes detected: L9: Modifying nullable column "name" to non-nullable might fail in case it contains NULL values L9: Modifying nullable column "age" to non-nullable might fail in case it contains NULL values atlas-0.7.2/internal/integration/testdata/mysql/cli-migrate-lint-unique-index.txt000066400000000000000000000012711431455511600302620ustar00rootroot00000000000000only mysql8 atlas migrate lint --dir file://migrations1 --dev-url URL --latest=1 > got.txt cmp got.txt expected1.txt atlas migrate lint --dir file://migrations2 --dev-url URL --latest=1 > got.txt cmp got.txt empty.txt -- empty.txt -- -- migrations1/1.sql -- CREATE TABLE users (id int); -- migrations1/2.sql -- ALTER TABLE users ADD UNIQUE INDEX idx (id); -- expected1.txt -- 2.sql: data dependent changes detected: L1: Adding a unique index "idx" on table "users" might fail in case column "id" contains duplicate entries -- migrations2/1.sql -- CREATE TABLE users (id int); ALTER TABLE users ADD UNIQUE INDEX idx (id); -- migrations2/2.sql -- ALTER TABLE users RENAME INDEX idx TO idx1;atlas-0.7.2/internal/integration/testdata/mysql/cli-project-schemas.txt000066400000000000000000000010561431455511600263450ustar00rootroot00000000000000atlas schema apply --env local --auto-approve > out.txt exec cat out.txt stdout 'CREATE TABLE `script_cli_project_schemas`' -- 1.hcl -- schema "script_cli_project_schemas" { } table "users" { schema = schema.script_cli_project_schemas column "id" { type = bigint null = false } } -- atlas.hcl -- env "local" { url = "URL" src = "./1.hcl" schemas = ["script_cli_project_schemas"] } -- expected.sql -- CREATE TABLE `users` ( `id` bigint NOT NULL ) -- 0.hcl -- schema "$db" { charset = "$charset" collate = "$collate" 
}atlas-0.7.2/internal/integration/testdata/mysql/column-add-drop.txt000066400000000000000000000034001431455511600254710ustar00rootroot00000000000000# Each test runs on a clean database. # Apply schema "1.hcl" on fresh database. apply 1.hcl # Ensures tables exist in the database. exist users # Compare the result of "SHOW TABLE users" with the content of a file named '1.sql'. # The "cmpshow" command searches first a file named '/1.sql' (version, defines # the database version), and in case it was found, it will use it instead. cmpshow users 1.sql # Apply schema "2.hcl" on the updated database. apply 2.hcl # Compare database with 2.sql. cmpshow users 2.sql # Apply schema "1.hcl" should migrate database to previous state. apply 1.hcl cmpshow users 1.sql # Drop table. apply 0.hcl ! exist users # Below files represent HCL and SQL. File names defined their index in # execution order. 1.hcl is executed first, 2.hcl executed second, etc. -- 1.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "id" { type = int } primary_key { columns = [table.users.column.id] } } -- 1.sql -- CREATE TABLE `users` ( `id` int(11) NOT NULL, PRIMARY KEY (`id`) ) -- mysql8/1.sql -- CREATE TABLE `users` ( `id` int NOT NULL, PRIMARY KEY (`id`) ) -- 2.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "id" { type = int } column "name" { type = text } primary_key { columns = [table.users.column.id] } } -- 2.sql -- CREATE TABLE `users` ( `id` int(11) NOT NULL, `name` text NOT NULL, PRIMARY KEY (`id`) ) -- mysql8/2.sql -- CREATE TABLE `users` ( `id` int NOT NULL, `name` text NOT NULL, PRIMARY KEY (`id`) ) -- 0.hcl -- schema "$db" { charset = "$charset" collate = "$collate" }atlas-0.7.2/internal/integration/testdata/mysql/column-bit.txt000066400000000000000000000004701431455511600245610ustar00rootroot00000000000000apply 1.hcl cmpshow t 1.sql -- 1.hcl -- schema "$db" {} table "t" { schema = 
schema.$db column "c1" { type = bit } column "c2" { type = bit(1) } column "c3" { type = bit(64) } } -- 1.sql -- CREATE TABLE `t` ( `c1` bit(1) NOT NULL, `c2` bit(1) NOT NULL, `c3` bit(64) NOT NULL ) atlas-0.7.2/internal/integration/testdata/mysql/column-bool.txt000066400000000000000000000025501431455511600247370ustar00rootroot00000000000000# Each test runs on a clean database. apply 1.hcl cmpshow users 1.sql # "bool", "boolean" and "tinyint(1)" are equal. synced 2.hcl # Changing "tinyint(1)" to "tinyint" should cause a schema change. apply 3.hcl cmpshow users 3.sql synced 3.hcl -- 1.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "a" { type = bool } column "b" { type = boolean } column "c" { type = tinyint(1) } } -- 1.sql -- CREATE TABLE `users` ( `a` tinyint(1) NOT NULL, `b` tinyint(1) NOT NULL, `c` tinyint(1) NOT NULL ) -- 2.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "a" { type = boolean } column "b" { type = tinyint(1) } column "c" { type = bool } } -- 3.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "a" { type = boolean } column "b" { type = tinyint } column "c" { type = bool } } -- 3.sql -- CREATE TABLE `users` ( `a` tinyint(1) NOT NULL, `b` tinyint(4) NOT NULL, `c` tinyint(1) NOT NULL ) -- mysql8/3.sql -- CREATE TABLE `users` ( `a` tinyint(1) NOT NULL, `b` tinyint NOT NULL, `c` tinyint(1) NOT NULL ) atlas-0.7.2/internal/integration/testdata/mysql/column-charset.txt000066400000000000000000000040201431455511600254270ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql # Dropping the default COLLATE from the HCL does not have any effect. apply 2.hcl cmpshow users 1.sql # Changing the default COLLATE to hebrew_bin. apply 3.hcl cmpshow users 3.sql # Dropping custom COLLATE reverts to the default. apply 4.hcl cmpshow users 1.sql # Dropping CHARSET and COLLATE. 
apply 5.hcl cmpshow users 5.sql -- 1.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "name" { type = varchar(255) charset = "hebrew" collate = "hebrew_general_ci" } charset = "$charset" collate = "$collate" } -- 1.sql -- CREATE TABLE `users` ( `name` varchar(255) CHARACTER SET hebrew NOT NULL ) -- mysql8/1.sql -- CREATE TABLE `users` ( `name` varchar(255) CHARACTER SET hebrew COLLATE hebrew_general_ci NOT NULL ) -- 2.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "name" { type = varchar(255) charset = "hebrew" } charset = "$charset" collate = "$collate" } -- 3.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "name" { type = varchar(255) charset = "hebrew" collate = "hebrew_bin" } charset = "$charset" collate = "$collate" } -- 3.sql -- CREATE TABLE `users` ( `name` varchar(255) CHARACTER SET hebrew COLLATE hebrew_bin NOT NULL ) -- 4.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "name" { type = varchar(255) charset = "hebrew" } charset = "$charset" collate = "$collate" } -- 5.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "name" { type = varchar(255) } charset = "$charset" collate = "$collate" } -- 5.sql -- CREATE TABLE `users` ( `name` varchar(255) NOT NULL )atlas-0.7.2/internal/integration/testdata/mysql/column-default-expr.txt000066400000000000000000000020471431455511600264050ustar00rootroot00000000000000# Run tests only on MySQL 8. only mysql8 # Apply schema "1.hcl" on fresh database. apply 1.hcl # Compare the result of "SHOW TABLE users" with the content of a file named '1.sql'. 
cmpshow users 1.sql cmphcl 1.hcl -- 1.hcl -- table "users" { schema = schema.script_column_default_expr column "a" { null = false type = varchar(255) default = "" } column "b" { null = false type = varchar(255) default = sql("(concat(_utf8mb4'a',`a`,_utf8mb4'\\'s',_utf8mb4'b'))") } column "c" { null = false type = varchar(255) default = "a'b" } column "d" { null = false type = varchar(255) default = sql("(_utf8mb4'a\\'b')") } } schema "script_column_default_expr" { charset = "utf8mb4" collate = "utf8mb4_0900_ai_ci" } -- 1.sql -- CREATE TABLE `users` ( `a` varchar(255) NOT NULL DEFAULT '', `b` varchar(255) NOT NULL DEFAULT (concat(_utf8mb4'a',`a`,_utf8mb4'\'s',_utf8mb4'b')), `c` varchar(255) NOT NULL DEFAULT 'a''b', `d` varchar(255) NOT NULL DEFAULT (_utf8mb4'a\'b') )atlas-0.7.2/internal/integration/testdata/mysql/column-generated-inspect.txt000066400000000000000000000025571431455511600274140ustar00rootroot00000000000000# Skip MySQL 5.6 as it does not support generated columns. ! only mysql56 apply 1.hcl cmphcl 1.inspect.hcl -- 1.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "a" { type = bool null = true } column "b" { type = bool null = true as = "a" } column "c" { type = bool null = true as { type = STORED expr = "b" } } column "d e" { null = false type = varchar(255) } column "d $" { null = false type = varchar(255) } index "idx_1" { columns = [column["d e"]] } index "idx_2" { columns = [column["d $"]] } } -- 1.inspect.hcl -- table "users" { schema = schema.$db column "a" { null = true type = bool } column "b" { null = true type = bool as { expr = "`a`" type = VIRTUAL } } column "c" { null = true type = bool as { expr = "`b`" type = STORED } } column "d e" { null = false type = varchar(255) } column "d $" { null = false type = varchar(255) } index "idx_1" { columns = [column["d e"]] } index "idx_2" { columns = [column["d $"]] } } schema "$db" { charset = "$charset" collate = "$collate" } 
atlas-0.7.2/internal/integration/testdata/mysql/column-generated.txt000066400000000000000000000066561431455511600257550ustar00rootroot00000000000000# Skip MySQL 5.6 as it does not support generated columns. ! only mysql56 apply 1.hcl cmpshow users 1.sql ! apply 2.fail1.hcl 'changing VIRTUAL generated column "b" to non-generated column is not supported (drop and add is required)' ! apply 2.fail2.hcl 'changing column "a" to VIRTUAL generated column is not supported (drop and add is required)' ! apply 2.fail3.hcl 'changing the store type of generated column "c" from "STORED" to "VIRTUAL" is not supported' apply 3.hcl cmpshow users 3.sql -- 1.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "a" { type = int } column "b" { type = int as = "a * 2" } column "c" { type = int as { expr = "a * b" type = STORED } } } -- 1.sql -- CREATE TABLE `users` ( `a` int(11) NOT NULL, `b` int(11) GENERATED ALWAYS AS (`a` * 2) VIRTUAL, `c` int(11) GENERATED ALWAYS AS (`a` * `b`) STORED ) -- mysql57/1.sql -- CREATE TABLE `users` ( `a` int(11) NOT NULL, `b` int(11) GENERATED ALWAYS AS ((`a` * 2)) VIRTUAL NOT NULL, `c` int(11) GENERATED ALWAYS AS ((`a` * `b`)) STORED NOT NULL ) -- mysql8/1.sql -- CREATE TABLE `users` ( `a` int NOT NULL, `b` int GENERATED ALWAYS AS ((`a` * 2)) VIRTUAL NOT NULL, `c` int GENERATED ALWAYS AS ((`a` * `b`)) STORED NOT NULL ) -- 2.fail1.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "a" { type = int } column "b" { type = int } column "c" { type = int as { expr = "a * b" type = STORED } } } -- 2.fail2.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "a" { type = int as = "1" } column "b" { type = int as = "a * 2" } column "c" { type = int as { expr = "a * b" type = VIRTUAL } } } -- 2.fail3.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = 
schema.$db column "a" { type = int } column "b" { type = int as = "a * 2" } column "c" { type = int as { expr = "a * b" type = VIRTUAL } } } -- 3.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "a" { type = int as { expr = "1" type = STORED } } column "b" { type = int as = "a * 3" } column "c" { type = int as { expr = "a * b" type = STORED } } } -- 3.sql -- CREATE TABLE `users` ( `a` int(11) GENERATED ALWAYS AS (1) STORED, `b` int(11) GENERATED ALWAYS AS (`a` * 3) VIRTUAL, `c` int(11) GENERATED ALWAYS AS (`a` * `b`) STORED ) -- mysql57/3.sql -- CREATE TABLE `users` ( `a` int(11) GENERATED ALWAYS AS (1) STORED NOT NULL, `b` int(11) GENERATED ALWAYS AS ((`a` * 3)) VIRTUAL NOT NULL, `c` int(11) GENERATED ALWAYS AS ((`a` * `b`)) STORED NOT NULL ) -- mysql8/3.sql -- CREATE TABLE `users` ( `a` int GENERATED ALWAYS AS (1) STORED NOT NULL, `b` int GENERATED ALWAYS AS ((`a` * 3)) VIRTUAL NOT NULL, `c` int GENERATED ALWAYS AS ((`a` * `b`)) STORED NOT NULL ) atlas-0.7.2/internal/integration/testdata/mysql/column-time-precision-maria.txt000066400000000000000000000022541431455511600300230ustar00rootroot00000000000000# Each test runs on a clean database. only maria* # Apply schema "1.hcl" on fresh database. apply 1.hcl # Compare the result of "SHOW TABLE users" with the content of a file named '1.sql'. 
cmpshow foo 1.sql # Files -- 1.hcl -- schema "$db" {} table "foo" { schema = schema.$db column "id" { null = false type = char(36) } column "precision_default" { null = false type = timestamp default = sql("CURRENT_TIMESTAMP") on_update = sql("CURRENT_TIMESTAMP") } column "create_time" { null = false type = timestamp(6) default = sql("CURRENT_TIMESTAMP(6)") } column "update_time" { null = false type = datetime(6) default = sql("CURRENT_TIMESTAMP(6)") on_update = sql("CURRENT_TIMESTAMP(6)") } primary_key { columns = [table.foo.column.id, ] } } -- 1.sql -- CREATE TABLE `foo` ( `id` char(36) NOT NULL, `precision_default` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp(), `create_time` timestamp(6) NOT NULL DEFAULT current_timestamp(6), `update_time` datetime(6) NOT NULL DEFAULT current_timestamp(6) ON UPDATE current_timestamp(6), PRIMARY KEY (`id`) ) atlas-0.7.2/internal/integration/testdata/mysql/column-time-precision-mysql.txt000066400000000000000000000022701431455511600300750ustar00rootroot00000000000000# Each test runs on a clean database. only mysql56 mysql57 mysql8 # Apply schema "1.hcl" on fresh database. apply 1.hcl # Compare the result of "SHOW TABLE users" with the content of a file named '1.sql'. 
cmpshow foo 1.sql # Files -- 1.hcl -- schema "$db" {} table "foo" { schema = schema.$db column "id" { null = false type = char(36) } column "precision_default" { null = false type = timestamp default = sql("CURRENT_TIMESTAMP") on_update = sql("CURRENT_TIMESTAMP") } column "create_time" { null = false type = timestamp(6) default = sql("CURRENT_TIMESTAMP(6)") } column "update_time" { null = false type = datetime(6) default = sql("CURRENT_TIMESTAMP(6)") on_update = sql("CURRENT_TIMESTAMP(6)") } primary_key { columns = [table.foo.column.id, ] } } -- 1.sql -- CREATE TABLE `foo` ( `id` char(36) NOT NULL, `precision_default` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `create_time` timestamp(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), `update_time` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), PRIMARY KEY (`id`) ) atlas-0.7.2/internal/integration/testdata/mysql/foreign-key-add.txt000066400000000000000000000065611431455511600254640ustar00rootroot00000000000000# Each test runs on a clean database. # Apply schema "1.hcl" on fresh database. apply 1.hcl # Check that users exists in the database. exist users # The negate version indicates that this command is expected to fail and the # second argument is an optional pattern for matching on the returned error. ! apply invalid-on-delete-action.hcl 'foreign key constraint was "author_id" SET NULL, but column "author_id" is NOT NULL' ! apply invalid-on-update-action.hcl 'foreign key constraint was "author_id" SET NULL, but column "author_id" is NOT NULL' apply 2.hcl exist users posts cmpshow users posts 2.sql # Below files represent HCL and SQL. File names defined their index in # execution order. 1.hcl is executed first, 2.hcl executed second, etc. 
-- 1.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "id" { type = int } primary_key { columns = [table.users.column.id] } } -- invalid-on-delete-action.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "id" { type = int } primary_key { columns = [table.users.column.id] } } table "posts" { schema = schema.$db column "id" { type = int } column "author_id" { type = int } primary_key { columns = [table.posts.column.id] } foreign_key "owner_id" { columns = [table.posts.column.author_id] ref_columns = [table.users.column.id] on_delete = SET_NULL } } -- invalid-on-update-action.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "id" { type = int } primary_key { columns = [table.users.column.id] } } table "posts" { schema = schema.$db column "id" { type = int } column "author_id" { type = int } primary_key { columns = [table.posts.column.id] } foreign_key "owner_id" { columns = [table.posts.column.author_id] ref_columns = [table.users.column.id] on_update = SET_NULL } } -- 2.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "id" { type = int } primary_key { columns = [table.users.column.id] } } table "posts" { schema = schema.$db column "id" { type = int } column "author_id" { type = int null = true } primary_key { columns = [table.posts.column.id] } foreign_key "owner_id" { columns = [table.posts.column.author_id] ref_columns = [table.users.column.id] on_delete = SET_NULL } } -- 2.sql -- CREATE TABLE `users` ( `id` int(11) NOT NULL, PRIMARY KEY (`id`) ) CREATE TABLE `posts` ( `id` int(11) NOT NULL, `author_id` int(11) DEFAULT NULL, PRIMARY KEY (`id`), KEY `owner_id` (`author_id`), CONSTRAINT `owner_id` FOREIGN KEY (`author_id`) REFERENCES `users` (`id`) ON DELETE SET NULL ) -- mysql8/2.sql -- CREATE TABLE `users` ( `id` int NOT NULL, 
PRIMARY KEY (`id`) ) CREATE TABLE `posts` ( `id` int NOT NULL, `author_id` int DEFAULT NULL, PRIMARY KEY (`id`), KEY `owner_id` (`author_id`), CONSTRAINT `owner_id` FOREIGN KEY (`author_id`) REFERENCES `users` (`id`) ON DELETE SET NULL ) atlas-0.7.2/internal/integration/testdata/mysql/foreign-key-modify-action.txt000066400000000000000000000054021431455511600274670ustar00rootroot00000000000000# Each test runs on a clean database. # Apply schema "1.hcl" on fresh database. apply 1.hcl cmpshow users posts 1.sql apply 2.hcl cmpshow users posts 2.sql # Below files represent HCL and SQL. File names defined their index in # execution order. 1.hcl is executed first, 2.hcl executed second, etc. -- 1.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "id" { type = int } primary_key { columns = [table.users.column.id] } } table "posts" { schema = schema.$db column "id" { type = int } column "author_id" { type = int null = true } primary_key { columns = [table.posts.column.id] } foreign_key "owner_id" { columns = [table.posts.column.author_id] ref_columns = [table.users.column.id] on_update = SET_NULL } } -- 1.sql -- CREATE TABLE `users` ( `id` int(11) NOT NULL, PRIMARY KEY (`id`) ) CREATE TABLE `posts` ( `id` int(11) NOT NULL, `author_id` int(11) DEFAULT NULL, PRIMARY KEY (`id`), KEY `owner_id` (`author_id`), CONSTRAINT `owner_id` FOREIGN KEY (`author_id`) REFERENCES `users` (`id`) ON UPDATE SET NULL ) -- mysql8/1.sql -- CREATE TABLE `users` ( `id` int NOT NULL, PRIMARY KEY (`id`) ) CREATE TABLE `posts` ( `id` int NOT NULL, `author_id` int DEFAULT NULL, PRIMARY KEY (`id`), KEY `owner_id` (`author_id`), CONSTRAINT `owner_id` FOREIGN KEY (`author_id`) REFERENCES `users` (`id`) ON UPDATE SET NULL ) -- 2.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "id" { type = int } primary_key { columns = [table.users.column.id] } } table "posts" { schema = schema.$db column 
"id" { type = int } column "author_id" { type = int null = true } primary_key { columns = [table.posts.column.id] } foreign_key "owner_id" { columns = [table.posts.column.author_id] ref_columns = [table.users.column.id] on_update = NO_ACTION on_delete = CASCADE } } -- 2.sql -- CREATE TABLE `users` ( `id` int(11) NOT NULL, PRIMARY KEY (`id`) ) CREATE TABLE `posts` ( `id` int(11) NOT NULL, `author_id` int(11) DEFAULT NULL, PRIMARY KEY (`id`), KEY `owner_id` (`author_id`), CONSTRAINT `owner_id` FOREIGN KEY (`author_id`) REFERENCES `users` (`id`) ON DELETE CASCADE ON UPDATE NO ACTION ) -- mysql8/2.sql -- CREATE TABLE `users` ( `id` int NOT NULL, PRIMARY KEY (`id`) ) CREATE TABLE `posts` ( `id` int NOT NULL, `author_id` int DEFAULT NULL, PRIMARY KEY (`id`), KEY `owner_id` (`author_id`), CONSTRAINT `owner_id` FOREIGN KEY (`author_id`) REFERENCES `users` (`id`) ON DELETE CASCADE )atlas-0.7.2/internal/integration/testdata/mysql/index-add-drop.txt000066400000000000000000000031541431455511600253110ustar00rootroot00000000000000# Each test runs on a clean database. # Apply schema "1.hcl" on fresh database. apply 1.hcl cmpshow users 1.sql # Add index to table "users". apply 2.hcl cmpshow users 2.sql # Drop uniqueness from index. apply 3.hcl cmpshow users 3.sql # Drop index. apply 1.hcl cmpshow users 1.sql # Below files represent HCL and SQL. File names defined their index in # execution order. 1.hcl is executed first, 2.hcl executed second, etc. 
-- 1.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "rank" { type = bigint } } -- 1.sql -- CREATE TABLE `users` ( `rank` bigint(20) NOT NULL ) -- mysql8/1.sql -- CREATE TABLE `users` ( `rank` bigint NOT NULL ) -- 2.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "rank" { type = bigint } index "rank_idx" { unique = true columns = [table.users.column.rank] } } -- 2.sql -- CREATE TABLE `users` ( `rank` bigint(20) NOT NULL, UNIQUE KEY `rank_idx` (`rank`) ) -- mysql8/2.sql -- CREATE TABLE `users` ( `rank` bigint NOT NULL, UNIQUE KEY `rank_idx` (`rank`) ) -- 3.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "rank" { type = bigint } index "rank_idx" { columns = [table.users.column.rank] } } -- 3.sql -- CREATE TABLE `users` ( `rank` bigint(20) NOT NULL, KEY `rank_idx` (`rank`) ) -- mysql8/3.sql -- CREATE TABLE `users` ( `rank` bigint NOT NULL, KEY `rank_idx` (`rank`) )atlas-0.7.2/internal/integration/testdata/mysql/index-desc.txt000066400000000000000000000040771431455511600245420ustar00rootroot00000000000000# Each test runs on a clean database. # Run this test only on MySQL 8 as it is not supported by other versions. only mysql8 # Apply schema "1.hcl" on fresh database. apply 1.hcl cmpshow users 1.sql # Drop the "DESC" option from the key part. apply 2.hcl cmpshow users 2.sql # Use of "columns" instead of "on" should not trigger a change. synced 2-no-change.hcl apply 3.hcl cmpshow users 3.sql # Below files represent HCL and SQL. File names defined their index in # execution order. 1.hcl is executed first, 2.hcl executed second, etc. 
-- 1.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "rank" { type = bigint } index "rank_idx" { on { desc = true column = table.users.column.rank } } } -- 1.sql -- CREATE TABLE `users` ( `rank` bigint NOT NULL, KEY `rank_idx` (`rank` DESC) ) -- 2.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "rank" { type = bigint } index "rank_idx" { on { column = table.users.column.rank } } } -- 2.sql -- CREATE TABLE `users` ( `rank` bigint NOT NULL, KEY `rank_idx` (`rank`) ) -- 2-no-change.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "rank" { type = bigint } index "rank_idx" { columns = [ table.users.column.rank, ] } } -- 3.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "users" { schema = schema.$db column "rank" { type = bigint } column "score" { type = int } index "rank_score_idx" { on { column = table.users.column.rank } on { column = table.users.column.score desc = true } } } -- 3.sql -- CREATE TABLE `users` ( `rank` bigint NOT NULL, `score` int NOT NULL, KEY `rank_score_idx` (`rank`,`score` DESC) ) atlas-0.7.2/internal/integration/testdata/mysql/index-expr.txt000066400000000000000000000021161431455511600245720ustar00rootroot00000000000000# Run this test only on MySQL 8 as it is not supported by other versions. 
only mysql8 apply 1.hcl cmpshow users 1.sql apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "first_name" { null = false type = varchar(128) } column "last_name" { null = false type = varchar(128) } index "full_name" { on { expr = "concat(`first_name`, `last_name`)" } } } -- 1.sql -- CREATE TABLE `users` ( `first_name` varchar(128) NOT NULL, `last_name` varchar(128) NOT NULL, KEY `full_name` ((concat(`first_name`,`last_name`))) ) -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "first_name" { null = false type = varchar(128) } column "last_name" { null = false type = varchar(128) } index "full_name" { on { expr = "concat(`first_name`, '\\'s first name')" } } } -- 2.sql -- CREATE TABLE `users` ( `first_name` varchar(128) NOT NULL, `last_name` varchar(128) NOT NULL, KEY `full_name` ((concat(`first_name`,_utf8mb4'\'s first name'))) )atlas-0.7.2/internal/integration/testdata/mysql/index-prefix.txt000066400000000000000000000017721431455511600251200ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql # Change prefix. apply 2.hcl cmpshow users 2.sql # Drop prefix. 
apply 3.hcl cmpshow users 3.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { type = varchar(255) } index "user_name" { on { column = column.name prefix = 64 } } } -- 1.sql -- CREATE TABLE `users` ( `name` varchar(255) NOT NULL, KEY `user_name` (`name`(64)) ) -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { type = varchar(255) } index "user_name" { on { column = column.name prefix = 128 } } } -- 2.sql -- CREATE TABLE `users` ( `name` varchar(255) NOT NULL, KEY `user_name` (`name`(128)) ) -- 3.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { type = varchar(128) } index "user_name" { on { column = column.name } } } -- 3.sql -- CREATE TABLE `users` ( `name` varchar(128) NOT NULL, KEY `user_name` (`name`) )atlas-0.7.2/internal/integration/testdata/mysql/index-type.txt000066400000000000000000000010701431455511600245730ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql # Drop an index. apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "text" { null = false type = text } index "users_text" { type = FULLTEXT columns = [column.text] } } -- 1.sql -- CREATE TABLE `users` ( `text` text NOT NULL, FULLTEXT KEY `users_text` (`text`) ) -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "text" { null = false type = text } } -- 2.sql -- CREATE TABLE `users` ( `text` text NOT NULL ) atlas-0.7.2/internal/integration/testdata/mysql/index-unique.txt000066400000000000000000000025711431455511600251270ustar00rootroot00000000000000apply 1.hcl cmpshow t 1.sql # Insert a few records to the table, and cause the new desired change to fail. execsql 'INSERT INTO $db.t (c, d) VALUES (1, 1), (1, 2), (1, 3)' ! 
apply 2.fail.hcl "Error 1062: Duplicate entry '1' for key 'c'" apply 2.hcl cmpshow t 2.sql -- 1.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "t" { schema = schema.$db column "c" { type = bigint } column "d" { type = bigint } index "c" { unique = true columns = [column.c, column.d] } } -- 1.sql -- CREATE TABLE `t` ( `c` bigint(20) NOT NULL, `d` bigint(20) NOT NULL, UNIQUE KEY `c` (`c`,`d`) ) -- mysql8/1.sql -- CREATE TABLE `t` ( `c` bigint NOT NULL, `d` bigint NOT NULL, UNIQUE KEY `c` (`c`,`d`) ) -- 2.fail.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "t" { schema = schema.$db column "c" { type = bigint } index "c" { unique = true columns = [column.c] } } -- 2.hcl -- schema "$db" { charset = "$charset" collate = "$collate" } table "t" { schema = schema.$db column "c" { type = bigint } index "c" { columns = [column.c] } } -- 2.sql -- CREATE TABLE `t` ( `c` bigint(20) NOT NULL, KEY `c` (`c`) ) -- mysql8/2.sql -- CREATE TABLE `t` ( `c` bigint NOT NULL, KEY `c` (`c`) )atlas-0.7.2/internal/integration/testdata/postgres/000077500000000000000000000000001431455511600224475ustar00rootroot00000000000000atlas-0.7.2/internal/integration/testdata/postgres/cli-migrate-apply.txt000066400000000000000000000147561431455511600265450ustar00rootroot00000000000000! atlas migrate apply stderr 'Error: checksum file not found' stdout 'You have a checksum error in your migration directory.' 
stdout 'atlas migrate hash' atlas migrate hash # Apply all of them atlas migrate apply --url URL --revisions-schema $db stdout 'Migrating to version 3 \(3 migrations in total\):' stdout '-- migrating version 1' stdout '-> CREATE TABLE "users" \("id" bigint NOT NULL GENERATED BY DEFAULT AS IDENTITY, "age" bigint NOT NULL, "name" character varying NOT NULL, PRIMARY KEY \("id"\)\);' stdout '-- migrating version 2' stdout '-> CREATE UNIQUE INDEX "users_age_key" ON "users" \("age"\);' stdout '-- migrating version 3' stdout '-> CREATE TABLE "pets" \("id" bigint NOT NULL GENERATED BY DEFAULT AS IDENTITY, "name" character varying NOT NULL, PRIMARY KEY \("id"\)\);' stdout '-- 3 migrations' stdout '-- 3 sql statements' cmpshow users users.sql cmpshow pets pets.sql atlas migrate apply --url URL --revisions-schema $db 1 stdout 'No migration files to execute' clearSchema # Apply one by one atlas migrate apply --url URL --revisions-schema $db 1 stdout 'Migrating to version 1 \(1 migrations in total\):' cmpshow users users_1.sql atlas migrate apply --url URL --revisions-schema $db 1 stdout 'Migrating to version 2 from 1 \(1 migrations in total\):' cmpshow users users.sql atlas migrate apply --url URL --revisions-schema $db 1 stdout 'Migrating to version 3 from 2 \(1 migrations in total\):' cmpshow users users.sql cmpshow pets pets.sql atlas migrate apply --url URL --revisions-schema $db 1 stdout 'No migration files to execute' clearSchema # Move the broken migration into the migrations directory and check the different transaction modes. cp broken.sql migrations/4_fourth.sql atlas migrate hash ! atlas migrate apply --url URL --revisions-schema $db --tx-mode invalid stderr 'unknown tx-mode "invalid"' # Test --tx-mode all ! 
atlas migrate apply --url URL --revisions-schema $db --tx-mode all stderr 'executing statement "THIS IS A FAILING STATEMENT;" from version "4"' atlas schema inspect --url URL --exclude $db.atlas_schema_revisions cmp stdout empty.hcl # Apply one migration, after rolling everything back, the first revision must still exist. atlas migrate apply --url URL --revisions-schema $db 1 atlas schema inspect --url URL --exclude $db.atlas_schema_revisions --exclude $db.users cmp stdout empty.hcl cmpshow users users_1.sql ! atlas migrate apply --url URL --revisions-schema $db --tx-mode all stderr 'executing statement "THIS IS A FAILING STATEMENT;" from version "4"' atlas schema inspect --url URL --exclude $db.atlas_schema_revisions --exclude $db.users cmp stdout empty.hcl # If the broken migration is gone, we can apply everything without any problems. rm migrations/4_fourth.sql atlas migrate hash atlas migrate apply --url URL --revisions-schema $db cmpshow users users.sql cmpshow pets pets.sql atlas schema inspect --url URL --exclude $db.atlas_schema_revisions --exclude $db.users --exclude $db.pets cmp stdout empty.hcl clearSchema # Test --tx-mode file cp broken.sql migrations/4_fourth.sql atlas migrate hash ! atlas migrate apply --url URL --revisions-schema $db --tx-mode file stderr 'executing statement "THIS IS A FAILING STATEMENT;" from version "4"' atlas schema inspect --url URL --exclude $db.atlas_schema_revisions cmpshow users users.sql cmpshow pets pets.sql # Table "broken" does not exist since we rolled back that migration. atlas schema inspect --url URL --exclude $db.atlas_schema_revisions --exclude $db.users --exclude $db.pets cmp stdout empty.hcl # If the broken migration is gone, we can apply everything without any problems. 
rm migrations/4_fourth.sql atlas migrate hash atlas migrate apply --url URL --revisions-schema $db cmpshow users users.sql cmpshow pets pets.sql atlas schema inspect --url URL --exclude $db.atlas_schema_revisions --exclude $db.users --exclude $db.pets cmp stdout empty.hcl clearSchema # Test --tx-mode none cp broken.sql migrations/4_fourth.sql atlas migrate hash ! atlas migrate apply --url URL --revisions-schema $db --tx-mode none stderr 'executing statement "THIS IS A FAILING STATEMENT;" from version "4"' atlas schema inspect --url URL --exclude $db.atlas_schema_revisions cmpshow users users.sql cmpshow pets pets.sql # Table "broken" does exist since we do not have transactions. atlas schema inspect --url URL --exclude $db.atlas_schema_revisions --exclude $db.users --exclude $db.pets cmp stdout broken.hcl -- migrations/1_first.sql -- CREATE TABLE "users" ("id" bigint NOT NULL GENERATED BY DEFAULT AS IDENTITY, "age" bigint NOT NULL, "name" character varying NOT NULL, PRIMARY KEY ("id")); -- migrations/2_second.sql -- CREATE UNIQUE INDEX "users_age_key" ON "users" ("age"); -- migrations/3_third.sql -- CREATE TABLE "pets" ("id" bigint NOT NULL GENERATED BY DEFAULT AS IDENTITY, "name" character varying NOT NULL, PRIMARY KEY ("id")); -- broken.sql -- CREATE TABLE "broken" ("id" bigint); THIS IS A FAILING STATEMENT; -- empty.hcl -- schema "script_cli_migrate_apply" { } -- broken.hcl -- table "broken" { schema = schema.script_cli_migrate_apply column "id" { null = true type = bigint } } schema "script_cli_migrate_apply" { } -- users.sql -- Table "script_cli_migrate_apply.users" Column | Type | Collation | Nullable | Default --------+-------------------+-----------+----------+---------------------------------- id | bigint | | not null | generated by default as identity age | bigint | | not null | name | character varying | | not null | Indexes: "users_pkey" PRIMARY KEY, btree (id) "users_age_key" UNIQUE, btree (age) -- users_1.sql -- Table "script_cli_migrate_apply.users" 
Column | Type | Collation | Nullable | Default --------+-------------------+-----------+----------+---------------------------------- id | bigint | | not null | generated by default as identity age | bigint | | not null | name | character varying | | not null | Indexes: "users_pkey" PRIMARY KEY, btree (id) -- pets.sql -- Table "script_cli_migrate_apply.pets" Column | Type | Collation | Nullable | Default --------+-------------------+-----------+----------+---------------------------------- id | bigint | | not null | generated by default as identity name | character varying | | not null | Indexes: "pets_pkey" PRIMARY KEY, btree (id) atlas-0.7.2/internal/integration/testdata/postgres/cli-migrate-diff.txt000066400000000000000000000022441431455511600263150ustar00rootroot00000000000000exec mkdir migrations ! atlas migrate diff --to file://1.hcl --dir file://migrations stderr '"dev-url" not set' ! atlas migrate diff --dev-url postgres://devdb --dir file://migrations stderr '"to" not set' atlas migrate diff --dev-url URL --to file://./1.hcl first cmpmig 0 1.sql atlas migrate diff --dev-url URL --to file://./2.hcl second cmpmig 1 2.sql -- 1.hcl -- schema "script_cli_migrate_diff" {} table "users" { schema = schema.script_cli_migrate_diff column "id" { null = false type = bigint auto_increment = true } primary_key { columns = [column.id] } } -- 1.sql -- -- create "users" table CREATE TABLE "users" ("id" bigint NOT NULL, PRIMARY KEY ("id")); -- 2.hcl -- schema "script_cli_migrate_diff" {} table "users" { schema = schema.script_cli_migrate_diff column "id" { null = false type = bigint auto_increment = true } column "create_time" { null = false type = timestamp(4) default = sql("CURRENT_TIMESTAMP(4)") } primary_key { columns = [column.id] } } -- 2.sql -- -- modify "users" table ALTER TABLE "users" ADD COLUMN "create_time" timestamp(4) NOT NULL DEFAULT 
CURRENT_TIMESTAMP(4);atlas-0.7.2/internal/integration/testdata/postgres/cli-migrate-lint-modify-nullability.txt000066400000000000000000000044351431455511600321720ustar00rootroot00000000000000only postgres14 atlas migrate lint --dir file://migrations1 --dev-url URL --latest=1 > got.txt cmp got.txt expected1.txt atlas migrate lint --dir file://migrations2 --dev-url URL --latest=1 > got.txt cmp got.txt expected2.txt atlas migrate lint --dir file://migrations3 --dev-url URL --latest=1 > got.txt cmp got.txt empty.txt atlas migrate lint --dir file://migrations4 --dev-url URL --latest=1 > got.txt cmp got.txt expected4.txt -- empty.txt -- -- migrations1/1.sql -- CREATE TABLE users (id int); -- migrations1/2.sql -- ALTER TABLE users ADD COLUMN "rank" int NULL DEFAULT 1; ALTER TABLE users ALTER COLUMN "id" SET NOT NULL; -- expected1.txt -- 2.sql: data dependent changes detected: L2: Modifying nullable column "id" to non-nullable might fail in case it contains NULL values -- migrations2/1.sql -- CREATE TABLE users (id int); -- migrations2/2.sql -- -- Add (a, b, c), backfill (a, b) and then modify all to not-null. ALTER TABLE users ADD COLUMN "a" int, ADD COLUMN "b" int, ADD COLUMN "c" int; UPDATE users SET "a" = 1; UPDATE users SET "b" = 1 WHERE "b" IS NULL; ALTER TABLE users ALTER COLUMN "a" SET NOT NULL, ALTER COLUMN "b" SET NOT NULL, ALTER COLUMN "c" SET NOT NULL; -- expected2.txt -- 2.sql: data dependent changes detected: L5: Modifying nullable column "c" to non-nullable might fail in case it contains NULL values -- migrations3/1.sql -- CREATE TABLE users (id int); ALTER TABLE users ALTER COLUMN "id" SET NOT NULL; -- migrations4/1.sql -- CREATE TABLE users (id int); CREATE TABLE pets (id int); -- migrations4/2.sql -- ALTER TABLE users ADD COLUMN name varchar(255), ADD COLUMN age float; UPDATE users SET name = 'Unknown', age = 0; -- No diagnostics. 
ALTER TABLE users ALTER COLUMN name SET NOT NULL, ALTER COLUMN age SET NOT NULL; ALTER TABLE pets ADD COLUMN name varchar(255), ADD COLUMN age float; UPDATE pets SET name = 'Unknown', age = 0 WHERE random() > 0.5; -- With diagnostics as statement above cannot be sure NULL values are back-filled. ALTER TABLE pets ALTER COLUMN name SET NOT NULL, ALTER COLUMN age SET NOT NULL; -- expected4.txt -- 2.sql: data dependent changes detected: L9: Modifying nullable column "name" to non-nullable might fail in case it contains NULL values L9: Modifying nullable column "age" to non-nullable might fail in case it contains NULL values atlas-0.7.2/internal/integration/testdata/postgres/cli-migrate-status.txt000066400000000000000000000027541431455511600267360ustar00rootroot00000000000000# make sure sum file is correct atlas migrate hash # for clean database atlas migrate status --url URL --revisions-schema $db cmp stdout status_clean.txt # apply one atlas migrate apply --url URL --revisions-schema $db 1 atlas migrate status --url URL --revisions-schema $db cmp stdout status_one_applied.txt # apply next (and last) atlas migrate apply --url URL --revisions-schema $db 1 atlas migrate status --url URL --revisions-schema $db cmp stdout status_ok.txt -- migrations/1.sql -- CREATE TABLE "users" ("id" bigint NOT NULL GENERATED ALWAYS AS IDENTITY, PRIMARY KEY ("id")); -- migrations/2.sql -- ALTER TABLE "users" ADD COLUMN "happy" boolean NOT NULL DEFAULT true; -- broken_migration.sql -- CREATE TABLE "users" ("id" bigint NOT NULL GENERATED ALWAYS AS IDENTITY, PRIMARY KEY ("id")); THIS LINE ADDS A SYNTAX ERROR; -- fixed_migration.sql -- CREATE TABLE "users" ("id" bigint NOT NULL GENERATED ALWAYS AS IDENTITY, PRIMARY KEY ("id")); ALTER TABLE "users" ADD COLUMN "happy" boolean NOT NULL DEFAULT true; -- status_clean.txt -- Migration Status: PENDING -- Current Version: No version applied yet -- Next Version: 1 -- Executed Files: 0 -- Pending Files: 2 -- status_one_applied.txt -- Migration Status: 
PENDING -- Current Version: 1 -- Next Version: 2 -- Executed Files: 1 -- Pending Files: 1 -- status_ok.txt -- Migration Status: OK -- Current Version: 2 -- Next Version: Already at latest version -- Executed Files: 2 -- Pending Files: 0atlas-0.7.2/internal/integration/testdata/postgres/column-array.txt000066400000000000000000000041761431455511600256310ustar00rootroot00000000000000apply 1.hcl cmpshow logs 1.sql # Change size of the underlying type. apply 2.change-size.hcl cmpshow logs 2.sql synced 3.nochange.hcl synced 4.nochange.hcl apply 5.hcl cmpshow logs 5.sql -- 1.hcl -- schema "$db" {} table "logs" { schema = schema.$db column "records" { null = false type = sql("varchar(255)[]") } } -- 1.sql -- Table "script_column_array.logs" Column | Type | Collation | Nullable | Default ---------+--------------------------+-----------+----------+--------- records | character varying(255)[] | | not null | -- 2.change-size.hcl -- schema "$db" {} table "logs" { schema = schema.$db column "records" { null = false type = sql("varchar(100) ARRAY") } } -- 2.sql -- Table "script_column_array.logs" Column | Type | Collation | Nullable | Default ---------+--------------------------+-----------+----------+--------- records | character varying(100)[] | | not null | -- 3.nochange.hcl -- schema "$db" {} table "logs" { schema = schema.$db column "records" { null = false type = sql("varchar(100) ARRAY") } } -- 4.nochange.hcl -- schema "$db" {} table "logs" { schema = schema.$db column "records" { null = false type = sql("varchar(100) [10][]") } } -- 5.hcl -- schema "$db" {} table "logs" { schema = schema.$db column "a" { null = false type = sql("int[1]") } column "b" { null = false type = sql("int ARRAY[1]") } column "c" { null = false type = sql("character varying(100) ARRAY[1]") } column "d" { null = false type = sql("point [][1]") } } -- 5.sql -- Table "script_column_array.logs" Column | Type | Collation | Nullable | Default 
--------+--------------------------+-----------+----------+--------- a | integer[] | | not null | b | integer[] | | not null | c | character varying(100)[] | | not null | d | point[] | | not null | atlas-0.7.2/internal/integration/testdata/postgres/column-bit.txt000066400000000000000000000026361431455511600252700ustar00rootroot00000000000000apply 1.hcl cmpshow t 1.sql # Change size of the underlying type. apply 2.hcl cmpshow t 2.sql -- 1.hcl -- schema "$db" {} table "t" { schema = schema.$db column "c1" { // Equals to bit(1). type = bit } column "c2" { type = bit(2) } column "c3" { // Unlimited length. type = bit_varying } column "c4" { type = bit_varying(1) } } -- 1.sql -- Table "script_column_bit.t" Column | Type | Collation | Nullable | Default --------+----------------+-----------+----------+--------- c1 | bit(1) | | not null | c2 | bit(2) | | not null | c3 | bit varying | | not null | c4 | bit varying(1) | | not null | -- 2.hcl -- schema "$db" {} table "t" { schema = schema.$db column "c1" { // No change. type = bit(1) } column "c2" { // Reduce size. type = bit(1) } column "c3" { // Add size. type = bit_varying(4) } column "c4" { // Increase size. 
type = bit_varying(64) } } -- 2.sql -- Table "script_column_bit.t" Column | Type | Collation | Nullable | Default --------+-----------------+-----------+----------+--------- c1 | bit(1) | | not null | c2 | bit(1) | | not null | c3 | bit varying(4) | | not null | c4 | bit varying(64) | | not null | atlas-0.7.2/internal/integration/testdata/postgres/column-comment.txt000066400000000000000000000017061431455511600261510ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "created_at" { null = false type = timestamp comment = "without time zone" } } -- 1.sql -- Table "script_column_comment.users" Column | Type | Collation | Nullable | Default ------------+-----------------------------+-----------+----------+--------- created_at | timestamp without time zone | | not null | -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "created_at" { null = false type = timestamptz comment = "with time zone" } } -- 2.sql -- Table "script_column_comment.users" Column | Type | Collation | Nullable | Default ------------+--------------------------+-----------+----------+--------- created_at | timestamp with time zone | | not null | atlas-0.7.2/internal/integration/testdata/postgres/column-default.txt000066400000000000000000000026661431455511600261410ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql # Change column default. 
apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { type = character_varying default = "unknown" } column "age" { type = int default = 1 } column "active" { type = boolean default = true } } -- 1.sql -- Table "script_column_default.users" Column | Type | Collation | Nullable | Default --------+-------------------+-----------+----------+------------------------------ name | character varying | | not null | 'unknown'::character varying age | integer | | not null | 1 active | boolean | | not null | true -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { type = character_varying default = "anonymous" } column "age" { type = int default = 0 } column "active" { type = boolean default = false } } -- 2.sql -- Table "script_column_default.users" Column | Type | Collation | Nullable | Default --------+-------------------+-----------+----------+-------------------------------- name | character varying | | not null | 'anonymous'::character varying age | integer | | not null | 0 active | boolean | | not null | false atlas-0.7.2/internal/integration/testdata/postgres/column-enum-array.txt000066400000000000000000000062451431455511600265720ustar00rootroot00000000000000# Create table with an enum array column. apply 1.hcl cmpshow enums 1.sql # Drop an enum array column. apply 2.hcl cmpshow enums 2.sql # Append an enum array column to an existing table. apply 3.hcl cmpshow enums 3.sql # Append an enum column to existing enum. apply 4.hcl cmpshow enums 4.sql # Append an enum value. 
apply 5.hcl cmphcl 5.inspect.hcl -- 1.hcl -- schema "$db" {} enum "status" { schema = schema.$db values = ["active", "inactive"] } table "enums" { schema = schema.$db column "statuses" { type = sql("status[]") } } -- 1.sql -- Table "script_column_enum_array.enums" Column | Type | Collation | Nullable | Default ----------+-----------------------------------+-----------+----------+--------- statuses | script_column_enum_array.status[] | | not null | -- 2.hcl -- schema "$db" {} table "enums" { schema = schema.$db column "a" { type = int } } -- 2.sql -- Table "script_column_enum_array.enums" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- a | integer | | not null | -- 3.hcl -- schema "$db" {} enum "status" { schema = schema.$db values = ["active", "inactive"] } table "enums" { schema = schema.$db column "a" { type = int } column "statuses" { type = sql("status[]") } } -- 3.sql -- Table "script_column_enum_array.enums" Column | Type | Collation | Nullable | Default ----------+-----------------------------------+-----------+----------+--------- a | integer | | not null | statuses | script_column_enum_array.status[] | | not null | -- 4.hcl -- schema "$db" {} enum "status" { schema = schema.$db values = ["active", "inactive"] } table "enums" { schema = schema.$db column "a" { type = int } column "statuses" { type = sql("status[]") } column "status" { type = enum.status } } -- 4.sql -- Table "script_column_enum_array.enums" Column | Type | Collation | Nullable | Default ----------+-----------------------------------+-----------+----------+--------- a | integer | | not null | statuses | script_column_enum_array.status[] | | not null | status | script_column_enum_array.status | | not null | -- 5.hcl -- schema "$db" {} enum "status" { schema = schema.$db values = ["active", "inactive", "unknown"] } table "enums" { schema = schema.$db column "a" { type = int } column "statuses" { type = sql("status[]") } column "status" { type 
= enum.status } } -- 5.inspect.hcl -- table "enums" { schema = schema.script_column_enum_array column "a" { null = false type = integer } column "statuses" { null = false type = sql("script_column_enum_array.status[]") } column "status" { null = false type = enum.status } } enum "status" { schema = schema.script_column_enum_array values = ["active", "inactive", "unknown"] } schema "script_column_enum_array" { } atlas-0.7.2/internal/integration/testdata/postgres/column-enum.txt000066400000000000000000000031421431455511600254470ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql apply 2.hcl cmpshow users 2.sql # Drop the enum. apply 3.hcl cmpshow users 3.sql # Add it back. apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "$db" {} enum "status" { schema = schema.$db values = ["active", "inactive"] } table "users" { schema = schema.$db column "type" { type = enum.status default = "active" } } -- 1.sql -- Table "script_column_enum.users" Column | Type | Collation | Nullable | Default --------+---------------------------+-----------+----------+------------------------------------- type | script_column_enum.status | | not null | 'active'::script_column_enum.status -- 2.hcl -- schema "$db" {} enum "status" { schema = schema.$db values = ["active", "inactive"] } table "users" { schema = schema.$db column "type" { type = enum.status default = "inactive" } } -- 2.sql -- Table "script_column_enum.users" Column | Type | Collation | Nullable | Default --------+---------------------------+-----------+----------+--------------------------------------- type | script_column_enum.status | | not null | 'inactive'::script_column_enum.status -- 3.hcl -- schema "$db" {} table "users" { schema = schema.$db column "int" { type = int default = 1 } } -- 3.sql -- Table "script_column_enum.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- int | integer | | not null | 1 
atlas-0.7.2/internal/integration/testdata/postgres/column-float.txt000066400000000000000000000026431431455511600256150ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "c1" { type = real } column "c2" { type = double_precision } column "c3" { // Equals to real when precision is between 1 and 24. type = float(10) } column "c4" { // Equals to double_precision when precision is between 1 and 24. type = float(30) } } -- 1.sql -- Table "script_column_float.users" Column | Type | Collation | Nullable | Default --------+------------------+-----------+----------+--------- c1 | real | | not null | c2 | double precision | | not null | c3 | real | | not null | c4 | double precision | | not null | -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "c1" { type = double_precision } column "c2" { type = real } column "c3" { type = float(30) } column "c4" { type = float(10) } } -- 2.sql -- Table "script_column_float.users" Column | Type | Collation | Nullable | Default --------+------------------+-----------+----------+--------- c1 | double precision | | not null | c2 | real | | not null | c3 | double precision | | not null | c4 | real | | not null |atlas-0.7.2/internal/integration/testdata/postgres/column-generated-inspect.txt000066400000000000000000000013741431455511600301110ustar00rootroot00000000000000# Skip PostgreSQL 10, 11 as they do not support generated columns. ! 
only postgres10|postgres11 apply 1.hcl cmphcl 1.inspect.hcl -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "a" { type = int } column "b" { type = int as = "1" } column "c" { type = int as { expr = "2" type = STORED } } } -- 1.inspect.hcl -- table "users" { schema = schema.$db column "a" { null = false type = integer } column "b" { null = false type = integer as { expr = "1" type = STORED } } column "c" { null = false type = integer as { expr = "2" type = STORED } } } schema "$db" { } atlas-0.7.2/internal/integration/testdata/postgres/column-generated.txt000066400000000000000000000042521431455511600264440ustar00rootroot00000000000000# Skip PostgreSQL 10, 11 as they do not support generated columns. ! only postgres10|postgres11 apply 1.hcl cmpshow users 1.sql ! apply 2.fail1.hcl 'changing column "a" to generated column is not supported (drop and add is required)' ! apply 2.fail2.hcl 'changing the generation expression for a column "b" is not supported' # Skip PostgreSQL 12 as it does not support 'DROP EXPRESSION'. ! 
only postgres12 apply 3.hcl cmpshow users 3.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "a" { type = int } column "b" { type = int as = "1" } column "c" { type = int as { expr = "2" type = STORED } } } -- 1.sql -- Table "script_column_generated.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+-------------------------------- a | integer | | not null | b | integer | | not null | generated always as (1) stored c | integer | | not null | generated always as (2) stored -- 2.fail1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "a" { type = int as = "0" } column "b" { type = int as = "1" } column "c" { type = int as { expr = "2" type = STORED } } } -- 2.fail2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "a" { type = int } column "b" { type = int as = "2" } column "c" { type = int as { expr = "3" type = STORED } } } -- 3.hcl -- schema "$db" {} table "users" { schema = schema.$db column "a" { type = int } column "b" { type = int } column "c" { type = int } } -- 3.sql -- Table "script_column_generated.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- a | integer | | not null | b | integer | | not null | c | integer | | not null | atlas-0.7.2/internal/integration/testdata/postgres/column-identity.txt000066400000000000000000000020771431455511600263420ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql # Change identity generation. 
apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { null = false type = int identity { generated = ALWAYS start = 10 increment = 10 } } } -- 1.sql -- Table "script_column_identity.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+------------------------------ name | integer | | not null | generated always as identity -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { null = false type = int identity { generated = BY_DEFAULT start = 10 increment = 10 } } } -- 2.sql -- Table "script_column_identity.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+---------------------------------- name | integer | | not null | generated by default as identityatlas-0.7.2/internal/integration/testdata/postgres/column-interval.txt000066400000000000000000000027021431455511600263300ustar00rootroot00000000000000apply 1.hcl cmpshow logs 1.sql cmphcl 1.inspect.hcl -- 1.hcl -- table "logs" { schema = schema.script_column_interval column "a" { null = false type = interval default = "3 hours" } column "b" { null = false type = interval(1) } column "c" { null = false type = second } column "d" { null = false type = second(1) } column "e" { null = true type = day_to_second(4) } } schema "script_column_interval" {} -- 1.sql -- Table "script_column_interval.logs" Column | Type | Collation | Nullable | Default --------+---------------------------+-----------+----------+---------------------- a | interval | | not null | '03:00:00'::interval b | interval(1) | | not null | c | interval second | | not null | d | interval second(1) | | not null | e | interval day to second(4) | | | -- 1.inspect.hcl -- table "logs" { schema = schema.script_column_interval column "a" { null = false type = interval default = sql("'03:00:00'::interval") } column "b" { null = false type = interval(1) } column "c" { null = false type = second } 
column "d" { null = false type = second(1) } column "e" { null = true type = day_to_second(4) } } schema "script_column_interval" { }atlas-0.7.2/internal/integration/testdata/postgres/column-numeric.txt000066400000000000000000000034611431455511600261510ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "a" { null = false type = numeric } column "b" { null = false type = numeric(10) } column "c" { null = false type = numeric(10,2) } column "d" { null = false type = decimal } column "e" { null = false type = decimal(10) } column "f" { null = false type = decimal(10,2) } } -- 1.sql -- Table "script_column_numeric.users" Column | Type | Collation | Nullable | Default --------+---------------+-----------+----------+--------- a | numeric | | not null | b | numeric(10,0) | | not null | c | numeric(10,2) | | not null | d | numeric | | not null | e | numeric(10,0) | | not null | f | numeric(10,2) | | not null | -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "a" { null = false type = numeric(5) } column "b" { null = false type = numeric(10,2) } column "c" { null = false type = numeric } column "d" { null = false type = decimal(4) } column "e" { null = false type = decimal } column "f" { null = false type = decimal(10,3) } } -- 2.sql -- Table "script_column_numeric.users" Column | Type | Collation | Nullable | Default --------+---------------+-----------+----------+--------- a | numeric(5,0) | | not null | b | numeric(10,2) | | not null | c | numeric | | not null | d | numeric(4,0) | | not null | e | numeric | | not null | f | numeric(10,3) | | not null |atlas-0.7.2/internal/integration/testdata/postgres/column-serial.txt000066400000000000000000000041461431455511600257670ustar00rootroot00000000000000apply 1.hcl cmpshow t 1.sql apply 2.hcl cmpshow t 2.sql apply 3.hcl cmpshow t 3.sql -- 1.hcl -- schema "$db" {} table "t" { schema = schema.$db 
column "x" { type = smallserial } column "y" { type = serial } column "z" { type = bigserial } } -- 1.sql -- Table "script_column_serial.t" Column | Type | Collation | Nullable | Default --------+----------+-----------+----------+--------------------------------------------------- x | smallint | | not null | nextval('script_column_serial.t_x_seq'::regclass) y | integer | | not null | nextval('script_column_serial.t_y_seq'::regclass) z | bigint | | not null | nextval('script_column_serial.t_z_seq'::regclass) -- 2.hcl -- schema "$db" {} table "t" { schema = schema.$db column "x" { # Drop sequence. type = smallint } column "y" { # Drop sequence and change type. type = bigint } column "z" { # Change sequence type. type = serial } } -- 2.sql -- Table "script_column_serial.t" Column | Type | Collation | Nullable | Default --------+----------+-----------+----------+--------------------------------------------------- x | smallint | | not null | y | bigint | | not null | z | integer | | not null | nextval('script_column_serial.t_z_seq'::regclass) -- 3.hcl -- schema "$db" {} table "t" { schema = schema.$db column "x" { # Add sequence. type = smallserial } column "y" { # Add sequence and change type. type = serial } } -- 3.sql -- Table "script_column_serial.t" Column | Type | Collation | Nullable | Default --------+----------+-----------+----------+--------------------------------------------------- x | smallint | | not null | nextval('script_column_serial.t_x_seq'::regclass) y | integer | | not null | nextval('script_column_serial.t_y_seq'::regclass) atlas-0.7.2/internal/integration/testdata/postgres/column-time-precision.txt000066400000000000000000000047641431455511600274450ustar00rootroot00000000000000# Apply schema "1.hcl" on fresh database. apply 1.hcl # Compare the result of "\d tbl" with the content of a file named '1.sql'. 
cmpshow tbl 1.sql apply 2.hcl cmpshow tbl 2.sql # Files -- 1.hcl -- schema "$db" {} table "tbl" { schema = schema.$db column "precision_default" { null = false type = timestamp default = sql("CURRENT_TIMESTAMP") } column "timestamp_4" { null = false type = timestamp(4) default = sql("CURRENT_TIMESTAMP(4)") } column "timestamptz_4" { null = false type = timestamptz(4) default = sql("CURRENT_TIMESTAMP(4)") } } -- 1.sql -- Table "script_column_time_precision.tbl" Column | Type | Collation | Nullable | Default -------------------+--------------------------------+-----------+----------+---------------------- precision_default | timestamp without time zone | | not null | CURRENT_TIMESTAMP timestamp_4 | timestamp(4) without time zone | | not null | CURRENT_TIMESTAMP(4) timestamptz_4 | timestamp(4) with time zone | | not null | CURRENT_TIMESTAMP(4) -- 2.hcl -- schema "$db" {} table "tbl" { schema = schema.$db column "c1" { type = timestamptz(1) } column "c2" { type = timestamptz } column "c3" { type = timestamptz(0) } column "c4" { type = time } column "c5" { type = time(1) } column "c6" { type = timestamp } column "c7" { type = timestamp(5) } column "c8" { type = timetz(0) } column "c9" { type = timetz } column "c10" { type = timetz(6) } } -- 2.sql -- Table "script_column_time_precision.tbl" Column | Type | Collation | Nullable | Default --------+--------------------------------+-----------+----------+--------- c1 | timestamp(1) with time zone | | not null | c2 | timestamp with time zone | | not null | c3 | timestamp(0) with time zone | | not null | c4 | time without time zone | | not null | c5 | time(1) without time zone | | not null | c6 | timestamp without time zone | | not null | c7 | timestamp(5) without time zone | | not null | c8 | time(0) with time zone | | not null | c9 | time with time zone | | not null | c10 | time with time zone | | not null | 
atlas-0.7.2/internal/integration/testdata/postgres/index-desc.txt000066400000000000000000000060771431455511600252450ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql # Drop the "DESC" option from the key part. apply 2.hcl cmpshow users 2.sql # Use of "columns" instead of "on" should not trigger a change. synced 2-no-change.hcl apply 3.hcl cmpshow users 3.sql apply 4.hcl cmpshow users 4.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "rank" { type = int } index "rank_idx" { on { desc = true column = column.rank } } } -- 1.sql -- Table "script_index_desc.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- rank | integer | | not null | Indexes: "rank_idx" btree (rank DESC) -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "rank" { type = int } index "rank_idx" { on { column = table.users.column.rank } } } -- 2.sql -- Table "script_index_desc.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- rank | integer | | not null | Indexes: "rank_idx" btree (rank) -- 2-no-change.hcl -- schema "$db" {} table "users" { schema = schema.$db column "rank" { type = int } index "rank_idx" { columns = [ table.users.column.rank, ] } } -- 3.hcl -- schema "$db" {} table "users" { schema = schema.$db column "rank" { type = int } column "score" { type = int } index "rank_score_idx" { on { column = table.users.column.rank } on { column = table.users.column.score desc = true } } } -- 3.sql -- Table "script_index_desc.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- rank | integer | | not null | score | integer | | not null | Indexes: "rank_score_idx" btree (rank, score DESC) -- 4.hcl -- schema "$db" {} table "users" { schema = schema.$db column "rank" { type = int } column "score" { type = int } index "double_rank_desc_idx" { on { expr = "rank * 2" desc = true } } index 
"double_score_desc_idx" { on { expr = "score * 2" desc = true } } index "double_rank_idx" { on { expr = "rank * 2" desc = false } } index "double_score_idx" { on { expr = "score * 2" desc = false } } } -- 4.sql -- Table "script_index_desc.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- rank | integer | | not null | score | integer | | not null | Indexes: "double_rank_desc_idx" btree ((rank * 2) DESC) "double_rank_idx" btree ((rank * 2)) "double_score_desc_idx" btree ((score * 2) DESC) "double_score_idx" btree ((score * 2))atlas-0.7.2/internal/integration/testdata/postgres/index-expr.txt000066400000000000000000000027071431455511600253010ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "first_name" { null = false type = varchar(128) } column "last_name" { null = false type = varchar(128) } index "full_name" { on { expr = "first_name || ' ' || last_name" } } } -- 1.sql -- Table "script_index_expr.users" Column | Type | Collation | Nullable | Default ------------+------------------------+-----------+----------+--------- first_name | character varying(128) | | not null | last_name | character varying(128) | | not null | Indexes: "full_name" btree (((first_name::text || ' '::text) || last_name::text)) -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "first_name" { null = false type = varchar(128) } column "last_name" { null = false type = varchar(128) } index "full_name" { on { expr = "first_name || '''s first name'" } } } -- 2.sql -- Table "script_index_expr.users" Column | Type | Collation | Nullable | Default ------------+------------------------+-----------+----------+--------- first_name | character varying(128) | | not null | last_name | character varying(128) | | not null | Indexes: "full_name" btree ((first_name::text || '''s first 
name'::text))atlas-0.7.2/internal/integration/testdata/postgres/index-include.txt000066400000000000000000000056231431455511600257460ustar00rootroot00000000000000! only postgres10 # Create with INCLUDE. apply 1.hcl cmpshow users 1.sql # Append one column to INCLUDE. apply 2.hcl cmpshow users 2.sql # Remove one column from INCLUDE. apply 3.hcl cmpshow users 3.sql # Remove INCLUDE. apply 4.hcl cmpshow users 4.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { null = false type = text } column "active" { null = true type = boolean } index "users_name" { columns = [column.name] where = "active" include = [column.active] } } -- 1.sql -- Table "script_index_include.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- name | text | | not null | active | boolean | | | Indexes: "users_name" btree (name) INCLUDE (active) WHERE active -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { null = false type = text } column "active" { null = true type = boolean } column "version" { null = true type = text } index "users_name" { columns = [column.name] where = "active" include = [column.active, column.version] } } -- 2.sql -- Table "script_index_include.users" Column | Type | Collation | Nullable | Default ---------+---------+-----------+----------+--------- name | text | | not null | active | boolean | | | version | text | | | Indexes: "users_name" btree (name) INCLUDE (active, version) WHERE active -- 3.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { null = false type = text } column "active" { null = true type = boolean } column "version" { null = true type = text } index "users_name" { columns = [column.name] where = "active" include = [column.version] } } -- 3.sql -- Table "script_index_include.users" Column | Type | Collation | Nullable | Default ---------+---------+-----------+----------+--------- name | text | | not null | active | 
boolean | | | version | text | | | Indexes: "users_name" btree (name) INCLUDE (version) WHERE active -- 4.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { null = false type = text } column "active" { null = true type = boolean } column "version" { null = true type = text } index "users_name" { columns = [column.name] where = "active" } } -- 4.sql -- Table "script_index_include.users" Column | Type | Collation | Nullable | Default ---------+---------+-----------+----------+--------- name | text | | not null | active | boolean | | | version | text | | | Indexes: "users_name" btree (name) WHERE active atlas-0.7.2/internal/integration/testdata/postgres/index-issue-557.txt000066400000000000000000000021051431455511600257610ustar00rootroot00000000000000apply 1.hcl cmpshow t1 t2 1.sql -- 1.hcl -- schema "$db" {} table "t1" { schema = schema.$db column "a" { null = false type = uuid } column "b" { null = true type = timestamp(6) } index "t1_a_b" { on { column = column.a } on { desc = true column = column.b } unique = true } } table "t2" { schema = schema.$db column "a" { null = false type = uuid } primary_key { columns = [column.a] } } -- 1.sql -- Table "script_index_issue_557.t1" Column | Type | Collation | Nullable | Default --------+-----------------------------+-----------+----------+--------- a | uuid | | not null | b | timestamp without time zone | | | Indexes: "t1_a_b" UNIQUE, btree (a, b DESC) Table "script_index_issue_557.t2" Column | Type | Collation | Nullable | Default --------+------+-----------+----------+--------- a | uuid | | not null | Indexes: "t2_pkey" PRIMARY KEY, btree (a) atlas-0.7.2/internal/integration/testdata/postgres/index-partial.txt000066400000000000000000000022771431455511600257610ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { null = false type = text } column "active" { null = true type = 
boolean } index "users_name" { columns = [column.name] where = "active" } } -- 1.sql -- Table "script_index_partial.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- name | text | | not null | active | boolean | | | Indexes: "users_name" btree (name) WHERE active -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { null = false type = text } column "active" { null = true type = boolean } index "users_name" { columns = [column.name] where = "active AND name <> ''" } } -- 2.sql -- Table "script_index_partial.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- name | text | | not null | active | boolean | | | Indexes: "users_name" btree (name) WHERE active AND name <> ''::textatlas-0.7.2/internal/integration/testdata/postgres/index-type-brin.txt000066400000000000000000000030241431455511600262250ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql # Add the "page_per_range" storage parameter. apply 2.hcl cmpshow users 2.sql # Change the "page_per_range" storage parameter. 
apply 3.hcl cmpshow users 3.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "c" { null = false type = int } index "users_c" { type = BRIN columns = [column.c] } } -- 1.sql -- Table "script_index_type_brin.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- c | integer | | not null | Indexes: "users_c" brin (c) -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "c" { null = false type = int } index "users_c" { type = BRIN columns = [column.c] page_per_range = 2 } } -- 2.sql -- Table "script_index_type_brin.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- c | integer | | not null | Indexes: "users_c" brin (c) WITH (pages_per_range='2') -- 3.hcl -- schema "$db" {} table "users" { schema = schema.$db column "c" { null = false type = int } index "users_c" { type = BRIN columns = [column.c] page_per_range = 3 } } -- 3.sql -- Table "script_index_type_brin.users" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- c | integer | | not null | Indexes: "users_c" brin (c) WITH (pages_per_range='3')atlas-0.7.2/internal/integration/testdata/postgres/index-type.txt000066400000000000000000000025271431455511600253040ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql # Change index type. 
apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { null = false type = text } column "data" { null = true type = jsonb } index "users_name" { type = HASH columns = [column.name] } index "users_data" { type = GIN columns = [column.data] } } -- 1.sql -- Table "script_index_type.users" Column | Type | Collation | Nullable | Default --------+-------+-----------+----------+--------- name | text | | not null | data | jsonb | | | Indexes: "users_data" gin (data) "users_name" hash (name) -- 2.hcl -- schema "$db" {} table "users" { schema = schema.$db column "name" { null = false type = text } column "data" { null = true type = jsonb } index "users_name" { columns = [column.name] # Index without "using" defaults to BTREE. } index "users_data" { type = BTREE columns = [column.data] } } -- 2.sql -- Table "script_index_type.users" Column | Type | Collation | Nullable | Default --------+-------+-----------+----------+--------- name | text | | not null | data | jsonb | | | Indexes: "users_data" btree (data) "users_name" btree (name) atlas-0.7.2/internal/integration/testdata/postgres/index-unique-constraint.txt000066400000000000000000000042701431455511600300100ustar00rootroot00000000000000# Create table with UNIQUE constraint. i.e. implicit unique index. execsql 'CREATE TABLE script_index_unique_constraint.users (name text, last text, nickname text UNIQUE, UNIQUE(name, last))' cmphcl 1.inspect.hcl # Dropping the unique index on the "nickname" column should drop the constraint as well. 
apply 2.hcl cmpshow users 2.sql apply 3.hcl cmpshow users 3.sql -- 1.inspect.hcl -- table "users" { schema = schema.script_index_unique_constraint column "name" { null = true type = text } column "last" { null = true type = text } column "nickname" { null = true type = text } index "users_name_last_key" { unique = true columns = [column.name, column.last] type = BTREE } index "users_nickname_key" { unique = true columns = [column.nickname] type = BTREE } } schema "script_index_unique_constraint" { } -- 2.hcl -- table "users" { schema = schema.script_index_unique_constraint column "name" { null = true type = text } column "last" { null = true type = text } column "nickname" { null = true type = text } index "users_name_last_key" { unique = true columns = [column.name, column.last] type = BTREE } } schema "script_index_unique_constraint" { } -- 2.sql -- Table "script_index_unique_constraint.users" Column | Type | Collation | Nullable | Default ----------+------+-----------+----------+--------- name | text | | | last | text | | | nickname | text | | | Indexes: "users_name_last_key" UNIQUE CONSTRAINT, btree (name, last) -- 3.hcl -- table "users" { schema = schema.script_index_unique_constraint column "name" { null = true type = text } column "last" { null = true type = text } column "nickname" { null = true type = text } } schema "script_index_unique_constraint" { } -- 3.sql -- Table "script_index_unique_constraint.users" Column | Type | Collation | Nullable | Default ----------+------+-----------+----------+--------- name | text | | | last | text | | | nickname | text | | |atlas-0.7.2/internal/integration/testdata/postgres/table-partition.txt000066400000000000000000000201651431455511600263120ustar00rootroot00000000000000apply 1.hcl cmpshow logs 1.sql # Changing partitioned table is not allowed. ! 
apply 2.hcl 'partition key of table "logs" cannot be changed from PARTITION BY LIST ("value") to PARTITION BY RANGE ("a", (b * (a % 2))) (drop and add is required)' # Drop all tables. apply drop.hcl # Recreate partitioned table. apply 2.hcl cmpshow logs 2.sql # Drop all tables. apply drop.hcl apply 3.hcl cmpshow measurement 3.sql cmphcl 3.inspect.hcl execsql 'CREATE TABLE measurement_y2006m02 PARTITION OF $db.measurement FOR VALUES FROM (''2006-02-01'') TO (''2006-03-01'')' cmpshow measurement 3.partition.sql # Drop all tables. apply drop.hcl apply 4.hcl cmpshow metrics 4.sql cmphcl 4.inspect.hcl -- 1.hcl -- schema "$db" {} table "logs" { schema = schema.$db column "value" { null = false type = integer } partition { type = LIST columns = [column.value] } } -- postgres10/1.sql -- Table "script_table_partition.logs" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- value | integer | | not null | Partition key: LIST (value) -- postgres11/1.sql -- Table "script_table_partition.logs" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- value | integer | | not null | Partition key: LIST (value) Number of partitions: 0 -- 1.sql -- Partitioned table "script_table_partition.logs" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- value | integer | | not null | Partition key: LIST (value) Number of partitions: 0 -- 2.hcl -- schema "$db" {} table "logs" { schema = schema.$db column "a" { null = false type = integer } column "b" { null = false type = integer } partition { type = RANGE by { column = column.a } by { expr = "b * (a % 2)" } } } -- postgres10/2.sql -- Table "script_table_partition.logs" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- a | integer | | not null | b | integer | | not null | Partition key: RANGE (a, ((b * (a % 2)))) -- postgres11/2.sql -- Table 
"script_table_partition.logs" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- a | integer | | not null | b | integer | | not null | Partition key: RANGE (a, ((b * (a % 2)))) Number of partitions: 0 -- 2.sql -- Partitioned table "script_table_partition.logs" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- a | integer | | not null | b | integer | | not null | Partition key: RANGE (a, ((b * (a % 2)))) Number of partitions: 0 -- 3.hcl -- schema "$db" {} # The partitioned table from the PostgreSQL doc. table "measurement" { schema = schema.$db column "city_id" { null = false type = integer } column "logdate" { null = false type = date } column "peaktemp" { null = true type = int } column "unitsales" { null = true type = int } partition { type = RANGE columns = [column.logdate] } } -- postgres10/3.sql -- Table "script_table_partition.measurement" Column | Type | Collation | Nullable | Default -----------+---------+-----------+----------+--------- city_id | integer | | not null | logdate | date | | not null | peaktemp | integer | | | unitsales | integer | | | Partition key: RANGE (logdate) -- postgres11/3.sql -- Table "script_table_partition.measurement" Column | Type | Collation | Nullable | Default -----------+---------+-----------+----------+--------- city_id | integer | | not null | logdate | date | | not null | peaktemp | integer | | | unitsales | integer | | | Partition key: RANGE (logdate) Number of partitions: 0 -- 3.sql -- Partitioned table "script_table_partition.measurement" Column | Type | Collation | Nullable | Default -----------+---------+-----------+----------+--------- city_id | integer | | not null | logdate | date | | not null | peaktemp | integer | | | unitsales | integer | | | Partition key: RANGE (logdate) Number of partitions: 0 -- postgres10/3.partition.sql -- Table "script_table_partition.measurement" Column | Type | Collation | Nullable | Default 
-----------+---------+-----------+----------+--------- city_id | integer | | not null | logdate | date | | not null | peaktemp | integer | | | unitsales | integer | | | Partition key: RANGE (logdate) Number of partitions: 1 (Use \d+ to list them.) -- postgres11/3.partition.sql -- Table "script_table_partition.measurement" Column | Type | Collation | Nullable | Default -----------+---------+-----------+----------+--------- city_id | integer | | not null | logdate | date | | not null | peaktemp | integer | | | unitsales | integer | | | Partition key: RANGE (logdate) Number of partitions: 1 (Use \d+ to list them.) -- 3.partition.sql -- Partitioned table "script_table_partition.measurement" Column | Type | Collation | Nullable | Default -----------+---------+-----------+----------+--------- city_id | integer | | not null | logdate | date | | not null | peaktemp | integer | | | unitsales | integer | | | Partition key: RANGE (logdate) Number of partitions: 1 (Use \d+ to list them.) -- 3.inspect.hcl -- table "measurement" { schema = schema.script_table_partition column "city_id" { null = false type = integer } column "logdate" { null = false type = date } column "peaktemp" { null = true type = integer } column "unitsales" { null = true type = integer } partition { type = RANGE columns = [column.logdate] } } schema "script_table_partition" { } -- 4.hcl -- schema "$db" {} table "metrics" { schema = schema.$db column "x" { null = false type = integer } column "y" { null = false type = integer } partition { type = RANGE by { column = column.x } by { expr = "floor(x)" } by { expr = "y * 2" } by { expr = "floor(y)" } } } -- postgres10/4.sql -- Table "script_table_partition.metrics" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- x | integer | | not null | y | integer | | not null | Partition key: RANGE (x, floor((x)::double precision), ((y * 2)), floor((y)::double precision)) -- postgres11/4.sql -- Table 
"script_table_partition.metrics" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- x | integer | | not null | y | integer | | not null | Partition key: RANGE (x, floor((x)::double precision), ((y * 2)), floor((y)::double precision)) Number of partitions: 0 -- 4.sql -- Partitioned table "script_table_partition.metrics" Column | Type | Collation | Nullable | Default --------+---------+-----------+----------+--------- x | integer | | not null | y | integer | | not null | Partition key: RANGE (x, floor((x)::double precision), ((y * 2)), floor((y)::double precision)) Number of partitions: 0 -- 4.inspect.hcl -- table "metrics" { schema = schema.script_table_partition column "x" { null = false type = integer } column "y" { null = false type = integer } partition { type = RANGE by { column = column.x } by { expr = "floor((x)::double precision)" } by { expr = "(y * 2)" } by { expr = "floor((y)::double precision)" } } } schema "script_table_partition" { } -- drop.hcl -- schema "$db" {}atlas-0.7.2/internal/integration/testdata/sqlite/000077500000000000000000000000001431455511600221025ustar00rootroot00000000000000atlas-0.7.2/internal/integration/testdata/sqlite/autoincrement.txt000066400000000000000000000004741431455511600255250ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql -- 1.hcl -- schema "main" {} table "users" { schema = schema.main column "id" { null = false type = integer auto_increment = true } primary_key { columns = [column.id] } } -- 1.sql -- CREATE TABLE `users` (`id` integer NOT NULL PRIMARY KEY AUTOINCREMENT)atlas-0.7.2/internal/integration/testdata/sqlite/cli-apply-directory.txt000066400000000000000000000020271431455511600265400ustar00rootroot00000000000000! 
atlas schema apply -f empty/ -u URL --auto-approve stderr 'no schema files found' atlas schema apply -f schema/ -u URL --auto-approve cmpshow users expected.sql atlas schema apply -f schema/ -u URL --auto-approve --dry-run --exclude "*.*.id" > inspected.txt cmp inspected.txt alter.txt atlas schema apply -f schema/ -u URL --auto-approve --dry-run --exclude "*.users" > inspected.txt cmp inspected.txt create.txt -- schema/users.hcl -- table "users" { schema = schema.main column "id" { null = false type = int } column "status" { null = true type = text default = "hello" } } -- schema/schema.hcl -- schema "main" { } -- empty/hello.txt -- hello, world! -- expected.sql -- CREATE TABLE `users` (`id` int NOT NULL, `status` text NULL DEFAULT 'hello') -- alter.txt -- -- Planned Changes: -- Add column "id" to table: "users" ALTER TABLE `users` ADD COLUMN `id` int NOT NULL -- create.txt -- -- Planned Changes: -- Create "users" table CREATE TABLE `users` (`id` int NOT NULL, `status` text NULL DEFAULT 'hello') atlas-0.7.2/internal/integration/testdata/sqlite/cli-apply-multifile.txt000066400000000000000000000006351431455511600265310ustar00rootroot00000000000000atlas schema apply -f users.hcl -f schema.hcl -u URL --auto-approve cmpshow users expected.sql -- users.hcl -- table "users" { schema = schema.main column "id" { null = false type = int } column "status" { null = true type = text default = "hello" } } -- schema.hcl -- schema "main" { } -- expected.sql -- CREATE TABLE `users` (`id` int NOT NULL, `status` text NULL DEFAULT 'hello')atlas-0.7.2/internal/integration/testdata/sqlite/cli-apply-project-multifile.txt000066400000000000000000000010631431455511600301710ustar00rootroot00000000000000atlas schema apply --env local --auto-approve cmpshow users expected.sql -- atlas.hcl -- env "local" { url = "URL" src = "./schema" def_val = "hello" } -- schema/vars.hcl -- variable "def_val" { type = string } -- schema/table.hcl -- table "users" { schema = schema.main column "id" { null = 
false type = int } column "status" { null = true type = text default = var.def_val } } -- schema/schema.hcl -- schema "main" { } -- expected.sql -- CREATE TABLE `users` (`id` int NOT NULL, `status` text NULL DEFAULT 'hello')atlas-0.7.2/internal/integration/testdata/sqlite/cli-apply-vars.txt000066400000000000000000000012521431455511600255060ustar00rootroot00000000000000! atlas schema apply --env local --auto-approve stderr 'missing value for required variable "def_val"' atlas schema apply --env local_with_vals --auto-approve cmpshow users expected.sql -- atlas.hcl -- env "local" { url = "URL" src = "./1.hcl" } env "local_with_vals" { url = "URL" src = "./1.hcl" def_val = "hello" } -- 1.hcl -- variable "def_val" { type = string } table "users" { schema = schema.main column "id" { null = false type = int } column "status" { null = true type = text default = var.def_val } } schema "main" { } -- expected.sql -- CREATE TABLE `users` (`id` int NOT NULL, `status` text NULL DEFAULT 'hello')atlas-0.7.2/internal/integration/testdata/sqlite/cli-inspect.txt000066400000000000000000000023051431455511600250550ustar00rootroot00000000000000apply 1.hcl # test deprecated -d flag atlas schema inspect -d URL > inspected.hcl cmp inspected.hcl 1.hcl # test url flag atlas schema inspect -u URL > inspected.hcl cmp inspected.hcl 1.hcl # test exclude flag on schema. atlas schema inspect -u URL --exclude "main" > inspected.hcl cmp inspected.hcl empty.hcl # test exclude flag on table. atlas schema inspect -u URL --exclude "*.users" > inspected.hcl cmp inspected.hcl notable.hcl # test exclude flag on column. atlas schema inspect -u URL --exclude "main.*.[ab]*" > inspected.hcl cmp inspected.hcl id.hcl # test exclude flag on column. 
atlas schema inspect -u URL --exclude "*.*.*" > inspected.hcl cmp inspected.hcl nocolumn.hcl -- 1.hcl -- table "users" { schema = schema.main column "id" { null = false type = int } column "a" { null = false type = int } column "b" { null = false type = int } column "ab" { null = false type = int } } schema "main" { } -- empty.hcl -- -- notable.hcl -- schema "main" { } -- id.hcl -- table "users" { schema = schema.main column "id" { null = false type = int } } schema "main" { } -- nocolumn.hcl -- table "users" { schema = schema.main } schema "main" { }atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-apply.txt000066400000000000000000000107741431455511600261740ustar00rootroot00000000000000! atlas migrate apply stderr 'Error: checksum file not found' stdout 'You have a checksum error in your migration directory.' stdout 'atlas migrate hash' atlas migrate hash # Apply all of them atlas migrate apply --url URL stdout 'Migrating to version 2 \(2 migrations in total\):' stdout '-- migrating version 1' stdout '-> CREATE TABLE `users` \(`id` integer NOT NULL, `age` integer NOT NULL, `name` TEXT NOT NULL, PRIMARY KEY \(`id`\)\);' stdout '-- migrating version 2' stdout '-> CREATE TABLE `pets` \(`id` integer NOT NULL, `name` TEXT NOT NULL, PRIMARY KEY \(`id`\)\);' stdout '-- 2 migrations' stdout '-- 2 sql statements' cmpshow users users.sql cmpshow pets pets.sql atlas migrate apply --url URL 1 stdout 'No migration files to execute' clearSchema # Apply one by one atlas migrate apply --url URL 1 stdout 'Migrating to version 1 \(1 migrations in total\):' cmpshow users users.sql atlas migrate apply --url URL 1 stdout 'Migrating to version 2 from 1 \(1 migrations in total\):' cmpshow users users.sql cmpshow pets pets.sql atlas migrate apply --url URL 1 stdout 'No migration files to execute' clearSchema # Move the broken migration into the migrations directory and check the different transaction modes. cp broken.sql migrations/3_third.sql atlas migrate hash ! 
atlas migrate apply --url URL --tx-mode invalid stderr 'unknown tx-mode "invalid"' # Test --tx-mode all ! atlas migrate apply --url URL --tx-mode all stderr 'executing statement "THIS IS A FAILING STATEMENT;" from version "3"' atlas schema inspect --url URL --exclude $db.atlas_schema_revisions cmp stdout empty.hcl # Apply one migration, after rolling everything back, the first revision must still exist. atlas migrate apply --url URL 1 atlas schema inspect --url URL --exclude $db.atlas_schema_revisions --exclude $db.users cmp stdout empty.hcl cmpshow users users.sql ! atlas migrate apply --url URL --tx-mode all stderr 'executing statement "THIS IS A FAILING STATEMENT;" from version "3"' atlas schema inspect --url URL --exclude $db.atlas_schema_revisions --exclude $db.users cmp stdout empty.hcl # If the broken migration is gone, we can apply everything without any problems. rm migrations/3_third.sql atlas migrate hash atlas migrate apply --url URL --revisions-schema $db cmpshow users users.sql cmpshow pets pets.sql atlas schema inspect --url URL --exclude $db.atlas_schema_revisions --exclude $db.users --exclude $db.pets cmp stdout empty.hcl clearSchema # Test --tx-mode file cp broken.sql migrations/3_third.sql atlas migrate hash ! atlas migrate apply --url URL --tx-mode file stderr 'executing statement "THIS IS A FAILING STATEMENT;" from version "3"' atlas schema inspect --url URL --exclude $db.atlas_schema_revisions cmpshow users users.sql cmpshow pets pets.sql # Table "broken" does not exist since we rolled back that migration. atlas schema inspect --url URL --exclude $db.atlas_schema_revisions --exclude $db.users --exclude $db.pets cmp stdout empty.hcl # If the broken migration is gone, we can apply everything without any problems. 
rm migrations/3_third.sql atlas migrate hash atlas migrate apply --url URL --revisions-schema $db cmpshow users users.sql cmpshow pets pets.sql atlas schema inspect --url URL --exclude $db.atlas_schema_revisions --exclude $db.users --exclude $db.pets cmp stdout empty.hcl clearSchema # Test --tx-mode none cp broken.sql migrations/3_third.sql atlas migrate hash ! atlas migrate apply --url URL --tx-mode none stderr 'executing statement "THIS IS A FAILING STATEMENT;" from version "3"' atlas schema inspect --url URL --exclude $db.atlas_schema_revisions cmpshow users users.sql cmpshow pets pets.sql # Table "broken" does exist since we do not have transactions. atlas schema inspect --url URL --exclude $db.atlas_schema_revisions --exclude $db.users --exclude $db.pets cmp stdout broken.hcl -- migrations/1_first.sql -- CREATE TABLE `users` (`id` integer NOT NULL, `age` integer NOT NULL, `name` TEXT NOT NULL, PRIMARY KEY (`id`)); -- migrations/2_second.sql -- CREATE TABLE `pets` (`id` integer NOT NULL, `name` TEXT NOT NULL, PRIMARY KEY (`id`)); -- broken.sql -- CREATE TABLE `broken` (`id` integer); THIS IS A FAILING STATEMENT; -- empty.hcl -- schema "main" { } -- broken.hcl -- table "broken" { schema = schema.main column "id" { null = true type = integer } } schema "main" { } -- users.sql -- CREATE TABLE `users` (`id` integer NOT NULL, `age` integer NOT NULL, `name` TEXT NOT NULL, PRIMARY KEY (`id`)) -- pets.sql -- CREATE TABLE `pets` (`id` integer NOT NULL, `name` TEXT NOT NULL, PRIMARY KEY (`id`)) atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-diff-minimal-env.txt000066400000000000000000000005111431455511600301550ustar00rootroot00000000000000atlas migrate diff --env local cmpmig 0 diff.sql -- atlas.hcl -- env "local" { src = "1.hcl" dev = "sqlite://devdb" } -- 1.hcl -- table "users" { schema = schema.main column "id" { null = false type = int } } schema "main" { } -- diff.sql -- -- create "users" table CREATE TABLE `users` (`id` int NOT 
NULL);atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-diff-multifile.txt000066400000000000000000000015061431455511600277400ustar00rootroot00000000000000! atlas migrate diff --dev-url sqlite://devdb --to file://schema/ --to other://scheme --dir file://migrations stderr 'got mixed --to url schemes' ! atlas migrate diff --dev-url sqlite://devdb --to mysql://localhost/x --to mysql://localhost/y --dir file://migrations stderr 'got multiple --to urls of scheme' atlas migrate diff --dev-url sqlite://devdb --to file://schema/ --dir file://migrations cmpmig 0 diff.sql # reset dir exec rm -rf migrations/ atlas migrate diff --dev-url sqlite://devdb --to file://schema/schema.hcl --to file://schema/table.hcl --dir file://migrations cmpmig 0 diff.sql -- schema/schema.hcl -- schema "main" { } -- schema/table.hcl -- table "users" { schema = schema.main column "id" { null = false type = int } } -- diff.sql -- -- create "users" table CREATE TABLE `users` (`id` int NOT NULL);atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-diff.txt000066400000000000000000000010021431455511600257370ustar00rootroot00000000000000exec mkdir migrations ! atlas migrate diff --to file://1.hcl --dir file://migrations stderr '"dev-url" not set' ! 
atlas migrate diff --dev-url sqlite://devdb --dir file://migrations stderr '"to" not set' atlas migrate diff --dev-url sqlite://devdb --to file://1.hcl --dir file://migrations cmpmig 0 diff.sql -- 1.hcl -- table "users" { schema = schema.main column "id" { null = false type = int } } schema "main" { } -- diff.sql -- -- create "users" table CREATE TABLE `users` (`id` int NOT NULL);atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-lint-add-notnull.txt000066400000000000000000000011101431455511600302140ustar00rootroot00000000000000atlas migrate lint --dir file://migrations --dev-url URL --latest=2 > got1.txt cmp got1.txt expected1.txt -- migrations/1.sql -- CREATE TABLE users (id int); /* Adding a not-null column without default to a table created in this file should not report. */ ALTER TABLE users ADD COLUMN c1 int NOT NULL; -- migrations/2.sql -- ALTER TABLE users ADD COLUMN c2 int NOT NULL; ALTER TABLE users ADD COLUMN c3 int NOT NULL DEFAULT 1; -- expected1.txt -- 2.sql: data dependent changes detected: L1: Adding a non-nullable "int" column "c2" will fail in case table "users" is not empty atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-lint-destructive.txt000066400000000000000000000013601431455511600303430ustar00rootroot00000000000000# Expect the command to fail; exit code 1. ! atlas migrate lint --dir file://migrations --dev-url URL --latest=1 > got1.txt cmp got1.txt expected1.txt # Expect the command to fail; exit code 1. ! 
atlas migrate lint --dir file://migrations --dev-url URL --latest=2 > got2.txt cmp got2.txt expected2.txt -- migrations/1.sql -- CREATE TABLE users (id int); CREATE TABLE pets (id int); ALTER TABLE users RENAME COLUMN id TO oid; -- migrations/2.sql -- DROP TABLE users; -- migrations/3.sql -- DROP TABLE pets; -- expected1.txt -- 3.sql: destructive changes detected: L1: Dropping table "pets" -- expected2.txt -- 2.sql: destructive changes detected: L1: Dropping table "users" 3.sql: destructive changes detected: L1: Dropping table "pets" atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-lint-ignore.txt000066400000000000000000000022451431455511600272700ustar00rootroot00000000000000# Ignore all diagnostics. atlas migrate lint --dir file://migrations1 --dev-url URL --latest=1 > got.txt cmp got.txt empty.txt # Ignore specific diagnostics. atlas migrate lint --dir file://migrations2 --dev-url URL --latest=1 > got.txt cmp got.txt empty.txt # Ignore by code. atlas migrate lint --dir file://migrations3 --dev-url URL --latest=1 > got.txt cmp got.txt expected3.txt -- migrations1/1.sql -- CREATE TABLE users (id int); CREATE TABLE pets (id int); -- migrations1/2.sql -- -- atlas:nolint ALTER TABLE users ADD COLUMN name text NOT NULL; -- atlas:nolint DROP TABLE pets; -- migrations2/1.sql -- CREATE TABLE users (id int); CREATE TABLE pets (id int); -- migrations2/2.sql -- -- atlas:nolint data_depend ALTER TABLE users ADD COLUMN name text NOT NULL; -- atlas:nolint destructive DROP TABLE pets; -- empty.txt -- -- migrations3/1.sql -- CREATE TABLE users (id int); CREATE TABLE pets (id int); -- migrations3/2.sql -- ALTER TABLE users ADD COLUMN name text NOT NULL; -- atlas:nolint DS102 DROP TABLE pets; -- expected3.txt -- 2.sql: data dependent changes detected: L1: Adding a non-nullable "text" column "name" will fail in case table "users" is not empty 
atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-lint-minimal-env.txt000066400000000000000000000005421431455511600302170ustar00rootroot00000000000000# Expect the command to fail; exit code 1. ! atlas migrate lint --env local --latest=2 > got1.txt cmp got1.txt expected1.txt -- atlas.hcl -- env "local" { dev = "URL" } -- migrations/1.sql -- CREATE TABLE users (id int); -- migrations/2.sql -- DROP TABLE users; -- expected1.txt -- 2.sql: destructive changes detected: L1: Dropping table "users" atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-lint-modify-nullability.txt000066400000000000000000000051251431455511600316220ustar00rootroot00000000000000# Modify column from nullable to non-nullable with default value. atlas migrate lint --dir file://migrations1 --dev-url URL --latest=1 > got.txt cmp got.txt empty.txt # Modify column from nullable to non-nullable without default value. atlas migrate lint --dir file://migrations2 --dev-url URL --latest=1 > got.txt cmp got.txt expected2.txt # Modify column from nullable to non-nullable without default value but backfill previous rows. 
atlas migrate lint --dir file://migrations3 --dev-url URL --latest=1 > got.txt cmp got.txt empty.txt -- empty.txt -- -- migrations1/1.sql -- CREATE TABLE `users` (`a` int NULL); -- migrations1/2.sql -- -- disable the enforcement of foreign-keys constraints PRAGMA foreign_keys = off; -- create "new_users" table CREATE TABLE `new_users` (`a` int NOT NULL DEFAULT 1); -- copy rows from old table "users" to new temporary table "new_users" INSERT INTO `new_users` (`a`) SELECT IFNULL(`a`, 1) FROM `users`; -- drop "users" table after copying rows DROP TABLE `users`; -- rename temporary table "new_users" to "users" ALTER TABLE `new_users` RENAME TO `users`; -- enable back the enforcement of foreign-keys constraints PRAGMA foreign_keys = on; -- migrations2/1.sql -- CREATE TABLE `users` (`a` int NULL); -- migrations2/2.sql -- -- disable the enforcement of foreign-keys constraints PRAGMA foreign_keys = off; -- create "new_users" table CREATE TABLE `new_users` (`a` int NOT NULL); -- copy rows from old table "users" to new temporary table "new_users" INSERT INTO `new_users` (`a`) SELECT `a` FROM `users`; -- drop "users" table after copying rows DROP TABLE `users`; -- rename temporary table "new_users" to "users" ALTER TABLE `new_users` RENAME TO `users`; -- enable back the enforcement of foreign-keys constraints PRAGMA foreign_keys = on; -- expected2.txt -- 2.sql: data dependent changes detected: L4: Modifying nullable column "a" to non-nullable without default value might fail in case it contains NULL values -- migrations3/1.sql -- CREATE TABLE `users` (`a` int NULL); -- migrations3/2.sql -- -- backfill previous rows UPDATE `users` SET `a` = 1 WHERE `a` IS NULL; -- disable the enforcement of foreign-keys constraints PRAGMA foreign_keys = off; -- create "new_users" table CREATE TABLE `new_users` (`a` int NOT NULL); -- copy rows from old table "users" to new temporary table "new_users" INSERT INTO `new_users` (`a`) SELECT `a` FROM `users`; -- drop "users" table after copying rows 
DROP TABLE `users`; -- rename temporary table "new_users" to "users" ALTER TABLE `new_users` RENAME TO `users`; -- enable back the enforcement of foreign-keys constraints PRAGMA foreign_keys = on; atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-lint-project.txt000066400000000000000000000013711431455511600274520ustar00rootroot00000000000000atlas migrate lint --dir file://migrations --dev-url URL --env=log_name > got.txt cmp got.txt expected1.txt atlas migrate lint --dir file://migrations --dev-url URL --env=log_count > got.txt cmp got.txt expected2.txt -- migrations/1.sql -- CREATE TABLE users (id int); CREATE TABLE pets (id int); ALTER TABLE users RENAME COLUMN id TO oid; -- migrations/2.sql -- DROP TABLE users; -- migrations/3.sql -- DROP TABLE pets; -- expected1.txt -- 3.sql -- expected2.txt -- 2 -- atlas.hcl -- lint { latest = 1 destructive { error = false } } env "log_name" { lint { log = "{{ range .Files }}{{ println .Name }}{{ end }}" } } env "log_count" { lint { latest = 2 log = "{{ len .Files | println }}" } }atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-project-multifile.txt000066400000000000000000000021001431455511600304650ustar00rootroot00000000000000atlas migrate diff --env local cmpmig 0 diff.sql # reset exec rm -rf migrations atlas migrate diff --env src_list cmpmig 0 diff.sql # reset exec rm -rf migrations atlas migrate diff --env single_elem cmpmig 0 diff.sql -- atlas.hcl -- env "local" { url = "URL" dev = "sqlite://test?mode=memory&_fk=1" src = "./schema" migration { dir = "file://migrations" format = atlas } } env "src_list" { url = "URL" dev = "sqlite://test?mode=memory&_fk=1" src = [ "./schema/1.hcl", "./schema/2.hcl", ] migration { dir = "file://migrations" format = atlas } } env "single_elem" { url = "URL" dev = "sqlite://test?mode=memory&_fk=1" src = [ "./schema/", ] migration { dir = "file://migrations" format = atlas } } -- schema/1.hcl -- table "users" { schema = schema.main column "id" { null = false type = int 
} } -- schema/2.hcl -- schema "main" { } -- diff.sql -- -- create "users" table CREATE TABLE `users` (`id` int NOT NULL); -- empty.sql -- atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-project.txt000066400000000000000000000013421431455511600265040ustar00rootroot00000000000000exec mkdir migrations atlas migrate diff --env local cmpmig 0 diff.sql atlas migrate validate --env local atlas migrate new 1 --env local cmpmig 1 empty.sql exec touch migrations/2.sql ! atlas migrate validate --env local stderr 'Error: checksum mismatch' atlas migrate hash --env local atlas migrate validate --env local -- atlas.hcl -- env "local" { url = "URL" dev = "sqlite://test?mode=memory&_fk=1" src = "./1.hcl" migration { dir = "file://migrations" format = atlas } } -- 1.hcl -- table "users" { schema = schema.main column "id" { null = false type = int } } schema "main" { } -- diff.sql -- -- create "users" table CREATE TABLE `users` (`id` int NOT NULL); -- empty.sql -- atlas-0.7.2/internal/integration/testdata/sqlite/cli-migrate-set.txt000066400000000000000000000036051431455511600256350ustar00rootroot00000000000000! atlas migrate set stderr 'Error: accepts 1 arg\(s\), received 0' ! atlas migrate set foo bar stderr 'Error: accepts 1 arg\(s\), received 2' ! atlas migrate set 0 stderr 'Error: checksum file not found' stdout 'You have a checksum error in your migration directory.' atlas migrate hash # Works on fresh database. atlas migrate set 1 --url URL atlas migrate apply 1 --url URL --dry-run stdout 'Migrating to version 2 from 1' # Set to second last migration. atlas migrate set 2 --url URL atlas migrate apply 1 --url URL --dry-run stdout 'Migrating to version 3 from 2' # Have one migration applied, manual do second, set revision and continue apply. 
clearSchema atlas migrate apply 1 --url URL stdout 'Migrating to version 1' atlas migrate set 2 --url URL atlas migrate apply 1 --url URL --dry-run stdout 'Migrating to version 3 from 2' # Set to non-existing migration requires flag. ! atlas migrate set 4 --url URL stderr 'Error: migration with version "4" not found' # If set to last version, nothing to do. atlas migrate set 3 --url URL atlas migrate apply --url URL stdout 'No migration files to execute' # Partially applied (error), fix with set. clearSchema mv broken.sql migrations/4.sql atlas migrate hash ! atlas migrate apply --url URL --tx-mode none stdout 'Migrating to version 4' atlas migrate set 4 --url URL atlas migrate apply --url URL stdout 'No migration files to execute' -- migrations/1_first.sql -- CREATE TABLE `users` (`id` bigint NOT NULL, `age` bigint NOT NULL, `name` varchar(255) NOT NULL, PRIMARY KEY (`id`)); -- migrations/2_second.sql -- ALTER TABLE `users` ADD UNIQUE INDEX `age` (`age`); -- migrations/3_third.sql -- CREATE TABLE `pets` (`id` bigint NOT NULL, `name` varchar(255) NOT NULL, PRIMARY KEY (`id`)); -- broken.sql -- CREATE TABLE `vets` (`id` bigint NOT NULL, `name` varchar(255) NOT NULL, PRIMARY KEY(`id`)); asdf ALTER TABLE `users` ADD UNIQUE INDEX `name` (`name`); atlas-0.7.2/internal/integration/testdata/sqlite/cli-project-vars.txt000066400000000000000000000013311431455511600260250ustar00rootroot00000000000000! 
atlas schema apply --env local --auto-approve stderr 'Error: missing value for required variable "user_status_default"' atlas schema apply --env local --auto-approve --var user_status_default=hello cmpshow users expected.sql -- atlas.hcl -- variable "user_status_default" { type = string } env "local" { url = "URL" src = "./1.hcl" def_val = var.user_status_default } -- 1.hcl -- variable "def_val" { type = string } table "users" { schema = schema.main column "id" { null = false type = int } column "status" { null = true type = text default = var.def_val } } schema "main" { } -- expected.sql -- CREATE TABLE `users` (`id` int NOT NULL, `status` text NULL DEFAULT 'hello')atlas-0.7.2/internal/integration/testdata/sqlite/cli-schema-project-file.txt000066400000000000000000000016101431455511600272270ustar00rootroot00000000000000! atlas schema inspect stderr '"url" not set' ! atlas schema apply -f 1.hcl stderr '"url" not set' ! atlas schema apply --url URL stderr '"file" not set' ! atlas schema apply -f atlas.hcl -u URL stderr 'cannot parse project file' # Verify "url" and "src" attributes of the env are used. atlas schema apply --env local --auto-approve atlas schema inspect --env local > inspected.hcl cmp 1.hcl inspected.hcl # Verify the precedence of flag over project file. atlas schema apply --env local --auto-approve -f 2.hcl atlas schema inspect --env local > inspected.hcl cmp 2.hcl inspected.hcl -- atlas.hcl -- env "local" { url = "URL" src = "./1.hcl" } -- 1.hcl -- table "users" { schema = schema.main column "id" { null = false type = int } } schema "main" { } -- 2.hcl -- table "other" { schema = schema.main column "id" { null = false type = int } } schema "main" { }atlas-0.7.2/internal/integration/testdata/sqlite/column-generated.txt000066400000000000000000000041121431455511600260720ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql # Insert a few records to the table, and check the # migration process using a temporary table. 
execsql 'INSERT INTO users (a) VALUES (1), (2), (3)' apply 2.hcl cmpshow users 2.sql apply 3.hcl cmpshow users 3.sql # Appending a new VIRTUAL column should use ALTER command. apply 4.hcl cmpshow users 4.sql -- 1.hcl -- schema "main" {} table "users" { schema = schema.main column "a" { type = int } column "b" { type = int as = "1" } column "c" { type = int as { expr = "2" type = STORED } } } -- 1.sql -- CREATE TABLE `users` (`a` int NOT NULL, `b` int NOT NULL AS (1) VIRTUAL, `c` int NOT NULL AS (2) STORED) -- 2.hcl -- schema "main" {} table "users" { schema = schema.main column "a" { type = int } column "b" { type = int as = "1" } column "c" { type = int as { expr = "2" type = VIRTUAL } } } -- 2.sql -- CREATE TABLE "users" (`a` int NOT NULL, `b` int NOT NULL AS (1) VIRTUAL, `c` int NOT NULL AS (2) VIRTUAL) -- 3.hcl -- schema "main" {} table "users" { schema = schema.main column "a" { type = int } column "b" { type = int as = "2" } column "c" { type = int as { expr = "3" type = VIRTUAL } } } -- 3.sql -- CREATE TABLE "users" (`a` int NOT NULL, `b` int NOT NULL AS (2) VIRTUAL, `c` int NOT NULL AS (3) VIRTUAL) -- 4.hcl -- schema "main" {} table "users" { schema = schema.main column "a" { type = int } column "b" { type = int as = "2" } column "c" { type = int as { expr = "3" type = VIRTUAL } } column "d" { type = int as { expr = "4" type = VIRTUAL } } } -- 4.sql -- CREATE TABLE "users" (`a` int NOT NULL, `b` int NOT NULL AS (2) VIRTUAL, `c` int NOT NULL AS (3) VIRTUAL, `d` int NOT NULL AS (4) VIRTUAL)atlas-0.7.2/internal/integration/testdata/sqlite/index-desc.txt000066400000000000000000000031231431455511600246650ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql # Drop the "DESC" option from the key part. apply 2.hcl cmpshow users 2.sql # Use of "columns" instead of "on" should not trigger a change. 
synced 2-no-change.hcl apply 3.hcl cmpshow users 3.sql -- 1.hcl -- schema "main" {} table "users" { schema = schema.main column "rank" { type = int } index "rank_idx" { on { desc = true column = table.users.column.rank } } } -- 1.sql -- CREATE TABLE `users` (`rank` int NOT NULL) CREATE INDEX `rank_idx` ON `users` (`rank` DESC) -- 2.hcl -- schema "main" {} table "users" { schema = schema.main column "rank" { type = int } index "rank_idx" { on { column = table.users.column.rank } } } -- 2.sql -- CREATE TABLE "users" (`rank` int NOT NULL) CREATE INDEX `rank_idx` ON `users` (`rank`) -- 2-no-change.hcl -- schema "main" {} table "users" { schema = schema.main column "rank" { type = int } index "rank_idx" { columns = [ table.users.column.rank, ] } } -- 3.hcl -- schema "main" {} table "users" { schema = schema.main column "rank" { type = int } column "score" { type = int } index "rank_score_idx" { on { column = table.users.column.rank } on { column = table.users.column.score desc = true } } } -- 3.sql -- CREATE TABLE "users" (`rank` int NOT NULL, `score` int NOT NULL) CREATE INDEX `rank_score_idx` ON `users` (`rank`, `score` DESC)atlas-0.7.2/internal/integration/testdata/sqlite/index-expr.txt000066400000000000000000000015371431455511600247340ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "main" {} table "users" { schema = schema.main column "first_name" { null = false type = text } column "last_name" { null = false type = text } index "full_name" { on { expr = "first_name || ' ' || last_name" } } } -- 1.sql -- CREATE TABLE `users` (`first_name` text NOT NULL, `last_name` text NOT NULL) CREATE INDEX `full_name` ON `users` ((first_name || ' ' || last_name)) -- 2.hcl -- schema "main" {} table "users" { schema = schema.main column "first_name" { null = false type = text } index "full_name" { on { expr = "first_name || '''s first name'" } } } -- 2.sql -- CREATE TABLE "users" (`first_name` text NOT NULL) CREATE 
INDEX `full_name` ON `users` ((first_name || '''s first name'))atlas-0.7.2/internal/integration/testdata/sqlite/index-partial.txt000066400000000000000000000016001431455511600254010ustar00rootroot00000000000000apply 1.hcl cmpshow users 1.sql apply 2.hcl cmpshow users 2.sql -- 1.hcl -- schema "main" {} table "users" { schema = schema.main column "name" { null = false type = text } column "active" { null = true type = boolean } index "users_name" { columns = [column.name] where = "active" } } -- 1.sql -- CREATE TABLE `users` (`name` text NOT NULL, `active` boolean NULL) CREATE INDEX `users_name` ON `users` (`name`) WHERE active -- 2.hcl -- schema "main" {} table "users" { schema = schema.main column "name" { null = false type = text } column "active" { null = true type = boolean } index "users_name" { columns = [column.name] where = "active AND name <> ''" } } -- 2.sql -- CREATE TABLE "users" (`name` text NOT NULL, `active` boolean NULL) CREATE INDEX `users_name` ON `users` (`name`) WHERE active AND name <> ''atlas-0.7.2/internal/integration/tidb_test.go000066400000000000000000001121201431455511600212750ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package integration import ( "context" "database/sql" "fmt" "log" "testing" "ariga.io/atlas/sql/mysql" "ariga.io/atlas/sql/schema" "entgo.io/ent/dialect" entsql "entgo.io/ent/dialect/sql" entschema "entgo.io/ent/dialect/sql/schema" "entgo.io/ent/entc/integration/ent" "entgo.io/ent/entc/integration/ent/migrate" _ "github.com/go-sql-driver/mysql" "github.com/stretchr/testify/require" ) var tidbTests = map[string]*myTest{ "tidb5": {port: 4309}, "tidb6": {port: 4310}, } func tidbRun(t *testing.T, fn func(*myTest)) { for version, tt := range tidbTests { if flagVersion == "" || flagVersion == version { t.Run(version, func(t *testing.T) { tt.once.Do(func() { var err error tt.version = version tt.rrw = &rrw{} tt.db, err = sql.Open("mysql", fmt.Sprintf("root@tcp(localhost:%d)/test?parseTime=True", tt.port)) if err != nil { log.Fatalln(err) } dbs = append(dbs, tt.db) // close connection after all tests have been run tt.drv, err = mysql.Open(tt.db) if err != nil { log.Fatalln(err) } }) tt := &myTest{T: t, db: tt.db, drv: tt.drv, version: version, port: tt.port, rrw: tt.rrw} fn(tt) }) } } } func TestTiDB_AddDropTable(t *testing.T) { tidbRun(t, func(t *myTest) { testAddDrop(t) }) } func TestTiDB_Relation(t *testing.T) { tidbRun(t, func(t *myTest) { testRelation(t) }) } func TestTiDB_AddIndexedColumns(t *testing.T) { tidbRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) usersT.Columns = append(usersT.Columns, &schema.Column{ Name: "a", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, Default: &schema.RawExpr{X: "10"}, }, &schema.Column{ Name: "b", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, Default: &schema.RawExpr{X: "10"}, }, &schema.Column{ Name: "c", Type: &schema.ColumnType{Raw: "bigint", Type: 
&schema.IntegerType{T: "bigint"}, Null: true}, Default: &schema.RawExpr{X: "10"}, }) parts := usersT.Columns[len(usersT.Columns)-3:] usersT.Indexes = append(usersT.Indexes, &schema.Index{ Unique: true, Name: "a_b_c_unique", Parts: []*schema.IndexPart{{C: parts[0]}, {C: parts[1]}, {C: parts[2]}}, }) changes := t.diff(t.loadUsers(), usersT) require.NotEmpty(t, changes, "usersT contains 2 new columns and 1 new index") t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) // In MySQL, dropping a column should remove it from the key. // However, on TiDB an explicit DROP/ADD INDEX is required. idx, ok := usersT.Index("a_b_c_unique") require.True(t, ok) idx.Parts = idx.Parts[:len(idx.Parts)-1] usersT.Columns = usersT.Columns[:len(usersT.Columns)-1] changes = t.diff(t.loadUsers(), usersT) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, t.loadUsers()) // Dropping a column from both table and index. usersT = t.loadUsers() idx, ok = usersT.Index("a_b_c_unique") require.True(t, ok) require.Len(t, idx.Parts, 2) usersT.Columns = usersT.Columns[:len(usersT.Columns)-1] idx.Parts = idx.Parts[:len(idx.Parts)-1] changes = t.diff(t.loadUsers(), usersT) require.Len(t, changes, 2) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, t.loadUsers()) // Dropping a column should remove // single-column keys as well. usersT = t.loadUsers() idx, ok = usersT.Index("a_b_c_unique") require.True(t, ok) require.Len(t, idx.Parts, 1) usersT.Columns = usersT.Columns[:len(usersT.Columns)-1] // In MySQL, dropping a column should remove its index. // However, on TiDB an explicit DROP INDEX is required. 
usersT.Indexes = nil changes = t.diff(t.loadUsers(), usersT) require.Len(t, changes, 2) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, t.loadUsers()) _, ok = t.loadUsers().Index("a_b_c_unique") require.False(t, ok) }) } func TestTiDB_AddColumns(t *testing.T) { tidbRun(t, func(t *myTest) { usersT := t.users() t.dropTables(usersT.Name) t.migrate(&schema.AddTable{T: usersT}) usersT.Columns = append( usersT.Columns, &schema.Column{Name: "a", Type: &schema.ColumnType{Raw: "tinyblob", Type: &schema.BinaryType{T: "tinyblob"}}}, &schema.Column{Name: "b", Type: &schema.ColumnType{Raw: "mediumblob", Type: &schema.BinaryType{T: "mediumblob"}}}, &schema.Column{Name: "c", Type: &schema.ColumnType{Raw: "blob", Type: &schema.BinaryType{T: "blob"}}}, &schema.Column{Name: "d", Type: &schema.ColumnType{Raw: "longblob", Type: &schema.BinaryType{T: "longblob"}}}, &schema.Column{Name: "e", Type: &schema.ColumnType{Raw: "binary", Type: &schema.BinaryType{T: "binary"}}}, &schema.Column{Name: "f", Type: &schema.ColumnType{Raw: "varbinary(255)", Type: &schema.BinaryType{T: "varbinary(255)"}}, Default: &schema.Literal{V: "foo"}}, &schema.Column{Name: "g", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar", Size: 255}}}, &schema.Column{Name: "h", Type: &schema.ColumnType{Raw: "varchar(255)", Type: &schema.StringType{T: "varchar(255)"}}}, &schema.Column{Name: "i", Type: &schema.ColumnType{Raw: "tinytext", Type: &schema.StringType{T: "tinytext"}}}, &schema.Column{Name: "j", Type: &schema.ColumnType{Raw: "mediumtext", Type: &schema.StringType{T: "mediumtext"}}}, &schema.Column{Name: "k", Type: &schema.ColumnType{Raw: "text", Type: &schema.StringType{T: "text"}}}, &schema.Column{Name: "l", Type: &schema.ColumnType{Raw: "longtext", Type: &schema.StringType{T: "longtext"}}}, &schema.Column{Name: "m", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "decimal", Precision: 10, Scale: 6}}}, &schema.Column{Name: "m1", Type: &schema.ColumnType{Type: 
&schema.DecimalType{T: "decimal"}}}, &schema.Column{Name: "m2", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "decimal", Precision: 2}}}, &schema.Column{Name: "n", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric", Precision: 10, Scale: 2}}}, &schema.Column{Name: "n1", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric"}}}, &schema.Column{Name: "n2", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric", Precision: 2}}}, &schema.Column{Name: "o", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float", Precision: 2}}}, &schema.Column{Name: "p", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double", Precision: 14}}}, &schema.Column{Name: "q", Type: &schema.ColumnType{Type: &schema.FloatType{T: "real", Precision: 14}}}, &schema.Column{Name: "r", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}, &schema.Column{Name: "s", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, &schema.Column{Name: "t", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "smallint"}}}, &schema.Column{Name: "u", Type: &schema.ColumnType{Type: &schema.EnumType{T: "enum", Values: []string{"a", "b", "c"}}}}, &schema.Column{Name: "v", Type: &schema.ColumnType{Type: &schema.StringType{T: "char(36)"}}}, &schema.Column{Name: "x", Type: &schema.ColumnType{Type: &schema.SpatialType{T: "line"}}}, &schema.Column{Name: "z", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp"}}, Default: &schema.RawExpr{X: "CURRENT_TIMESTAMP"}}, ) changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 27) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) }) } func TestTiDB_ColumnInt(t *testing.T) { t.Run("ChangeType", func(t *testing.T) { tidbRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) for _, 
typ := range []string{"tinyint", "smallint", "mediumint", "bigint"} { usersT.Columns[0].Type.Type = &schema.IntegerType{T: typ} changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) } }) }) t.Run("ChangeDefault", func(t *testing.T) { tidbRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}, Default: &schema.RawExpr{X: "1"}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) for _, x := range []string{"2", "'3'", "10.1"} { usersT.Columns[0].Default.(*schema.RawExpr).X = x changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) } }) }) } func TestTiDB_ColumnString(t *testing.T) { t.Run("ChangeType", func(t *testing.T) { tidbRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(20)"}}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) for _, typ := range []string{"varchar(255)", "char(120)", "tinytext", "mediumtext", "longtext"} { usersT.Columns[0].Type.Type = &schema.StringType{T: typ} changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) } }) }) t.Run("AddWithDefault", func(t *testing.T) { tidbRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "a", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(255)"}}, Default: &schema.RawExpr{X: "hello"}}, {Name: "b", Type: &schema.ColumnType{Type: &schema.StringType{T: "char(255)"}}, Default: &schema.RawExpr{X: "'world'"}}, }, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) 
ensureNoChange(t, usersT) }) }) t.Run("ChangeDefault", func(t *testing.T) { tidbRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(255)"}}, Default: &schema.RawExpr{X: "hello"}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) for _, x := range []string{"2", "'3'", "'world'"} { usersT.Columns[0].Default.(*schema.RawExpr).X = x changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) } }) }) } func TestTiDB_ColumnBool(t *testing.T) { t.Run("Add", func(t *testing.T) { tidbRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "a", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}}, {Name: "b", Type: &schema.ColumnType{Type: &schema.BoolType{T: "boolean"}}}, {Name: "c", Type: &schema.ColumnType{Type: &schema.BoolType{T: "tinyint"}}}, {Name: "d", Type: &schema.ColumnType{Type: &schema.BoolType{T: "tinyint(1)"}}}, }, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) }) }) t.Run("AddWithDefault", func(t *testing.T) { tidbRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "a", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "1"}}, {Name: "b", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "0"}}, {Name: "c", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "'1'"}}, {Name: "d", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "'0'"}}, {Name: "e", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "true"}}, {Name: "f", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, 
Default: &schema.RawExpr{X: "false"}}, {Name: "g", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "TRUE"}}, {Name: "h", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "FALSE"}}, }, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) }) }) t.Run("ChangeDefault", func(t *testing.T) { tidbRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "a", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}, Default: &schema.RawExpr{X: "1"}}, }, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) // Change default from "true" to "false" to "true". for _, x := range []string{"false", "true"} { usersT.Columns[0].Default.(*schema.RawExpr).X = x changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) } }) }) t.Run("ChangeNull", func(t *testing.T) { tidbRun(t, func(t *myTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "a", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}, Null: true}}, }, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) ensureNoChange(t, usersT) usersT.Columns[0].Type.Null = false changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 1) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) }) }) } func TestTiDB_ForeignKey(t *testing.T) { t.Run("ChangeAction", func(t *testing.T) { tidbRun(t, func(t *myTest) { usersT, postsT := t.users(), t.posts() t.dropTables(postsT.Name, usersT.Name) t.migrate(&schema.AddTable{T: usersT}, &schema.AddTable{T: postsT}) ensureNoChange(t, postsT, usersT) postsT = t.loadPosts() fk, ok := postsT.ForeignKey("author_id") require.True(t, ok) fk.OnUpdate = schema.SetNull fk.OnDelete = schema.Cascade changes := 
t.diff(t.loadPosts(), postsT) require.Len(t, changes, 1) modifyF, ok := changes[0].(*schema.ModifyForeignKey) require.True(t, ok) require.True(t, modifyF.Change == schema.ChangeUpdateAction|schema.ChangeDeleteAction) t.migrate(&schema.ModifyTable{T: postsT, Changes: changes}) ensureNoChange(t, postsT, usersT) }) }) t.Run("UnsetNull", func(t *testing.T) { tidbRun(t, func(t *myTest) { usersT, postsT := t.users(), t.posts() t.dropTables(postsT.Name, usersT.Name) fk, ok := postsT.ForeignKey("author_id") require.True(t, ok) fk.OnDelete = schema.SetNull fk.OnUpdate = schema.SetNull t.migrate(&schema.AddTable{T: usersT}, &schema.AddTable{T: postsT}) ensureNoChange(t, postsT, usersT) postsT = t.loadPosts() c, ok := postsT.Column("author_id") require.True(t, ok) c.Type.Null = false fk, ok = postsT.ForeignKey("author_id") require.True(t, ok) fk.OnUpdate = schema.NoAction fk.OnDelete = schema.NoAction changes := t.diff(t.loadPosts(), postsT) require.Len(t, changes, 2) modifyC, ok := changes[0].(*schema.ModifyColumn) require.True(t, ok) require.True(t, modifyC.Change == schema.ChangeNull) modifyF, ok := changes[1].(*schema.ModifyForeignKey) require.True(t, ok) require.True(t, modifyF.Change == schema.ChangeUpdateAction|schema.ChangeDeleteAction) t.migrate(&schema.ModifyTable{T: postsT, Changes: changes}) ensureNoChange(t, postsT, usersT) }) }) t.Run("AddDrop", func(t *testing.T) { tidbRun(t, func(t *myTest) { usersT := t.users() t.dropTables(usersT.Name) t.migrate(&schema.AddTable{T: usersT}) ensureNoChange(t, usersT) // Add foreign key. 
usersT.Columns = append(usersT.Columns, &schema.Column{ Name: "spouse_id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}, }) usersT.ForeignKeys = append(usersT.ForeignKeys, &schema.ForeignKey{ Symbol: "spouse_id", Table: usersT, Columns: usersT.Columns[len(usersT.Columns)-1:], RefTable: usersT, RefColumns: usersT.Columns[:1], OnDelete: schema.NoAction, }) changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 2) addC, ok := changes[0].(*schema.AddColumn) require.True(t, ok) require.Equal(t, "spouse_id", addC.C.Name) addF, ok := changes[1].(*schema.AddForeignKey) require.True(t, ok) require.Equal(t, "spouse_id", addF.F.Symbol) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) // Drop foreign keys. usersT.Columns = usersT.Columns[:len(usersT.Columns)-1] usersT.ForeignKeys = usersT.ForeignKeys[:len(usersT.ForeignKeys)-1] changes = t.diff(t.loadUsers(), usersT) require.Len(t, changes, 2) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) }) }) } func TestTiDB_HCL_Realm(t *testing.T) { tidbRun(t, func(t *myTest) { t.dropSchemas("second") realm := t.loadRealm() hcl, err := mysql.MarshalHCL(realm) require.NoError(t, err) wa := string(hcl) + ` schema "second" { } ` t.applyRealmHcl(wa) realm, err = t.drv.InspectRealm(context.Background(), &schema.InspectRealmOption{}) require.NoError(t, err) _, ok := realm.Schema("test") require.True(t, ok) _, ok = realm.Schema("second") require.True(t, ok) }) } func TestTiDB_DefaultsHCL(t *testing.T) { n := "atlas_defaults" tidbRun(t, func(t *myTest) { ddl := ` create table atlas_defaults ( string varchar(255) default "hello_world", quoted varchar(100) default 'never say "never"', tBit bit(10) default b'10101', ts timestamp default CURRENT_TIMESTAMP, number int default 42 ) ` t.dropTables(n) _, err := t.db.Exec(ddl) require.NoError(t, err) realm := t.loadRealm() spec, err := mysql.MarshalHCL(realm.Schemas[0]) 
require.NoError(t, err) var s schema.Realm err = mysql.EvalHCLBytes(spec, &s, nil) require.NoError(t, err) t.dropTables(n) t.applyHcl(string(spec)) ensureNoChange(t, realm.Schemas[0].Tables[0]) }) } func TestTiDB_CLI_MultiSchema(t *testing.T) { h := ` schema "test" { charset = "%s" collation = "%s" } table "users" { schema = schema.test column "id" { type = int } primary_key { columns = [table.users.column.id] } } schema "test2" { charset = "%s" collation = "%s" } table "users" { schema = schema.test2 column "id" { type = int } primary_key { columns = [table.users.column.id] } }` t.Run("SchemaInspect", func(t *testing.T) { tidbRun(t, func(t *myTest) { t.dropSchemas("test2") t.dropTables("users") attrs := t.defaultAttrs() charset, collate := attrs[0].(*schema.Charset), attrs[1].(*schema.Collation) testCLIMultiSchemaInspect(t, fmt.Sprintf(h, charset.V, collate.V, charset.V, collate.V), t.url(""), []string{"test", "test2"}, mysql.EvalHCL) }) }) t.Run("SchemaApply", func(t *testing.T) { tidbRun(t, func(t *myTest) { t.dropSchemas("test2") t.dropTables("users") attrs := t.defaultAttrs() charset, collate := attrs[0].(*schema.Charset), attrs[1].(*schema.Collation) testCLIMultiSchemaApply(t, fmt.Sprintf(h, charset.V, collate.V, charset.V, collate.V), t.url(""), []string{"test", "test2"}, mysql.EvalHCL) }) }) } func TestTiDB_CLI(t *testing.T) { h := ` schema "test" { charset = "%s" collation = "%s" } table "users" { schema = schema.test column "id" { type = int } primary_key { columns = [table.users.column.id] } }` t.Run("SchemaInspect", func(t *testing.T) { tidbRun(t, func(t *myTest) { attrs := t.defaultAttrs() charset, collate := attrs[0].(*schema.Charset), attrs[1].(*schema.Collation) testCLISchemaInspect(t, fmt.Sprintf(h, charset.V, collate.V), t.url("test"), mysql.EvalHCL) }) }) t.Run("SchemaApply", func(t *testing.T) { tidbRun(t, func(t *myTest) { attrs := t.defaultAttrs() charset, collate := attrs[0].(*schema.Charset), attrs[1].(*schema.Collation) 
testCLISchemaApply(t, fmt.Sprintf(h, charset.V, collate.V), t.url("test")) }) }) t.Run("SchemaApplyWithVars", func(t *testing.T) { h := ` variable "tenant" { type = string } schema "tenant" { name = var.tenant } table "users" { schema = schema.tenant column "id" { type = int } } ` tidbRun(t, func(t *myTest) { testCLISchemaApply(t, h, t.url("test"), "--var", "tenant=test") }) }) t.Run("SchemaApplyDryRun", func(t *testing.T) { tidbRun(t, func(t *myTest) { attrs := t.defaultAttrs() charset, collate := attrs[0].(*schema.Charset), attrs[1].(*schema.Collation) testCLISchemaApplyDry(t, fmt.Sprintf(h, charset.V, collate.V), t.url("test")) }) }) t.Run("SchemaDiffRun", func(t *testing.T) { tidbRun(t, func(t *myTest) { testCLISchemaDiff(t, t.url("test")) }) }) } func TestTiDB_HCL(t *testing.T) { full := ` schema "test" { } table "users" { schema = schema.test column "id" { type = int } primary_key { columns = [table.users.column.id] } } table "posts" { schema = schema.test column "id" { type = int } column "author_id" { type = int } foreign_key "author" { columns = [ table.posts.column.author_id, ] ref_columns = [ table.users.column.id, ] } primary_key { columns = [table.users.column.id] } } ` empty := ` schema "test" { } ` tidbRun(t, func(t *myTest) { testHCLIntegration(t, full, empty) }) } func TestTiDB_Ent_EntEngine(t *testing.T) { tidbRun(t, func(t *myTest) { testEntIntegration(t, dialect.MySQL, t.db, migrate.WithForeignKeys(false)) }) } func TestTiDB_Ent_AtlasEngine(t *testing.T) { tidbRun(t, func(t *myTest) { ctx := context.Background() drv := entsql.OpenDB(dialect.MySQL, t.db) client := ent.NewClient(ent.Driver(drv)) require.NoError(t, client.Schema.Create(ctx, entschema.WithAtlas(true))) sanity(client) realm := t.loadRealm() ensureNoChange(t, realm.Schemas[0].Tables...) // Drop tables. changes := make([]schema.Change, len(realm.Schemas[0].Tables)) for i, t := range realm.Schemas[0].Tables { changes[i] = &schema.DropTable{T: t} } t.migrate(changes...) // Add tables. 
for i, t := range realm.Schemas[0].Tables { changes[i] = &schema.AddTable{T: t} } t.migrate(changes...) ensureNoChange(t, realm.Schemas[0].Tables...) sanity(client) // Drop tables. for i, t := range realm.Schemas[0].Tables { changes[i] = &schema.DropTable{T: t} } t.migrate(changes...) }) } func TestTiDB_Sanity(t *testing.T) { n := "atlas_types_sanity" t.Run("Common", func(t *testing.T) { ddl := ` create table atlas_types_sanity ( tBit bit(10) default b'1000000001' null, tInt int(10) default 4 not null, tTinyInt tinyint(10) default 8 null, tSmallInt smallint(10) default 2 null, tMediumInt mediumint(10) default 11 null, tBigInt bigint(10) default 4 null, tDecimal decimal default 4 null, tNumeric numeric default 4 not null, tFloat float default 4 null, tDouble double(10, 0) default 4 null, tReal double(10, 0) default 4 null, tTimestamp timestamp default CURRENT_TIMESTAMP null, tTimestampFraction timestamp(6) default CURRENT_TIMESTAMP(6) null, tTimestampOnUpdate timestamp default CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP null, tTimestampFractionOnUpdate timestamp(6) default CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6) null, tDate date null, tTime time null, tDateTime datetime null, tYear year null, tVarchar varchar(10) default 'Titan' null, tChar char(25) default 'Olimpia' not null, tVarBinary varbinary(30) default 'Titan' null, tBinary binary(5) default 'Titan' null, tBlob blob(5) default null, tTinyBlob tinyblob null, tMediumBlob mediumblob default null, tLongBlob longblob default null, tText text(13) default null, tTinyText tinytext default null, tMediumText mediumtext default null, tLongText longtext default null, tEnum enum('a','b') default null, tSet set('a','b') default null ) CHARSET = latin1; ` tidbRun(t, func(t *myTest) { t.dropTables(n) _, err := t.db.Exec(ddl) require.NoError(t, err) realm := t.loadRealm() require.Len(t, realm.Schemas, 1) ts, ok := realm.Schemas[0].Table(n) require.True(t, ok) expected := schema.Table{ Name: n, Attrs: 
[]schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{ V: "latin1_bin", }, }, Schema: realm.Schemas[0], Columns: []*schema.Column{ { Name: "tBit", Type: &schema.ColumnType{Type: &mysql.BitType{T: "bit", Size: 10}, Raw: "bit(10) unsigned", Null: true}, Default: &schema.Literal{V: "b'1000000001'"}, }, { Name: "tInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int", Unsigned: false}, Raw: t.valueByVersion(map[string]string{"mysql8": "int"}, "int(10)"), Null: false}, Default: &schema.Literal{V: "4"}, }, { Name: "tTinyInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "tinyint", Unsigned: false}, Raw: t.valueByVersion(map[string]string{"mysql8": "tinyint"}, "tinyint(10)"), Null: true}, Default: &schema.Literal{V: "8"}, }, { Name: "tSmallInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "smallint", Unsigned: false}, Raw: t.valueByVersion(map[string]string{"mysql8": "smallint"}, "smallint(10)"), Null: true}, Default: &schema.Literal{V: "2"}, }, { Name: "tMediumInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "mediumint", Unsigned: false}, Raw: t.valueByVersion(map[string]string{"mysql8": "mediumint"}, "mediumint(10)"), Null: true}, Default: &schema.Literal{V: "11"}, }, { Name: "tBigInt", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint", Unsigned: false}, Raw: t.valueByVersion(map[string]string{"mysql8": "bigint"}, "bigint(10)"), Null: true}, Default: &schema.Literal{V: "4"}, }, { Name: "tDecimal", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "decimal", Precision: 10}, Raw: "decimal(10,0)", Null: true}, Default: &schema.Literal{V: "4"}, }, { Name: "tNumeric", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "decimal", Precision: 10}, Raw: "decimal(10,0)", Null: false}, Default: &schema.Literal{V: "4"}, }, { Name: "tFloat", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float"}, Raw: "float", Null: true}, Default: &schema.Literal{V: "4"}, }, { Name: "tDouble", Type: &schema.ColumnType{Type: 
&schema.FloatType{T: "double"}, Raw: "double", Null: true}, Default: &schema.Literal{V: "4"}, }, { Name: "tReal", Type: &schema.ColumnType{Type: &schema.FloatType{T: "double"}, Raw: "double", Null: true}, Default: &schema.Literal{V: "4"}, }, { Name: "tTimestamp", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp"}, Raw: "timestamp", Null: true}, Default: &schema.RawExpr{ X: "CURRENT_TIMESTAMP", }, }, { Name: "tTimestampFraction", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp", Precision: intp(6)}, Raw: "timestamp(6)", Null: true}, Default: &schema.RawExpr{ X: "CURRENT_TIMESTAMP(6)", }, }, { Name: "tTimestampOnUpdate", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp"}, Raw: "timestamp", Null: true}, Default: &schema.RawExpr{ X: "CURRENT_TIMESTAMP", }, Attrs: []schema.Attr{ &mysql.OnUpdate{ A: "CURRENT_TIMESTAMP", }, }, }, { Name: "tTimestampFractionOnUpdate", Type: &schema.ColumnType{Type: &schema.TimeType{T: "timestamp", Precision: intp(6)}, Raw: "timestamp(6)", Null: true}, Default: &schema.RawExpr{ X: "CURRENT_TIMESTAMP(6)", }, Attrs: []schema.Attr{ &mysql.OnUpdate{ A: "CURRENT_TIMESTAMP(6)", }, }, }, { Name: "tDate", Type: &schema.ColumnType{Type: &schema.TimeType{T: "date"}, Raw: "date", Null: true}, }, { Name: "tTime", Type: &schema.ColumnType{Type: &schema.TimeType{T: "time"}, Raw: "time", Null: true}, }, { Name: "tDateTime", Type: &schema.ColumnType{Type: &schema.TimeType{T: "datetime"}, Raw: "datetime", Null: true}, }, { Name: "tYear", Type: &schema.ColumnType{Type: &schema.TimeType{T: "year", Precision: intp(t.intByVersion(map[string]int{"mysql8": 0}, 4))}, Raw: t.valueByVersion(map[string]string{"mysql8": "year"}, "year(4) unsigned"), Null: true}, }, { Name: "tVarchar", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar", Size: 10}, Raw: "varchar(10)", Null: true}, Default: &schema.Literal{V: t.quoted("Titan")}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_bin"}, }, 
}, { Name: "tChar", Type: &schema.ColumnType{Type: &schema.StringType{T: "char", Size: 25}, Raw: "char(25)", Null: false}, Default: &schema.Literal{V: t.quoted("Olimpia")}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_bin"}, }, }, { Name: "tVarBinary", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "varbinary", Size: intp(30)}, Raw: "varbinary(30)", Null: true}, Default: &schema.Literal{V: t.valueByVersion(map[string]string{"mysql8": "0x546974616E"}, t.quoted("Titan"))}, }, { Name: "tBinary", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "binary", Size: intp(5)}, Raw: "binary(5)", Null: true}, Default: &schema.Literal{V: t.valueByVersion(map[string]string{"mysql8": "0x546974616E"}, t.quoted("Titan"))}, }, { Name: "tBlob", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "tinyblob"}, Raw: "tinyblob", Null: true}, }, { Name: "tTinyBlob", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "tinyblob"}, Raw: "tinyblob", Null: true}, }, { Name: "tMediumBlob", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "mediumblob"}, Raw: "mediumblob", Null: true}, }, { Name: "tLongBlob", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "longblob"}, Raw: "longblob", Null: true}, }, { Name: "tText", Type: &schema.ColumnType{Type: &schema.StringType{T: "tinytext"}, Raw: "tinytext", Null: true}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_bin"}, }, }, { Name: "tTinyText", Type: &schema.ColumnType{Type: &schema.StringType{T: "tinytext"}, Raw: "tinytext", Null: true}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_bin"}, }, }, { Name: "tMediumText", Type: &schema.ColumnType{Type: &schema.StringType{T: "mediumtext", Size: 0}, Raw: "mediumtext", Null: true}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_bin"}, }, }, { Name: "tLongText", Type: &schema.ColumnType{Type: &schema.StringType{T: "longtext", Size: 0}, Raw: 
"longtext", Null: true}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_bin"}, }, }, { Name: "tEnum", Type: &schema.ColumnType{Type: &schema.EnumType{T: "enum", Values: []string{"a", "b"}}, Raw: "enum('a','b')", Null: true}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_bin"}, }, }, { Name: "tSet", Type: &schema.ColumnType{Type: &mysql.SetType{Values: []string{"a", "b"}}, Raw: "set('a','b')", Null: true}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_bin"}, }, }, }, } rmCreateStmt(ts) require.EqualValues(t, &expected, ts) t.hclDriftTest(n, realm, expected) }) }) t.Run("JSON", func(t *testing.T) { ddl := ` create table atlas_types_sanity ( tJSON json default null ) CHARSET = latin1; ` tidbRun(t, func(t *myTest) { t.dropTables(n) _, err := t.db.Exec(ddl) require.NoError(t, err) realm := t.loadRealm() require.Len(t, realm.Schemas, 1) ts, ok := realm.Schemas[0].Table(n) require.True(t, ok) expected := schema.Table{ Name: n, Attrs: func() []schema.Attr { return []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_bin"}, } }(), Schema: realm.Schemas[0], Columns: []*schema.Column{ {Name: "tJSON", Type: &schema.ColumnType{Type: &schema.JSONType{T: "json"}, Raw: "json", Null: true}}, }, } rmCreateStmt(ts) require.EqualValues(t, &expected, ts) }) }) t.Run("ImplicitIndexes", func(t *testing.T) { tidbRun(t, func(t *myTest) { testImplicitIndexes(t, t.db) }) }) t.Run("AltersOrder", func(t *testing.T) { ddl := ` create table tidb_alter_order( tBigInt bigint(10) default 4 null, INDEX i (tBigInt) ); ` tidbRun(t, func(t *myTest) { t.dropTables("tidb_alter_order") _, err := t.db.Exec(ddl) require.NoError(t, err) tbl := t.loadTable("tidb_alter_order") require.NotNil(t, tbl) to := schema.Table{ Name: "tidb_alter_order", Attrs: func() []schema.Attr { return []schema.Attr{ &schema.Collation{V: "utf8mb4_bin"}, &schema.Charset{V: "utf8mb4"}, } }(), Columns: 
[]*schema.Column{ { Name: "tBigInt2", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint", Unsigned: false}, Raw: t.valueByVersion(map[string]string{"mysql8": "bigint"}, "bigint(10)"), Null: true}, Default: &schema.Literal{V: "4"}, }, }, } to.AddIndexes( &schema.Index{Name: "i2", Parts: []*schema.IndexPart{ { C: to.Columns[0], Desc: true, }, }}) changes, err := t.drv.SchemaDiff(schema.New("test").AddTables(tbl), schema.New("test").AddTables(&to)) require.NoError(t, err) err = t.drv.ApplyChanges(context.Background(), changes) require.NoError(t, err) t.migrate() rmCreateStmt(tbl) }) }) } atlas-0.7.2/schemahcl/000077500000000000000000000000001431455511600145605ustar00rootroot00000000000000atlas-0.7.2/schemahcl/context.go000066400000000000000000000204671431455511600166040ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package schemahcl import ( "fmt" "reflect" "strconv" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/gohcl" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/zclconf/go-cty/cty" ) // varDef is an HCL resource that defines an input variable to the Atlas DDL document. type varDef struct { Name string `hcl:",label"` Type cty.Value `hcl:"type"` Default cty.Value `hcl:"default,optional"` } // setInputVals sets the input values into the evaluation context. 
HCL documents can define // input variables in the document body by defining "variable" blocks: // // variable "name" { // type = string // also supported: int, bool // default = "rotemtam" // } func (s *State) setInputVals(ctx *hcl.EvalContext, body hcl.Body, input map[string]string) error { var c struct { Vars []*varDef `hcl:"variable,block"` Remain hcl.Body `hcl:",remain"` } nctx := ctx.NewChild() nctx.Variables = map[string]cty.Value{ "string": capsuleTypeVal("string"), "int": capsuleTypeVal("int"), "bool": capsuleTypeVal("bool"), } if diag := gohcl.DecodeBody(body, nctx, &c); diag.HasErrors() { return diag } ctxVars := make(map[string]cty.Value) for _, v := range c.Vars { inputVal, ok := input[v.Name] if ok { ctyVal, err := readVar(v, inputVal) if err != nil { return fmt.Errorf("failed reading var: %w", err) } ctxVars[v.Name] = ctyVal continue } if v.Default == cty.NilVal { return fmt.Errorf("missing value for required variable %q", v.Name) } ctxVars[v.Name] = v.Default } mergeCtxVar(ctx, ctxVars) return nil } func mergeCtxVar(ctx *hcl.EvalContext, vals map[string]cty.Value) { const key = "var" v, ok := ctx.Variables[key] if ok { v.ForEachElement(func(key cty.Value, val cty.Value) (stop bool) { vals[key.AsString()] = val return false }) } ctx.Variables[key] = cty.ObjectVal(vals) } // readVar reads the raw inputVal as a cty.Value using the type definition on v. 
func readVar(v *varDef, inputVal string) (cty.Value, error) { et := v.Type.EncapsulatedValue() typ, ok := et.(*Type) if !ok { return cty.NilVal, fmt.Errorf("expected schemahcl.Type got %T", et) } switch typ.T { case "string": return cty.StringVal(inputVal), nil case "int": i, err := strconv.Atoi(inputVal) if err != nil { return cty.NilVal, err } return cty.NumberIntVal(int64(i)), nil case "bool": b, err := strconv.ParseBool(inputVal) if err != nil { return cty.NilVal, err } return cty.BoolVal(b), nil default: return cty.NilVal, fmt.Errorf("unknown type: %q", typ.T) } } func capsuleTypeVal(t string) cty.Value { return cty.CapsuleVal(ctyTypeSpec, &Type{T: t}) } func setBlockVars(ctx *hcl.EvalContext, b *hclsyntax.Body) (*hcl.EvalContext, error) { defs := defRegistry(b) vars, err := blockVars(b.Blocks, "", defs) if err != nil { return nil, err } if ctx.Variables == nil { ctx.Variables = make(map[string]cty.Value) } for k, v := range vars { ctx.Variables[k] = v } return ctx, nil } func blockVars(blocks hclsyntax.Blocks, parentAddr string, defs *blockDef) (map[string]cty.Value, error) { vars := make(map[string]cty.Value) for name, def := range defs.children { v := make(map[string]cty.Value) qv := make(map[string]map[string]cty.Value) blocks := blocksOfType(blocks, name) if len(blocks) == 0 { vars[name] = cty.NullVal(def.asCty()) continue } var unlabeled int for _, blk := range blocks { qualifier, blkName := blockName(blk) if blkName == "" { blkName = strconv.Itoa(unlabeled) unlabeled++ } attrs := attrMap(blk.Body.Attributes) // Fill missing attributes with zero values. for n := range def.fields { if _, ok := attrs[n]; !ok { attrs[n] = cty.NullVal(ctySchemaLit) } } self := addr(parentAddr, name, blkName, qualifier) attrs["__ref"] = cty.StringVal(self) varMap, err := blockVars(blk.Body.Blocks, self, def) if err != nil { return nil, err } // Merge children blocks in. 
for k, v := range varMap { attrs[k] = v } switch { case qualifier != "": obj := cty.ObjectVal(attrs) if _, ok := qv[qualifier]; !ok { qv[qualifier] = make(map[string]cty.Value) } qv[qualifier][blkName] = obj obj = cty.ObjectVal(qv[qualifier]) v[qualifier] = obj default: v[blkName] = cty.ObjectVal(attrs) } } if len(v) > 0 { vars[name] = cty.ObjectVal(v) } } return vars, nil } func addr(parentAddr, typeName, blkName, qualifier string) string { var prefixDot string if len(parentAddr) > 0 { prefixDot = "." } suffix := blkName if qualifier != "" { suffix = qualifier + "." + blkName } return fmt.Sprintf("%s%s$%s.%s", parentAddr, prefixDot, typeName, suffix) } func blockName(blk *hclsyntax.Block) (qualifier string, name string) { switch len(blk.Labels) { case 0: case 1: name = blk.Labels[0] default: qualifier = blk.Labels[0] name = blk.Labels[1] } return } func blocksOfType(blocks hclsyntax.Blocks, typeName string) []*hclsyntax.Block { var out []*hclsyntax.Block for _, block := range blocks { if block.Type == typeName { out = append(out, block) } } return out } func attrMap(attrs hclsyntax.Attributes) map[string]cty.Value { out := make(map[string]cty.Value) for _, v := range attrs { value, diag := v.Expr.Value(nil) if diag.HasErrors() { continue } literalValue, err := extractLiteralValue(value) if err != nil { continue } out[v.Name] = cty.CapsuleVal(ctySchemaLit, literalValue) } return out } // ctySchemaLit is a cty.Capsule type the encapsulates a schemahcl.LiteralValue. var ( ctySchemaLit = cty.CapsuleWithOps("lit", reflect.TypeOf(LiteralValue{}), &cty.CapsuleOps{ // ConversionFrom facilitates reading the encapsulated type as a string, as is needed, for example, // when interpolating it in a string expression. 
ConversionFrom: func(src cty.Type) func(any, cty.Path) (cty.Value, error) { if src != cty.String { return nil } return func(i any, path cty.Path) (cty.Value, error) { lit, ok := i.(*LiteralValue) if !ok { return cty.Value{}, fmt.Errorf("schemahcl: expected *schemahcl.LiteralValue got %T", i) } uq, err := strconv.Unquote(lit.V) if err != nil { return cty.StringVal(lit.V), nil } return cty.StringVal(uq), nil } }, }) ctyTypeSpec = cty.Capsule("type", reflect.TypeOf(Type{})) ctyRawExpr = cty.Capsule("raw_expr", reflect.TypeOf(RawExpr{})) ) // varBlock is the block type for variable declarations. const varBlock = "variable" // defRegistry returns a tree of blockDef structs representing the schema of the // blocks in the *hclsyntax.Body. The returned fields and children of each type // are an intersection of all existing blocks of the same type. func defRegistry(b *hclsyntax.Body) *blockDef { reg := &blockDef{ fields: make(map[string]struct{}), children: make(map[string]*blockDef), } for _, blk := range b.Blocks { // variable definition blocks are available in the HCL source but not reachable by reference. if blk.Type == varBlock { continue } reg.child(extractDef(blk, reg)) } return reg } // blockDef describes a type of block in the HCL document. type blockDef struct { name string fields map[string]struct{} parent *blockDef children map[string]*blockDef } // child updates the definition for the child type of the blockDef. func (t *blockDef) child(c *blockDef) { ex, ok := t.children[c.name] if !ok { t.children[c.name] = c return } for f := range c.fields { ex.fields[f] = struct{}{} } for _, c := range c.children { ex.child(c) } } // asCty returns a cty.Type representing the blockDef. 
func (t *blockDef) asCty() cty.Type { f := make(map[string]cty.Type) for attr := range t.fields { f[attr] = ctySchemaLit } f["__ref"] = cty.String for _, c := range t.children { f[c.name] = c.asCty() } return cty.Object(f) } func extractDef(blk *hclsyntax.Block, parent *blockDef) *blockDef { cur := &blockDef{ name: blk.Type, parent: parent, fields: make(map[string]struct{}), children: make(map[string]*blockDef), } for _, a := range blk.Body.Attributes { cur.fields[a.Name] = struct{}{} } for _, c := range blk.Body.Blocks { cur.child(extractDef(c, cur)) } return cur } atlas-0.7.2/schemahcl/context_test.go000066400000000000000000000242601431455511600176360ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package schemahcl import ( "reflect" "testing" "github.com/stretchr/testify/require" ) func TestReferences(t *testing.T) { f := ` backend "app" { image = "ariga/app:1.2.3" addr = "127.0.0.1:8081" } backend "admin" { image = "ariga/admin:1.2.3" addr = "127.0.0.1:8082" } endpoint "home" { path = "/" addr = backend.app.addr timeout_ms = config.defaults.timeout_ms retry = config.defaults.retry description = "default: ${config.defaults.description}" } endpoint "admin" { path = "/admin" addr = backend.admin.addr } config "defaults" { timeout_ms = 10 retry = false description = "generic" } ` type ( Backend struct { Name string `spec:",name"` Image string `spec:"image"` Addr string `spec:"addr"` } Endpoint struct { Name string `spec:",name"` Path string `spec:"path"` Addr string `spec:"addr"` TimeoutMs int `spec:"timeout_ms"` Retry bool `spec:"retry"` Desc string `spec:"description"` } ) var test struct { Backends []*Backend `spec:"backend"` Endpoints []*Endpoint `spec:"endpoint"` } err := New().EvalBytes([]byte(f), &test, nil) require.NoError(t, err) require.EqualValues(t, []*Endpoint{ { Name: 
"home", Path: "/", Addr: "127.0.0.1:8081", Retry: false, TimeoutMs: 10, Desc: "default: generic", }, { Name: "admin", Path: "/admin", Addr: "127.0.0.1:8082", }, }, test.Endpoints) } func TestUnlabeledBlockReferences(t *testing.T) { f := ` country "israel" { metadata { phone_prefix = "972" } metadata { phone_prefix = "123" } metadata "geo" { continent = "asia" } } metadata = country.israel.metadata.0 phone_prefix = country.israel.metadata.0.phone_prefix phone_prefix_2 = country.israel.metadata.1.phone_prefix continent = country.israel.metadata.geo.continent ` type ( Metadata struct { PhonePrefix string `spec:"phone_prefix"` Continent string `spec:"continent"` } Country struct { Metadata []*Metadata `spec:"metadata"` } Test struct { Countries []*Country `spec:"country"` MetadataRef *Ref `spec:"metadata"` PhonePrefix string `spec:"phone_prefix"` PhonePrefix2 string `spec:"phone_prefix_2"` Continent string `spec:"continent"` } ) var test Test err := New().EvalBytes([]byte(f), &test, nil) require.NoError(t, err) require.EqualValues(t, test, Test{ Countries: []*Country{ { Metadata: []*Metadata{ {PhonePrefix: "972"}, {PhonePrefix: "123"}, {Continent: "asia"}, }, }, }, MetadataRef: &Ref{V: "$country.israel.$metadata.0"}, PhonePrefix: "972", PhonePrefix2: "123", Continent: "asia", }) } func TestNestedReferences(t *testing.T) { f := ` country "israel" { city "tel_aviv" { phone_area_code = "03" } city "jerusalem" { phone_area_code = "02" } city "givatayim" { phone_area_code = country.israel.city.tel_aviv.phone_area_code } } ` type ( City struct { Name string `spec:",name"` PhoneAreaCode string `spec:"phone_area_code"` } Country struct { Name string `spec:",name"` Cities []*City `spec:"city"` } ) var test struct { Countries []*Country `spec:"country"` } err := New().EvalBytes([]byte(f), &test, nil) israel := &Country{ Name: "israel", Cities: []*City{ {Name: "tel_aviv", PhoneAreaCode: "03"}, {Name: "jerusalem", PhoneAreaCode: "02"}, {Name: "givatayim", PhoneAreaCode: "03"}, }, 
} require.NoError(t, err) require.EqualValues(t, israel, test.Countries[0]) } func TestBlockReference(t *testing.T) { f := `person "jon" { } pet "garfield" { type = "cat" owner = person.jon } ` type ( Person struct { Name string `spec:",name"` } Pet struct { Name string `spec:",name"` Type string `spec:"type"` Owner *Ref `spec:"owner"` } ) var test struct { People []*Person `spec:"person"` Pets []*Pet `spec:"pet"` } err := New().EvalBytes([]byte(f), &test, nil) require.NoError(t, err) require.EqualValues(t, &Pet{ Name: "garfield", Type: "cat", Owner: &Ref{V: "$person.jon"}, }, test.Pets[0]) marshal, err := Marshal(&test) require.NoError(t, err) require.EqualValues(t, f, string(marshal)) } func TestListRefs(t *testing.T) { f := ` user "simba" { } user "mufasa" { } group "lion_kings" { members = [ user.simba, user.mufasa, ] } ` type ( User struct { Name string `spec:",name"` } Group struct { Name string `spec:",name"` Members []*Ref `spec:"members"` } ) var test struct { Users []*User `spec:"user"` Groups []*Group `spec:"group"` } err := New().EvalBytes([]byte(f), &test, nil) require.NoError(t, err) require.EqualValues(t, &Group{ Name: "lion_kings", Members: []*Ref{ {V: "$user.simba"}, {V: "$user.mufasa"}, }, }, test.Groups[0]) _, err = Marshal(&test) require.NoError(t, err) } func TestNestedDifference(t *testing.T) { f := ` person "john" { nickname = "jonnie" hobby "hockey" { active = true } } person "jane" { nickname = "janie" hobby "football" { budget = 1000 } car "ferrari" { year = 1960 } } ` type ( Hobby struct { Name string `spec:",name"` Active bool `spec:"active"` Budget int `spec:"budget"` } Car struct { Name string `spec:",name"` Year int `spec:"year"` } Person struct { Name string `spec:",name"` Nickname string `spec:"nickname"` Hobbies []*Hobby `spec:"hobby"` Car *Car `spec:"car"` } ) var test struct { People []*Person `spec:"person"` } err := New().EvalBytes([]byte(f), &test, nil) require.NoError(t, err) john := &Person{ Name: "john", Nickname: "jonnie", 
Hobbies: []*Hobby{ {Name: "hockey", Active: true}, }, } require.EqualValues(t, john, test.People[0]) jane := &Person{ Name: "jane", Nickname: "janie", Hobbies: []*Hobby{ {Name: "football", Budget: 1000}, }, Car: &Car{ Name: "ferrari", Year: 1960, }, } require.EqualValues(t, jane, test.People[1]) } func TestSchemaRefParse(t *testing.T) { type Point struct { Z []*Ref `spec:"z"` } var test = struct { Points []*Point `spec:"point"` }{ Points: []*Point{ {Z: []*Ref{{V: "$a"}}}, {Z: []*Ref{{V: "b"}}}, }, } b, err := Marshal(&test) require.NoError(t, err) expected := `point { z = [a] } point { z = [b] } ` require.Equal(t, expected, string(b)) } func TestWithTypes(t *testing.T) { f := `first = int second = bool third = int(10) sized = varchar(255) variadic = enum("a","b","c") ` s := New( WithTypes( []*TypeSpec{ {Name: "bool", T: "bool"}, { Name: "int", T: "int", Attributes: []*TypeAttr{ {Name: "size", Kind: reflect.Int, Required: false}, {Name: "unsigned", Kind: reflect.Bool, Required: false}, }, }, { Name: "varchar", T: "varchar", Attributes: []*TypeAttr{ {Name: "size", Kind: reflect.Int, Required: false}, }, }, { Name: "enum", T: "enum", Attributes: []*TypeAttr{ {Name: "values", Kind: reflect.Slice, Required: false}, }, }, }, ), ) var test struct { First *Type `spec:"first"` Second *Type `spec:"second"` Third *Type `spec:"third"` Varchar *Type `spec:"sized"` Variadic *Type `spec:"variadic"` } err := s.EvalBytes([]byte(f), &test, nil) require.NoError(t, err) require.EqualValues(t, "int", test.First.T) require.EqualValues(t, "bool", test.Second.T) require.EqualValues(t, &Type{ T: "varchar", Attrs: []*Attr{ {K: "size", V: &LiteralValue{V: "255"}}, }, }, test.Varchar) require.EqualValues(t, &Type{ T: "enum", Attrs: []*Attr{ { K: "values", V: &ListValue{ V: []Value{ &LiteralValue{V: `"a"`}, &LiteralValue{V: `"b"`}, &LiteralValue{V: `"c"`}, }, }, }, }, }, test.Variadic) require.EqualValues(t, &Type{ T: "int", Attrs: []*Attr{ {K: "size", V: &LiteralValue{V: "10"}}, }, }, 
test.Third) after, err := s.MarshalSpec(&test) require.NoError(t, err) require.EqualValues(t, f, string(after)) } func TestEmptyStrSQL(t *testing.T) { s := New(WithTypes(nil)) h := `x = sql("")` err := s.EvalBytes([]byte(h), &struct{}{}, nil) require.ErrorContains(t, err, "empty expression") } func TestOptionalArgs(t *testing.T) { s := New( WithTypes([]*TypeSpec{ { T: "float", Name: "float", Attributes: []*TypeAttr{ {Name: "precision", Kind: reflect.Int, Required: false}, {Name: "scale", Kind: reflect.Int, Required: false}, }, }, }), ) f := `arg_0 = float arg_1 = float(10) arg_2 = float(10,2) ` var test struct { Arg0 *Type `spec:"arg_0"` Arg1 *Type `spec:"arg_1"` Arg2 *Type `spec:"arg_2"` } err := s.EvalBytes([]byte(f), &test, nil) require.NoError(t, err) require.Nil(t, test.Arg0.Attrs) require.EqualValues(t, []*Attr{ LitAttr("precision", "10"), }, test.Arg1.Attrs) require.EqualValues(t, []*Attr{ LitAttr("precision", "10"), LitAttr("scale", "2"), }, test.Arg2.Attrs) } func TestQualifiedRefs(t *testing.T) { h := `user "atlas" "cli" { version = "v0.3.9" } v = user.atlas.cli.version r = user.atlas.cli ` var test struct { V string `spec:"v"` R *Ref `spec:"r"` } err := New().EvalBytes([]byte(h), &test, nil) require.NoError(t, err) require.EqualValues(t, "v0.3.9", test.V) require.EqualValues(t, "$user.atlas.cli", test.R.V) } func TestInputValues(t *testing.T) { h := ` variable "name" { type = string } variable "default" { type = string default = "hello" } variable "int" { type = int } variable "bool" { type = bool } name = var.name default = var.default int = var.int bool = var.bool ` state := New() var test struct { Name string `spec:"name"` Default string `spec:"default"` Int int `spec:"int"` Bool bool `spec:"bool"` } err := state.EvalBytes([]byte(h), &test, map[string]string{ "name": "rotemtam", "int": "42", "bool": "true", }) require.NoError(t, err) require.EqualValues(t, "rotemtam", test.Name) require.EqualValues(t, "hello", test.Default) require.EqualValues(t, 42, 
test.Int) require.EqualValues(t, true, test.Bool) } atlas-0.7.2/schemahcl/extension.go000066400000000000000000000416311431455511600171300ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package schemahcl import ( "errors" "fmt" "reflect" "strconv" "strings" "sync" ) // Remainer is the interface that is implemented by types that can store // additional attributes and children resources. type Remainer interface { // Remain returns a resource representing any extra children and attributes // that are related to the struct but were not mapped to any of its fields. Remain() *Resource } // DefaultExtension can be embedded in structs that need basic default behavior. // For instance, DefaultExtension implements Remainer, and has a private *Resource // field that can store additional attributes and children that do not match the // structs fields. type DefaultExtension struct { Extra Resource } // Remain implements the Remainer interface. func (d *DefaultExtension) Remain() *Resource { return &d.Extra } // Attr returns the Attr by the provided name and reports whether it was found. func (d *DefaultExtension) Attr(name string) (*Attr, bool) { return d.Extra.Attr(name) } type registry map[string]any var ( extensions = make(registry) extensionsMu sync.RWMutex ) func (r registry) lookup(ext any) (string, bool) { extensionsMu.RLock() defer extensionsMu.RUnlock() for k, v := range r { if reflect.TypeOf(ext) == reflect.TypeOf(v) { return k, true } } return "", false } // implementers returns a slice of the names of the extensions that implement i. 
func (r registry) implementers(i reflect.Type) ([]string, error) { if i.Kind() != reflect.Interface { return nil, fmt.Errorf("schemahcl: expected interface got %s", i.Kind()) } var names []string for name, typ := range r { if reflect.TypeOf(typ).Implements(i) { names = append(names, name) } } return names, nil } // Register records the type of ext in the global extension registry. // If Register is called twice with the same name or if ext is nil, // it panics. func Register(name string, ext any) { extensionsMu.Lock() defer extensionsMu.Unlock() if ext == nil { panic("schemahcl: Register extension is nil") } if _, dup := extensions[name]; dup { panic("schemahcl: Register called twice for type " + name) } extensions[name] = ext } // As reads the attributes and children resources of the resource into the target struct. func (r *Resource) As(target any) error { if err := validateStructPtr(target); err != nil { return err } existingAttrs, existingChildren := existingElements(r) var seenName, seenQualifier bool v := reflect.ValueOf(target).Elem() for _, ft := range specFields(target) { field := v.FieldByName(ft.Name) switch { case ft.isName() && !hasAttr(r, ft.tag): if seenName { return errors.New("schemahcl: extension must have only one isName field") } seenName = true if field.Kind() != reflect.String { return errors.New("schemahcl: extension isName field must be of type string") } field.SetString(r.Name) case ft.isQualifier(): if seenQualifier { return errors.New("schemahcl: extension must have only one qualifier field") } seenQualifier = true field.SetString(r.Qualifier) case hasAttr(r, ft.tag): attr, _ := r.Attr(ft.tag) if err := setField(field, attr); err != nil { return err } delete(existingAttrs, attr.K) case ft.isInterfaceSlice(): elem := field.Type().Elem() impls, err := extensions.implementers(elem) if err != nil { return err } children := childrenOfType(r, impls...) 
slc := reflect.MakeSlice(reflect.SliceOf(elem), 0, len(children)) for _, c := range children { typ, ok := extensions[c.Type] if !ok { return fmt.Errorf("extension %q not registered", c.Type) } n := reflect.New(reflect.TypeOf(typ).Elem()) ext := n.Interface() if err := c.As(ext); err != nil { return err } slc = reflect.Append(slc, reflect.ValueOf(ext)) } field.Set(slc) for _, i := range impls { delete(existingChildren, i) } case ft.isInterface(): impls, err := extensions.implementers(ft.Type) if err != nil { return err } children := childrenOfType(r, impls...) if len(children) == 0 { continue } if len(children) > 1 { return fmt.Errorf("more than one blocks implement %q", ft.Type) } c := children[0] typ, ok := extensions[c.Type] if !ok { return fmt.Errorf("extension %q not registered", c.Type) } n := reflect.New(reflect.TypeOf(typ).Elem()) ext := n.Interface() if err := c.As(ext); err != nil { return err } field.Set(n) case isResourceSlice(field.Type()): if err := setChildSlice(field, childrenOfType(r, ft.tag)); err != nil { return err } delete(existingChildren, ft.tag) case isSingleResource(field.Type()): c := childrenOfType(r, ft.tag) if len(c) == 0 { continue } res := c[0] n := reflect.New(field.Type().Elem()) ext := n.Interface() if err := res.As(ext); err != nil { return err } field.Set(n) delete(existingChildren, ft.tag) } } rem, ok := target.(Remainer) if !ok { return nil } extras := rem.Remain() for attrName := range existingAttrs { attr, ok := r.Attr(attrName) if !ok { return fmt.Errorf("schemahcl: expected attr %q to exist", attrName) } extras.SetAttr(attr) } for childType := range existingChildren { children := childrenOfType(r, childType) extras.Children = append(extras.Children, children...) } return nil } // FinalName returns the final name for the resource by examining the struct tags for // the extension of the Resource's type. If no such extension is registered or the // extension struct does not have a name field, an error is returned. 
func (r *Resource) FinalName() (string, error) { extensionsMu.RLock() defer extensionsMu.RUnlock() t, ok := extensions[r.Type] if !ok { return "", fmt.Errorf("no extension registered for %q", r.Type) } for _, fd := range specFields(t) { if fd.isName() { if fd.tag != "" { name, ok := r.Attr(fd.tag) if ok { return name.String() } } return r.Name, nil } } return "", fmt.Errorf("extension %q has no name field", r.Type) } func validateStructPtr(target any) error { typeOf := reflect.TypeOf(target) if typeOf.Kind() != reflect.Ptr { return errors.New("schemahcl: expected target to be a pointer") } if typeOf.Elem().Kind() != reflect.Struct { return errors.New("schemahcl: expected target to be a pointer to a struct") } return nil } func existingElements(r *Resource) (attrs, children map[string]struct{}) { attrs, children = make(map[string]struct{}), make(map[string]struct{}) for _, ea := range r.Attrs { attrs[ea.K] = struct{}{} } for _, ec := range r.Children { children[ec.Type] = struct{}{} } return } func setChildSlice(field reflect.Value, children []*Resource) error { if field.Type().Kind() != reflect.Slice { return fmt.Errorf("schemahcl: expected field to be of kind slice") } if len(children) == 0 { return nil } typ := field.Type().Elem() slc := reflect.MakeSlice(reflect.SliceOf(typ), 0, len(children)) for _, c := range children { n := reflect.New(typ.Elem()) ext := n.Interface() if err := c.As(ext); err != nil { return err } slc = reflect.Append(slc, reflect.ValueOf(ext)) } field.Set(slc) return nil } func setField(field reflect.Value, attr *Attr) error { switch field.Kind() { case reflect.Slice: return setSliceAttr(field, attr) case reflect.String: s, err := attr.String() if err != nil { return fmt.Errorf("schemahcl: value of attr %q cannot be read as string: %w", attr.K, err) } field.SetString(s) case reflect.Int, reflect.Int64: i, err := attr.Int() if err != nil { return fmt.Errorf("schemahcl: value of attr %q cannot be read as integer: %w", attr.K, err) } 
field.SetInt(int64(i)) case reflect.Bool: b, err := attr.Bool() if err != nil { return fmt.Errorf("schemahcl: value of attr %q cannot be read as bool: %w", attr.K, err) } field.SetBool(b) case reflect.Ptr: if err := setPtr(field, attr.V); err != nil { return fmt.Errorf("schemahcl: failed setting pointer field %q: %w", attr.K, err) } case reflect.Interface: field.Set(reflect.ValueOf(attr.V)) default: return fmt.Errorf("schemahcl: unsupported field kind %q", field.Kind()) } return nil } func setPtr(field reflect.Value, val Value) error { rt := reflect.TypeOf(val) if field.Type() == rt { field.Set(reflect.ValueOf(val)) return nil } // If we are setting a Type field handle RawExpr and Ref specifically. if _, ok := field.Interface().(*Type); ok { switch t := val.(type) { case *RawExpr: field.Set(reflect.ValueOf(&Type{T: t.X})) return nil case *Ref: field.Set(reflect.ValueOf(&Type{ T: t.V, IsRef: true, })) return nil } } if field.IsNil() { field.Set(reflect.New(field.Type().Elem())) } switch e := field.Interface().(type) { case *bool: b, err := BoolVal(val) if err != nil { return err } *e = b case *string: s, err := StrVal(val) if err != nil { return err } *e = s case *LiteralValue: s, err := StrVal(val) if err != nil { return err } e.V = s case *Ref: s, err := StrVal(val) if err != nil { return err } e.V = s default: return fmt.Errorf("unhandled pointer type %T", val) } return nil } // setSliceAttr sets the value of attr to the slice field. This function expects both the target field // and the source attr to be slices. 
func setSliceAttr(field reflect.Value, attr *Attr) error { lst, ok := attr.V.(*ListValue) if !ok { return fmt.Errorf("schemahcl: field is of type slice but attr %q does not contain a ListValue", attr.K) } typ := field.Type().Elem() slc := reflect.MakeSlice(reflect.SliceOf(typ), 0, len(lst.V)) switch typ.Kind() { case reflect.String: s, err := attr.Strings() if err != nil { return fmt.Errorf("cannot read attribute %q as string list: %w", attr.K, err) } for _, item := range s { slc = reflect.Append(slc, reflect.ValueOf(item)) } case reflect.Bool: bools, err := attr.Bools() if err != nil { return fmt.Errorf("cannot read attribute %q as bool list: %w", attr.K, err) } for _, item := range bools { slc = reflect.Append(slc, reflect.ValueOf(item)) } case reflect.Ptr: if typ != reflect.TypeOf(&Ref{}) { return fmt.Errorf("only pointers to refs supported, got %s", typ) } for _, c := range lst.V { slc = reflect.Append(slc, reflect.ValueOf(c)) } default: return fmt.Errorf("slice of unsupported kind: %q", typ.Kind()) } field.Set(slc) return nil } // Scan reads the Extension into the Resource. Scan will override the Resource // name or type if they are set for the extension. 
func (r *Resource) Scan(ext any) error { if lookup, ok := extensions.lookup(ext); ok { r.Type = lookup } v := reflect.ValueOf(ext).Elem() for _, ft := range specFields(ext) { field := v.FieldByName(ft.Name) switch { case ft.omitempty() && isEmpty(field): case ft.isName(): if field.Kind() != reflect.String { return errors.New("schemahcl: extension name field must be string") } r.Name = field.String() case ft.isQualifier(): if field.Kind() != reflect.String { return errors.New("schemahcl: extension qualifer field must be string") } r.Qualifier = field.String() case isResourceSlice(field.Type()): for i := 0; i < field.Len(); i++ { ext := field.Index(i).Interface() child := &Resource{} if err := child.Scan(ext); err != nil { return err } child.Type = ft.tag r.Children = append(r.Children, child) } case isSingleResource(field.Type()): if field.IsNil() { continue } ext := field.Interface() child := &Resource{} if err := child.Scan(ext); err != nil { return err } child.Type = ft.tag r.Children = append(r.Children, child) case field.Kind() == reflect.Ptr: if field.IsNil() { continue } if err := scanPtr(ft.tag, r, field); err != nil { return err } default: if err := scanAttr(ft.tag, r, field); err != nil { return err } } } rem, ok := ext.(Remainer) if !ok { return nil } extra := rem.Remain() for _, attr := range extra.Attrs { r.SetAttr(attr) } r.Children = append(r.Children, extra.Children...) 
return nil } func scanPtr(key string, r *Resource, field reflect.Value) error { attr := &Attr{K: key} switch e := field.Interface().(type) { case *LiteralValue: attr.V = e case *Ref: attr.V = e case *Type: attr.V = e case *bool: attr.V = &LiteralValue{V: strconv.FormatBool(*e)} case *string: attr.V = &LiteralValue{V: strconv.Quote(*e)} default: return fmt.Errorf("schemahcl: unsupported pointer to %s", e) } r.SetAttr(attr) return nil } func scanAttr(key string, r *Resource, field reflect.Value) error { var lit string switch field.Kind() { case reflect.Slice: return scanSliceAttr(key, r, field) case reflect.String: lit = strconv.Quote(field.String()) case reflect.Int: lit = fmt.Sprintf("%d", field.Int()) case reflect.Bool: lit = strconv.FormatBool(field.Bool()) case reflect.Interface: if field.IsNil() { return nil } i := field.Interface() v, ok := i.(Value) if !ok { return fmt.Errorf("schemahcl: unsupported interface type %T for field %q", i, key) } r.SetAttr(&Attr{ K: key, V: v, }) return nil default: return fmt.Errorf("schemahcl: unsupported field kind %q", field.Kind()) } r.SetAttr(&Attr{ K: key, V: &LiteralValue{V: lit}, }) return nil } // scanSliceAttr sets an Attr named "key" into the Resource r, by converting // the value stored in "field" into a *ListValue. 
func scanSliceAttr(key string, r *Resource, field reflect.Value) error { typ := field.Type() lst := &ListValue{} switch typ.Elem().Kind() { case reflect.String: for i := 0; i < field.Len(); i++ { item := field.Index(i).Interface().(string) lst.V = append(lst.V, &LiteralValue{V: strconv.Quote(item)}) } case reflect.Bool: for i := 0; i < field.Len(); i++ { item := field.Index(i).Interface().(bool) lst.V = append(lst.V, &LiteralValue{V: strconv.FormatBool(item)}) } case reflect.Ptr: if typ.Elem() != reflect.TypeOf(&Ref{}) { return fmt.Errorf("schemahcl: currently on ref slice values supported, got %s", typ) } for i := 0; i < field.Len(); i++ { item := field.Index(i).Interface().(*Ref) lst.V = append(lst.V, item) } default: return fmt.Errorf("unsupported kind %q for %q", typ.Kind(), key) } r.SetAttr(&Attr{ K: key, V: lst, }) return nil } // specFields uses reflection to find struct fields that are tagged with "spec" // and returns a list of mappings from the tag to the field name. func specFields(ext any) []fieldDesc { t := reflect.TypeOf(ext) var fields []fieldDesc for i := 0; i < t.Elem().NumField(); i++ { f := t.Elem().Field(i) tag, ok := f.Tag.Lookup("spec") if !ok { continue } d := fieldDesc{tag: tag, StructField: f} if idx := strings.IndexByte(tag, ','); idx != -1 { d.tag, d.options = tag[:idx], tag[idx+1:] } fields = append(fields, d) } return fields } func isEmpty(v reflect.Value) bool { switch v.Kind() { case reflect.Array, reflect.Map, reflect.Slice, reflect.String: return v.Len() == 0 case reflect.Bool: return !v.Bool() case reflect.Interface, reflect.Ptr: return v.IsNil() case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: return v.Int() == 0 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: return v.Uint() == 0 case reflect.Float32, reflect.Float64: return v.Float() == 0 } return false } type fieldDesc struct { tag string // tag name. options string // rest of the options. 
reflect.StructField } func (f fieldDesc) isName() bool { return f.is("name") } func (f fieldDesc) isQualifier() bool { return f.is("qualifier") } func (f fieldDesc) omitempty() bool { return f.is("omitempty") } func (f fieldDesc) is(t string) bool { for _, opt := range strings.Split(f.options, ",") { if opt == t { return true } } return false } func (f fieldDesc) isInterfaceSlice() bool { return f.Type.Kind() == reflect.Slice && f.Type.Elem().Kind() == reflect.Interface } func (f fieldDesc) isInterface() bool { return f.Type.Kind() == reflect.Interface } func childrenOfType(r *Resource, types ...string) []*Resource { var out []*Resource for _, c := range r.Children { for _, typ := range types { if c.Type == typ { out = append(out, c) } } } return out } func isSingleResource(t reflect.Type) bool { if t.Kind() != reflect.Ptr { return false } elem := t.Elem() if elem.Kind() != reflect.Struct { return false } for i := 0; i < elem.NumField(); i++ { f := elem.Field(i) if _, ok := f.Tag.Lookup("spec"); ok { return true } if f.Type == reflect.TypeOf(DefaultExtension{}) { return true } } return false } func isResourceSlice(t reflect.Type) bool { if t.Kind() != reflect.Slice { return false } return isSingleResource(t.Elem()) } func hasAttr(r *Resource, name string) bool { _, ok := r.Attr(name) return ok } atlas-0.7.2/schemahcl/extension_test.go000066400000000000000000000130021431455511600201560ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package schemahcl_test import ( "testing" "ariga.io/atlas/schemahcl" "github.com/stretchr/testify/require" ) type OwnerBlock struct { schemahcl.DefaultExtension ID string `spec:",name"` FirstName string `spec:"first_name"` Born int `spec:"born"` Active bool `spec:"active"` BoolPtr *bool `spec:"bool_ptr"` OmitBool1 bool `spec:"omit_bool1,omitempty"` OmitBool2 bool `spec:"omit_bool2,omitempty"` Lit *schemahcl.LiteralValue `spec:"lit"` } type PetBlock struct { schemahcl.DefaultExtension ID string `spec:",name"` Breed string `spec:"breed"` Born int `spec:"born"` Owners []*OwnerBlock `spec:"owner"` RoleModel *PetBlock `spec:"role_model"` } func TestInvalidExt(t *testing.T) { r := &schemahcl.Resource{} err := r.As(1) require.EqualError(t, err, "schemahcl: expected target to be a pointer") var p *string err = r.As(p) require.EqualError(t, err, "schemahcl: expected target to be a pointer to a struct") } func TestExtension(t *testing.T) { schemahcl.Register("owner", &OwnerBlock{}) original := &schemahcl.Resource{ Name: "name", Type: "owner", Attrs: []*schemahcl.Attr{ schemahcl.StrLitAttr("first_name", "tzuri"), schemahcl.LitAttr("born", "2019"), schemahcl.LitAttr("active", "true"), schemahcl.LitAttr("bool_ptr", "true"), schemahcl.LitAttr("omit_bool1", "true"), schemahcl.LitAttr("lit", "1000"), schemahcl.LitAttr("extra", "true"), }, Children: []*schemahcl.Resource{ { Name: "extra", Type: "extra", }, }, } owner := OwnerBlock{} err := original.As(&owner) require.NoError(t, err) require.EqualValues(t, "tzuri", owner.FirstName) require.EqualValues(t, "name", owner.ID) require.EqualValues(t, 2019, owner.Born) require.EqualValues(t, true, owner.Active) require.NotNil(t, owner.BoolPtr) require.EqualValues(t, true, *owner.BoolPtr) require.EqualValues(t, schemahcl.LitAttr("lit", "1000").V, owner.Lit) attr, ok := owner.Remain().Attr("extra") require.True(t, ok) eb, err := attr.Bool() require.NoError(t, err) require.True(t, eb) scan := &schemahcl.Resource{} err = scan.Scan(&owner) 
require.NoError(t, err) require.EqualValues(t, original, scan) } func TestNested(t *testing.T) { schemahcl.Register("pet", &PetBlock{}) pet := &schemahcl.Resource{ Name: "donut", Type: "pet", Attrs: []*schemahcl.Attr{ schemahcl.StrLitAttr("breed", "golden retriever"), schemahcl.LitAttr("born", "2002"), }, Children: []*schemahcl.Resource{ { Name: "rotemtam", Type: "owner", Attrs: []*schemahcl.Attr{ schemahcl.StrLitAttr("first_name", "rotem"), schemahcl.LitAttr("born", "1985"), schemahcl.LitAttr("active", "true"), }, }, { Name: "gonnie", Type: "role_model", Attrs: []*schemahcl.Attr{ schemahcl.StrLitAttr("breed", "golden retriever"), schemahcl.LitAttr("born", "1998"), }, }, }, } pb := PetBlock{} err := pet.As(&pb) require.NoError(t, err) require.EqualValues(t, PetBlock{ ID: "donut", Breed: "golden retriever", Born: 2002, Owners: []*OwnerBlock{ {ID: "rotemtam", FirstName: "rotem", Born: 1985, Active: true}, }, RoleModel: &PetBlock{ ID: "gonnie", Breed: "golden retriever", Born: 1998, }, }, pb) scan := &schemahcl.Resource{} err = scan.Scan(&pb) require.NoError(t, err) require.EqualValues(t, pet, scan) name, err := pet.FinalName() require.NoError(t, err) require.EqualValues(t, "donut", name) } func TestRef(t *testing.T) { type A struct { Name string `spec:",name"` User *schemahcl.Ref `spec:"user"` } schemahcl.Register("a", &A{}) resource := &schemahcl.Resource{ Name: "x", Type: "a", Attrs: []*schemahcl.Attr{ { K: "user", V: &schemahcl.Ref{V: "$user.rotemtam"}, }, }, } var a A err := resource.As(&a) require.NoError(t, err) require.EqualValues(t, &schemahcl.Ref{V: "$user.rotemtam"}, a.User) scan := &schemahcl.Resource{} err = scan.Scan(&a) require.NoError(t, err) require.EqualValues(t, resource, scan) } func TestListRef(t *testing.T) { type B struct { Name string `spec:",name"` Users []*schemahcl.Ref `spec:"users"` } schemahcl.Register("b", &B{}) resource := &schemahcl.Resource{ Name: "x", Type: "b", Attrs: []*schemahcl.Attr{ { K: "users", V: &schemahcl.ListValue{ V: 
[]schemahcl.Value{ &schemahcl.Ref{V: "$user.a8m"}, &schemahcl.Ref{V: "$user.rotemtam"}, }, }, }, }, } var b B err := resource.As(&b) require.NoError(t, err) require.Len(t, b.Users, 2) require.EqualValues(t, []*schemahcl.Ref{ {V: "$user.a8m"}, {V: "$user.rotemtam"}, }, b.Users) scan := &schemahcl.Resource{} err = scan.Scan(&b) require.NoError(t, err) require.EqualValues(t, resource, scan) } func TestNameAttr(t *testing.T) { type Named struct { Name string `spec:"name,name"` } schemahcl.Register("named", &Named{}) resource := &schemahcl.Resource{ Name: "id", Type: "named", Attrs: []*schemahcl.Attr{ schemahcl.StrLitAttr("name", "atlas"), }, } var tgt Named err := resource.As(&tgt) require.NoError(t, err) require.EqualValues(t, "atlas", tgt.Name) name, err := resource.FinalName() require.NoError(t, err) require.EqualValues(t, name, "atlas") } atlas-0.7.2/schemahcl/hcl.go000066400000000000000000000371471431455511600156710ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package schemahcl import ( "bytes" "errors" "fmt" "sort" "strconv" "strings" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclparse" "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/hashicorp/hcl/v2/hclwrite" "github.com/zclconf/go-cty/cty" ) // Marshal returns the Atlas HCL encoding of v. var Marshal = MarshalerFunc(New().MarshalSpec) type ( // State is used to evaluate and marshal Atlas HCL documents and stores a configuration for these operations. State struct { config *Config } // Evaluator is the interface that wraps the Eval function. Evaluator interface { // Eval evaluates parsed HCL files using input variables into a schema.Realm. Eval(*hclparse.Parser, any, map[string]string) error } // EvalFunc is an adapter that allows the use of an ordinary function as an Evaluator. 
EvalFunc func(*hclparse.Parser, any, map[string]string) error // Marshaler is the interface that wraps the MarshalSpec function. Marshaler interface { // MarshalSpec marshals the provided input into a valid Atlas HCL document. MarshalSpec(any) ([]byte, error) } // MarshalerFunc is the function type that is implemented by the MarshalSpec // method of the Marshaler interface. MarshalerFunc func(any) ([]byte, error) ) // MarshalSpec implements Marshaler for Atlas HCL documents. func (s *State) MarshalSpec(v any) ([]byte, error) { r := &Resource{} if err := r.Scan(v); err != nil { return nil, fmt.Errorf("schemahcl: failed scanning %T to resource: %w", v, err) } return s.encode(r) } // EvalFiles evaluates the files in the provided paths using the input variables and // populates v with the result. func (s *State) EvalFiles(paths []string, v any, input map[string]string) error { parser := hclparse.NewParser() for _, path := range paths { if _, diag := parser.ParseHCLFile(path); diag.HasErrors() { return diag } } return s.Eval(parser, v, input) } // Eval evaluates the parsed HCL documents using the input variables and populates v // using the result. func (s *State) Eval(parsed *hclparse.Parser, v any, input map[string]string) error { ctx := s.config.newCtx() reg := &blockDef{ fields: make(map[string]struct{}), children: make(map[string]*blockDef), } files := parsed.Files() fileNames := make([]string, 0, len(files)) allBlocks := make([]*hclsyntax.Block, 0, len(files)) // Prepare reg and allBlocks. for name, file := range files { fileNames = append(fileNames, name) if err := s.setInputVals(ctx, file.Body, input); err != nil { return err } body := file.Body.(*hclsyntax.Body) for _, blk := range body.Blocks { // Variable definition blocks are available in the HCL source but not reachable by reference. 
if blk.Type == varBlock { continue } allBlocks = append(allBlocks, blk) reg.child(extractDef(blk, reg)) } } vars, err := blockVars(allBlocks, "", reg) if err != nil { return err } if ctx.Variables == nil { ctx.Variables = make(map[string]cty.Value) } for k, v := range vars { ctx.Variables[k] = v } spec := &Resource{} sort.Slice(fileNames, func(i, j int) bool { return fileNames[i] < fileNames[j] }) for _, fn := range fileNames { file := files[fn] r, err := s.resource(ctx, file) if err != nil { return err } spec.Children = append(spec.Children, r.Children...) spec.Attrs = append(spec.Attrs, r.Attrs...) } if err := patchRefs(spec); err != nil { return err } if err := spec.As(v); err != nil { return fmt.Errorf("schemahcl: failed reading spec as %T: %w", v, err) } return nil } // EvalBytes evaluates the data byte-slice as an Atlas HCL document using the input variables // and stores the result in v. func (s *State) EvalBytes(data []byte, v any, input map[string]string) error { parser := hclparse.NewParser() if _, diag := parser.ParseHCL(data, ""); diag.HasErrors() { return diag } return s.Eval(parser, v, input) } // addrRef maps addresses to their referenced resource. type addrRef map[string]*Resource // patchRefs recursively searches for schemahcl.Ref under the provided schemahcl.Resource // and patches any variables with their concrete names. 
func patchRefs(spec *Resource) error {
	return make(addrRef).patch(spec)
}

// patch rewrites every *Ref attribute under resource to use the referenced
// resource's final (concrete) name, recursing into children with an extended
// address table.
func (r addrRef) patch(resource *Resource) error {
	cp := r.copy().load(resource, "")
	for _, attr := range resource.Attrs {
		if ref, ok := attr.V.(*Ref); ok {
			referenced, ok := cp[ref.V]
			if !ok {
				return fmt.Errorf("broken reference to %q", ref.V)
			}
			// FinalName may fail (e.g. name still unresolved); in that case the
			// reference is left untouched rather than rewritten.
			if name, err := referenced.FinalName(); err == nil {
				ref.V = strings.ReplaceAll(ref.V, referenced.Name, name)
			}
		}
	}
	for _, ch := range resource.Children {
		if err := cp.patch(ch); err != nil {
			return err
		}
	}
	return nil
}

// copy returns a shallow copy of the address table so child scopes do not
// mutate their parent's table.
func (r addrRef) copy() addrRef {
	n := make(addrRef)
	for k, v := range r {
		n[k] = v
	}
	return n
}

// load the references from the children of the resource.
func (r addrRef) load(res *Resource, track string) addrRef {
	unlabeled := 0
	for _, ch := range res.Children {
		current := rep(ch)
		// Unlabeled blocks get a positional suffix so their addresses stay unique.
		if ch.Name == "" {
			current += strconv.Itoa(unlabeled)
			unlabeled++
		}
		if track != "" {
			current = track + "." + current
		}
		r[current] = ch
		r.load(ch, current)
	}
	return r
}

// rep returns the "$type.name" address representation of a resource,
// including its qualifier when present.
func rep(r *Resource) string {
	n := r.Name
	if r.Qualifier != "" {
		n = r.Qualifier + "." + n
	}
	return fmt.Sprintf("$%s.%s", r.Type, n)
}

// resource converts the hcl file to a schemahcl.Resource.
func (s *State) resource(ctx *hcl.EvalContext, file *hcl.File) (*Resource, error) {
	body, ok := file.Body.(*hclsyntax.Body)
	if !ok {
		return nil, fmt.Errorf("schemahcl: expected remainder to be of type *hclsyntax.Body")
	}
	attrs, err := s.toAttrs(ctx, body.Attributes, nil)
	if err != nil {
		return nil, err
	}
	res := &Resource{
		Attrs: attrs,
	}
	for _, blk := range body.Blocks {
		// variable blocks may be included in the document but are skipped in unmarshaling.
		if blk.Type == varBlock {
			continue
		}
		// Each block evaluates in its own child context so block-local
		// variables do not leak between sibling blocks.
		ctx, err := setBlockVars(ctx.NewChild(), blk.Body)
		if err != nil {
			return nil, err
		}
		resource, err := s.toResource(ctx, blk, []string{blk.Type})
		if err != nil {
			return nil, err
		}
		res.Children = append(res.Children, resource)
	}
	return res, nil
}

// mayExtendVars gets the current scope context, and extend it with additional
// variables if it was configured this way using WithScopedEnums.
func (s *State) mayExtendVars(ctx *hcl.EvalContext, scope []string) *hcl.EvalContext {
	vars, ok := s.config.pathVars[strings.Join(scope, ".")]
	if !ok {
		return ctx
	}
	ctx = ctx.NewChild()
	ctx.Variables = vars
	return ctx
}

// toAttrs evaluates the HCL attributes in the given scope and converts each
// into a *Attr, classifying the evaluated value as a reference, raw expression,
// type, list, or literal.
func (s *State) toAttrs(ctx *hcl.EvalContext, hclAttrs hclsyntax.Attributes, scope []string) ([]*Attr, error) {
	var attrs []*Attr
	for _, hclAttr := range hclAttrs {
		ctx := s.mayExtendVars(ctx, append(scope, hclAttr.Name))
		at := &Attr{K: hclAttr.Name}
		value, diag := hclAttr.Expr.Value(ctx)
		if diag.HasErrors() {
			return nil, s.typeError(diag)
		}
		var err error
		switch {
		case isRef(value):
			at.V = &Ref{V: value.GetAttr("__ref").AsString()}
		case value.Type() == ctyRawExpr:
			at.V = value.EncapsulatedValue().(*RawExpr)
		case value.Type() == ctyTypeSpec:
			at.V = value.EncapsulatedValue().(*Type)
		case value.Type().IsTupleType():
			at.V, err = extractListValue(value)
		default:
			at.V, err = extractLiteralValue(value)
		}
		if err != nil {
			return nil, err
		}
		attrs = append(attrs, at)
	}
	// hclsyntax.Attrs is an alias for map[string]*Attribute
	sort.Slice(attrs, func(i, j int) bool {
		return attrs[i].K < attrs[j].K
	})
	return attrs, nil
}

// typeError improves diagnostic reporting in case of parse error.
func (s *State) typeError(diag hcl.Diagnostics) error { for _, d := range diag { switch e := d.Expression.(type) { case *hclsyntax.FunctionCallExpr: if d.Summary != "Call to unknown function" { continue } if t, ok := s.findTypeSpec(e.Name); ok && len(t.Attributes) == 0 { d.Detail = fmt.Sprintf("Type %q does not accept attributes", t.Name) } case *hclsyntax.ScopeTraversalExpr: if d.Summary != "Unknown variable" { continue } if t, ok := s.findTypeSpec(e.Traversal.RootName()); ok && len(t.Attributes) > 0 { d.Detail = fmt.Sprintf("Type %q requires at least 1 argument", t.Name) } } } return diag } func isRef(v cty.Value) bool { return v.Type().IsObjectType() && v.Type().HasAttribute("__ref") } func extractListValue(value cty.Value) (*ListValue, error) { lst := &ListValue{} it := value.ElementIterator() for it.Next() { _, v := it.Element() if isRef(v) { lst.V = append(lst.V, &Ref{V: v.GetAttr("__ref").AsString()}) continue } litv, err := extractLiteralValue(v) if err != nil { return nil, err } lst.V = append(lst.V, litv) } return lst, nil } func extractLiteralValue(value cty.Value) (*LiteralValue, error) { switch value.Type() { case ctySchemaLit: return value.EncapsulatedValue().(*LiteralValue), nil case cty.String: return &LiteralValue{V: strconv.Quote(value.AsString())}, nil case cty.Number: bf := value.AsBigFloat() num, _ := bf.Float64() return &LiteralValue{V: strconv.FormatFloat(num, 'f', -1, 64)}, nil case cty.Bool: return &LiteralValue{V: strconv.FormatBool(value.True())}, nil default: return nil, fmt.Errorf("schemahcl: unsupported type %q", value.Type().GoString()) } } func (s *State) toResource(ctx *hcl.EvalContext, block *hclsyntax.Block, scope []string) (*Resource, error) { spec := &Resource{ Type: block.Type, } switch len(block.Labels) { case 0: case 1: spec.Name = block.Labels[0] case 2: spec.Qualifier = block.Labels[0] spec.Name = block.Labels[1] default: return nil, fmt.Errorf("too many labels for block: %s", block.Labels) } ctx = s.mayExtendVars(ctx, 
scope) attrs, err := s.toAttrs(ctx, block.Body.Attributes, scope) if err != nil { return nil, err } spec.Attrs = attrs for _, blk := range block.Body.Blocks { res, err := s.toResource(ctx, blk, append(scope, blk.Type)) if err != nil { return nil, err } spec.Children = append(spec.Children, res) } return spec, nil } // encode encodes the give *schemahcl.Resource into a byte slice containing an Atlas HCL // document representing it. func (s *State) encode(r *Resource) ([]byte, error) { f := hclwrite.NewFile() body := f.Body() // If the resource has a Type then it is rendered as an HCL block. if r.Type != "" { blk := body.AppendNewBlock(r.Type, labels(r)) body = blk.Body() } for _, attr := range r.Attrs { if err := s.writeAttr(attr, body); err != nil { return nil, err } } for _, res := range r.Children { if err := s.writeResource(res, body); err != nil { return nil, err } } var buf bytes.Buffer _, err := f.WriteTo(&buf) return buf.Bytes(), err } func (s *State) writeResource(b *Resource, body *hclwrite.Body) error { blk := body.AppendNewBlock(b.Type, labels(b)) nb := blk.Body() for _, attr := range b.Attrs { if err := s.writeAttr(attr, nb); err != nil { return err } } for _, b := range b.Children { if err := s.writeResource(b, nb); err != nil { return err } } return nil } func labels(r *Resource) []string { var l []string if r.Qualifier != "" { l = append(l, r.Qualifier) } if r.Name != "" { l = append(l, r.Name) } return l } func (s *State) writeAttr(attr *Attr, body *hclwrite.Body) error { attr = normalizeLiterals(attr) switch v := attr.V.(type) { case *Ref: body.SetAttributeRaw(attr.K, hclRefTokens(v.V)) case *Type: if v.IsRef { body.SetAttributeRaw(attr.K, hclRefTokens(v.T)) break } spec, ok := s.findTypeSpec(v.T) if !ok { v := fmt.Sprintf("sql(%q)", v.T) body.SetAttributeRaw(attr.K, hclRawTokens(v)) break } st, err := hclType(spec, v) if err != nil { return err } body.SetAttributeRaw(attr.K, hclRawTokens(st)) case *LiteralValue: body.SetAttributeRaw(attr.K, 
hclRawTokens(v.V)) case *RawExpr: // TODO(rotemtam): the func name should be decided on contextual basis. fnc := fmt.Sprintf("sql(%q)", v.X) body.SetAttributeRaw(attr.K, hclRawTokens(fnc)) case *ListValue: // Skip scanning nil slices ([]T(nil)) by default. Users that // want to print empty lists, should use make([]T, 0) instead. if v.V == nil { return nil } lst := make([]hclwrite.Tokens, 0, len(v.V)) for _, item := range v.V { switch v := item.(type) { case *Ref: lst = append(lst, hclRefTokens(v.V)) case *LiteralValue: lst = append(lst, hclRawTokens(v.V)) default: return fmt.Errorf("cannot write elem type %T of attr %q to HCL list", v, attr) } } body.SetAttributeRaw(attr.K, hclList(lst)) default: return fmt.Errorf("schemacl: unknown literal type %T", v) } return nil } // normalizeLiterals transforms attributes with LiteralValue that cannot be // written as correct HCL into RawExpr. func normalizeLiterals(attr *Attr) *Attr { lv, ok := attr.V.(*LiteralValue) if !ok { return attr } exp := "x = " + lv.V p := hclparse.NewParser() if _, diag := p.ParseHCL([]byte(exp), ""); diag != nil { return &Attr{K: attr.K, V: &RawExpr{X: lv.V}} } return attr } func (s *State) findTypeSpec(t string) (*TypeSpec, bool) { for _, v := range s.config.types { if v.T == t { return v, true } } return nil, false } func hclType(spec *TypeSpec, typ *Type) (string, error) { if spec.Format != nil { return spec.Format(typ) } if len(typeFuncArgs(spec)) == 0 { return spec.Name, nil } args := make([]string, 0, len(spec.Attributes)) for _, param := range typeFuncArgs(spec) { arg, ok := findAttr(typ.Attrs, param.Name) if !ok { continue } switch val := arg.V.(type) { case *LiteralValue: args = append(args, val.V) case *ListValue: for _, li := range val.V { lit, ok := li.(*LiteralValue) if !ok { return "", errors.New("expecting literal value") } args = append(args, lit.V) } } } // If no args were chosen and the type can be described without a function. 
if len(args) == 0 && len(typeFuncReqArgs(spec)) == 0 { return spec.Name, nil } return fmt.Sprintf("%s(%s)", spec.Name, strings.Join(args, ",")), nil } func findAttr(attrs []*Attr, k string) (*Attr, bool) { for _, attr := range attrs { if attr.K == k { return attr, true } } return nil, false } func hclRefTokens(ref string) hclwrite.Tokens { var t []*hclwrite.Token for i, s := range strings.Split(ref, ".") { // Ignore the first $ as token for reference. if len(s) > 1 && s[0] == '$' { s = s[1:] } switch { case i == 0: t = append(t, hclRawTokens(s)...) case hclsyntax.ValidIdentifier(s): t = append(t, &hclwrite.Token{ Type: hclsyntax.TokenDot, Bytes: []byte{'.'}, }, &hclwrite.Token{ Type: hclsyntax.TokenIdent, Bytes: []byte(s), }) default: t = append(t, &hclwrite.Token{ Type: hclsyntax.TokenOBrack, Bytes: []byte{'['}, }) t = append(t, hclwrite.TokensForValue(cty.StringVal(s))...) t = append(t, &hclwrite.Token{ Type: hclsyntax.TokenCBrack, Bytes: []byte{']'}, }) } } return t } func hclRawTokens(s string) hclwrite.Tokens { return hclwrite.Tokens{ &hclwrite.Token{ Type: hclsyntax.TokenIdent, Bytes: []byte(s), }, } } func hclList(items []hclwrite.Tokens) hclwrite.Tokens { t := hclwrite.Tokens{&hclwrite.Token{ Type: hclsyntax.TokenOBrack, Bytes: []byte("["), }} for i, item := range items { if i > 0 { t = append(t, &hclwrite.Token{Type: hclsyntax.TokenComma, Bytes: []byte(",")}) } t = append(t, item...) } t = append(t, &hclwrite.Token{ Type: hclsyntax.TokenCBrack, Bytes: []byte("]"), }) return t } // Eval implements the Evaluator interface. func (f EvalFunc) Eval(p *hclparse.Parser, i any, input map[string]string) error { return f(p, i, input) } atlas-0.7.2/schemahcl/hcl_test.go000066400000000000000000000154451431455511600167250ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package schemahcl

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
	"testing"

	"github.com/stretchr/testify/require"
)

// TestAttributes verifies that scalar and list attributes round-trip through
// EvalBytes and Marshal unchanged.
func TestAttributes(t *testing.T) {
	f := `i  = 1
b  = true
s  = "hello, world"
sl = ["hello", "world"]
bl = [true, false]
`
	var test struct {
		Int        int      `spec:"i"`
		Bool       bool     `spec:"b"`
		Str        string   `spec:"s"`
		StringList []string `spec:"sl"`
		BoolList   []bool   `spec:"bl"`
	}
	err := New().EvalBytes([]byte(f), &test, nil)
	require.NoError(t, err)
	require.EqualValues(t, 1, test.Int)
	require.EqualValues(t, true, test.Bool)
	require.EqualValues(t, "hello, world", test.Str)
	require.EqualValues(t, []string{"hello", "world"}, test.StringList)
	require.EqualValues(t, []bool{true, false}, test.BoolList)
	marshal, err := Marshal(&test)
	require.NoError(t, err)
	require.EqualValues(t, f, string(marshal))
}

// TestResource verifies that nested blocks round-trip through EvalBytes and
// Marshal unchanged.
func TestResource(t *testing.T) {
	f := `endpoint "/hello" {
  description = "the hello handler"
  timeout_ms  = 100
  handler {
    active = true
    addr   = ":8080"
  }
}
`
	type (
		Handler struct {
			Active bool   `spec:"active"`
			Addr   string `spec:"addr"`
		}

		Endpoint struct {
			Name        string   `spec:",name"`
			Description string   `spec:"description"`
			TimeoutMs   int      `spec:"timeout_ms"`
			Handler     *Handler `spec:"handler"`
		}
		File struct {
			Endpoints []*Endpoint `spec:"endpoint"`
		}
	)
	var test File
	err := New().EvalBytes([]byte(f), &test, nil)
	require.NoError(t, err)
	require.Len(t, test.Endpoints, 1)
	expected := &Endpoint{
		Name:        "/hello",
		Description: "the hello handler",
		TimeoutMs:   100,
		Handler: &Handler{
			Active: true,
			Addr:   ":8080",
		},
	}
	require.EqualValues(t, expected, test.Endpoints[0])
	buf, err := Marshal(&test)
	require.NoError(t, err)
	require.EqualValues(t, f, string(buf))
}

func ExampleUnmarshal() {
	f := `
show "seinfeld" {
	day = SUN
	writer "jerry" {
		full_name = "Jerry Seinfeld"
	}
	writer "larry" {
		full_name = "Larry David"
	}
}`

	type (
		Writer struct {
			ID       string `spec:",name"`
			FullName string `spec:"full_name"`
		}
		Show struct {
			Name    string    `spec:",name"`
			Day     string    `spec:"day"`
			Writers []*Writer `spec:"writer"`
		}
	)
	var (
		test struct {
			Shows []*Show `spec:"show"`
		}
		opts = []Option{
			WithScopedEnums("show.day", "SUN", "MON", "TUE"),
		}
	)
	err := New(opts...).EvalBytes([]byte(f), &test, nil)
	if err != nil {
		panic(err)
	}
	seinfeld := test.Shows[0]
	fmt.Printf("the show %q at day %s has %d writers.", seinfeld.Name, seinfeld.Day, len(seinfeld.Writers))
	// Output: the show "seinfeld" at day SUN has 2 writers.
}

func ExampleMarshal() {
	type (
		Point struct {
			ID string `spec:",name"`
			X  int    `spec:"x"`
			Y  int    `spec:"y"`
		}
	)
	var test = struct {
		Points []*Point `spec:"point"`
	}{
		Points: []*Point{
			{ID: "start", X: 0, Y: 0},
			{ID: "end", X: 1, Y: 1},
		},
	}
	b, err := Marshal(&test)
	if err != nil {
		log.Fatalln(err)
	}
	fmt.Println(string(b))
	// Output:
	// point "start" {
	//   x = 0
	//   y = 0
	// }
	// point "end" {
	//   x = 1
	//   y = 1
	// }
}

// TestInterface verifies decoding of polymorphic blocks into interface-typed
// fields, both as a single value and as a slice.
func TestInterface(t *testing.T) {
	type (
		Animal interface {
			animal()
		}
		Parrot struct {
			Animal
			Name string `spec:",name"`
			Boss string `spec:"boss"`
		}
		Lion struct {
			Animal
			Name   string `spec:",name"`
			Friend string `spec:"friend"`
		}
		Zoo struct {
			Animals []Animal `spec:""`
		}
		Cast struct {
			Animal Animal `spec:""`
		}
	)
	Register("lion", &Lion{})
	Register("parrot", &Parrot{})
	t.Run("single", func(t *testing.T) {
		f := `
cast "lion_king" {
	lion "simba" {
		friend = "rafiki"
	}
}
`
		var test struct {
			Cast *Cast `spec:"cast"`
		}
		err := New().EvalBytes([]byte(f), &test, nil)
		require.NoError(t, err)
		require.EqualValues(t, &Cast{
			Animal: &Lion{
				Name:   "simba",
				Friend: "rafiki",
			},
		}, test.Cast)
	})
	t.Run("slice", func(t *testing.T) {
		f := `
zoo "ramat_gan" {
	lion "simba" {
		friend = "rafiki"
	}
	parrot "iago" {
		boss = "jafar"
	}
}
`
		var test struct {
			Zoo *Zoo `spec:"zoo"`
		}
		err := New().EvalBytes([]byte(f), &test, nil)
		require.NoError(t, err)
		require.EqualValues(t, &Zoo{
			Animals: []Animal{
				&Lion{
					Name:   "simba",
					Friend: "rafiki",
				},
				&Parrot{
					Name: "iago",
					Boss: "jafar",
				},
			},
		}, test.Zoo)
	})
}

// TestQualified verifies that a two-label block populates both the qualifier
// and the name fields, and round-trips through Marshal.
func TestQualified(t *testing.T) {
	type Person struct {
		Name  string `spec:",name"`
		Title string `spec:",qualifier"`
	}
	var test struct {
		Person *Person `spec:"person"`
	}
	h := `person "dr" "jekyll" {
}
`
	err := New().EvalBytes([]byte(h), &test, nil)
	require.NoError(t, err)
	require.EqualValues(t, test.Person, &Person{
		Title: "dr",
		Name:  "jekyll",
	})
	out, err := Marshal(&test)
	require.NoError(t, err)
	require.EqualValues(t, h, string(out))
}

// TestNameAttr verifies that a "name"-tagged attribute is both decoded into
// the struct and reachable by reference from other attributes.
func TestNameAttr(t *testing.T) {
	h := `
named "block_id" {
  name = "atlas"
}
ref = named.block_id.name
`
	type Named struct {
		Name string `spec:"name,name"`
	}
	var test struct {
		Named *Named `spec:"named"`
		Ref   string `spec:"ref"`
	}
	err := New().EvalBytes([]byte(h), &test, nil)
	require.NoError(t, err)
	require.EqualValues(t, &Named{
		Name: "atlas",
	}, test.Named)
	require.EqualValues(t, "atlas", test.Ref)
}

// TestRefPatch verifies that references are patched with the final name of
// the referenced resource after input variables are applied.
func TestRefPatch(t *testing.T) {
	type (
		Family struct {
			Name string `spec:"name,name"`
		}
		Person struct {
			Name   string `spec:",name"`
			Family *Ref   `spec:"family"`
		}
	)
	Register("family", &Family{})
	Register("person", &Person{})
	var test struct {
		Families []*Family `spec:"family"`
		People   []*Person `spec:"person"`
	}
	h := `
variable "family_name" {
  type = string
}

family "default" {
	name = var.family_name
}

person "rotem" {
	family = family.default
}
`
	err := New().EvalBytes([]byte(h), &test, map[string]string{"family_name": "tam"})
	require.NoError(t, err)
	require.EqualValues(t, "$family.tam", test.People[0].Family.V)
}

// TestMultiFile verifies evaluation across multiple files in testdata/,
// including cross-file references and input variables.
func TestMultiFile(t *testing.T) {
	type Person struct {
		Name   string `spec:",name"`
		Hobby  string `spec:"hobby"`
		Parent *Ref   `spec:"parent"`
	}
	var test struct {
		People []*Person `spec:"person"`
	}
	paths := make([]string, 0)
	testDir := "testdata/"
	dir, err := os.ReadDir(testDir)
	require.NoError(t, err)
	for _, file := range dir {
		if file.IsDir() {
			continue
		}
		paths = append(paths, filepath.Join(testDir, file.Name()))
	}
	err = New().EvalFiles(paths, &test, map[string]string{
		"hobby": "coding",
	})
	require.NoError(t, err)
	require.Len(t, test.People, 2)
	require.EqualValues(t, &Person{Name: "rotemtam", Hobby: "coding"}, test.People[0])
	require.EqualValues(t, &Person{
		Name:   "tzuri",
		Hobby:  "ice-cream",
		Parent: &Ref{V: "$person.rotemtam"},
	}, test.People[1])
}
atlas-0.7.2/schemahcl/opts.go000066400000000000000000000136561431455511600161060ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

package schemahcl

import (
	"errors"
	"fmt"
	"reflect"

	"github.com/hashicorp/hcl/v2"
	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function"
)

type (
	// Config configures an unmarshaling.
	Config struct {
		types    []*TypeSpec
		newCtx   func() *hcl.EvalContext
		pathVars map[string]map[string]cty.Value
	}

	// Option configures a Config.
	Option func(*Config)
)

// New returns a State configured with options.
func New(opts ...Option) *State {
	cfg := &Config{
		pathVars: make(map[string]map[string]cty.Value),
		// Default context factory; options such as WithTypes may replace it.
		newCtx: func() *hcl.EvalContext {
			return &hcl.EvalContext{
				Variables: make(map[string]cty.Value),
				Functions: make(map[string]function.Function),
			}
		},
	}
	for _, opt := range opts {
		opt(cfg)
	}
	return &State{config: cfg}
}

// WithScopedEnums configured a list of allowed ENUMs to be used in
// the given context, block or attribute. For example, the following
// option allows setting HASH or BTREE to the "using" attribute in
// "index" block.
//
//	WithScopedEnums("table.index.type", "HASH", "BTREE")
//
//	table "t" {
//		...
//		index "i" {
//			type = HASH     // Allowed.
//			type = INVALID  // Not Allowed.
//		}
//	}
func WithScopedEnums(path string, enums ...string) Option {
	return func(c *Config) {
		vars := make(map[string]cty.Value, len(enums))
		for i := range enums {
			vars[enums[i]] = cty.StringVal(enums[i])
		}
		c.pathVars[path] = vars
	}
}

// WithTypes configures the list of given types as identifiers in the unmarshaling context.
func WithTypes(typeSpecs []*TypeSpec) Option { newCtx := func() *hcl.EvalContext { ctx := &hcl.EvalContext{ Variables: make(map[string]cty.Value), Functions: make(map[string]function.Function), } for _, ts := range typeSpecs { typeSpec := ts // If no required args exist, register the type as a variable in the HCL context. if len(typeFuncReqArgs(typeSpec)) == 0 { typ := &Type{T: typeSpec.T} ctx.Variables[typeSpec.Name] = cty.CapsuleVal(ctyTypeSpec, typ) } // If func args exist, register the type as a function in HCL. if len(typeFuncArgs(typeSpec)) > 0 { ctx.Functions[typeSpec.Name] = typeFuncSpec(typeSpec) } } ctx.Functions["sql"] = rawExprImpl() return ctx } return func(config *Config) { config.newCtx = newCtx config.types = append(config.types, typeSpecs...) } } func rawExprImpl() function.Function { return function.New(&function.Spec{ Params: []function.Parameter{ {Name: "def", Type: cty.String, AllowNull: false}, }, Type: function.StaticReturnType(ctyRawExpr), Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { x := args[0].AsString() if len(x) == 0 { return cty.NilVal, errors.New("empty expression") } t := &RawExpr{X: x} return cty.CapsuleVal(ctyRawExpr, t), nil }, }) } // typeFuncSpec returns the HCL function for defining the type in the spec. 
func typeFuncSpec(typeSpec *TypeSpec) function.Function {
	spec := &function.Spec{
		Type: function.StaticReturnType(ctyTypeSpec),
	}
	for _, arg := range typeFuncArgs(typeSpec) {
		// Slice or optional args are collected through a single variadic
		// parameter; only required scalar args become positional parameters.
		if arg.Kind == reflect.Slice || !arg.Required {
			spec.VarParam = &function.Parameter{
				Name: "args",
				Type: cty.DynamicPseudoType,
			}
			continue
		}
		p := function.Parameter{
			Name:      arg.Name,
			AllowNull: !arg.Required,
		}
		switch arg.Kind {
		case reflect.String:
			p.Type = cty.String
		case reflect.Int, reflect.Float32, reflect.Int64:
			p.Type = cty.Number
		case reflect.Bool:
			p.Type = cty.Bool
		}
		spec.Params = append(spec.Params, p)
	}
	spec.Impl = typeFuncSpecImpl(spec, typeSpec)
	return function.New(spec)
}

// typeFuncSpecImpl returns the function implementation for the HCL function spec.
func typeFuncSpecImpl(_ *function.Spec, typeSpec *TypeSpec) function.ImplFunc {
	return func(args []cty.Value, retType cty.Type) (cty.Value, error) {
		t := &Type{
			T: typeSpec.T,
		}
		if len(args) > len(typeSpec.Attributes) && typeSpec.Attributes[len(typeSpec.Attributes)-1].Kind != reflect.Slice {
			return cty.NilVal, fmt.Errorf("too many arguments for type definition %q", typeSpec.Name)
		}
		// TypeRegistry enforces that:
		// 1. Required attrs come before optionals
		// 2. Slice attrs can only be last
		for _, attr := range typeFuncArgs(typeSpec) {
			// If the attribute is a slice, read all remaining args into a list value.
			if attr.Kind == reflect.Slice {
				lst := &ListValue{}
				for _, arg := range args {
					v, err := extractLiteralValue(arg)
					if err != nil {
						return cty.NilVal, err
					}
					lst.V = append(lst.V, v)
				}
				t.Attrs = append(t.Attrs, &Attr{K: attr.Name, V: lst})
				break
			}
			if len(args) == 0 {
				break
			}
			// Pop the first arg and add it as a literal to the type.
			var arg cty.Value
			arg, args = args[0], args[1:]
			v, err := extractLiteralValue(arg)
			if err != nil {
				return cty.NilVal, err
			}
			t.Attrs = append(t.Attrs, &Attr{K: attr.Name, V: v})
		}
		return cty.CapsuleVal(ctyTypeSpec, t), nil
	}
}

// typeFuncArgs returns the type attributes that are configured via arguments to the
// type definition, for example precision and scale in a decimal definition, i.e `decimal(10,2)`.
func typeFuncArgs(spec *TypeSpec) []*TypeAttr {
	var args []*TypeAttr
	for _, attr := range spec.Attributes {
		// TODO(rotemtam): this should be defined on the TypeSpec.
		if attr.Name == "unsigned" {
			continue
		}
		args = append(args, attr)
	}
	return args
}

// typeFuncReqArgs returns the required type attributes that are configured via arguments.
// for instance, in MySQL a field may be defined as both `int` and `int(10)`, in this case
// it is not a required parameter.
func typeFuncReqArgs(spec *TypeSpec) []*TypeAttr {
	var args []*TypeAttr
	for _, arg := range typeFuncArgs(spec) {
		if arg.Required {
			args = append(args, arg)
		}
	}
	return args
}
atlas-0.7.2/schemahcl/spec.go000066400000000000000000000223211431455511600160410ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

package schemahcl

import (
	"fmt"
	"reflect"
	"strconv"

	"ariga.io/atlas/sql/schema"
)

type (
	// Resource is a generic container for resources described in configurations.
	Resource struct {
		Name      string
		Qualifier string
		Type      string
		Attrs     []*Attr
		Children  []*Resource
	}

	// Attr is an attribute of a Resource.
	Attr struct {
		K string
		V Value
	}

	// Value represents the value of an Attr.
	Value interface {
		val()
	}

	// LiteralValue implements Value and represents a literal value (string, number, etc.)
	LiteralValue struct {
		V string
	}

	// RawExpr implements Value and represents any raw expression.
	RawExpr struct {
		X string
	}

	// ListValue implements Value and represents a list of Values.
	ListValue struct {
		V []Value
	}

	// Ref implements Value and represents a reference to another Resource.
	// The path to a Resource under the root Resource is expressed as "$<type>.<name>..."
	// recursively. For example, a resource of type "table" that is named "users" and is a direct
	// child of the root Resource's address shall be "$table.users". A child resource of that table
	// of type "column" and named "id", shall be referenced as "$table.users.$column.id", and so on.
	Ref struct {
		V string
	}

	// TypeSpec represents a specification for defining a Type.
	TypeSpec struct {
		// Name is the identifier for the type in an Atlas DDL document.
		Name string

		// T is the database identifier for the type.
		T          string
		Attributes []*TypeAttr

		// RType is the reflect.Type of the schema.Type used to describe the TypeSpec.
		// This field is optional and used to determine the TypeSpec in cases where the
		// schema.Type does not have a `T` field.
		RType reflect.Type

		// Format is an optional formatting function.
		// If exists, it will be used instead the registry one.
		Format func(*Type) (string, error)

		// FromSpec is an optional function that can be attached
		// to the type spec and allows converting the schema spec
		// type to a schema type (from document to database).
		FromSpec func(*Type) (schema.Type, error)

		// ToSpec is an optional function that can be attached
		// to the type spec and allows converting the schema type
		// to a schema spec type (from database to document).
		ToSpec func(schema.Type) (*Type, error)
	}

	// TypeAttr describes an attribute of a TypeSpec, for example `varchar` fields
	// can have a `size` attribute.
	TypeAttr struct {
		// Name should be a snake_case of related the schema.Type struct field.
		Name     string
		Kind     reflect.Kind
		Required bool
	}

	// Type represents the type of the field in a schema.
	Type struct {
		T     string
		Attrs []*Attr
		IsRef bool
	}
)

// Int returns an int from the Value of the Attr.
// If the value is not a LiteralValue or the value
// cannot be converted to an integer an error is returned.
func (a *Attr) Int() (int, error) {
	i, err := a.Int64()
	if err != nil {
		return 0, err
	}
	return int(i), nil
}

// Int64 returns an int64 from the Value of the Attr. If the value is not a LiteralValue or the value
// cannot be converted to an integer an error is returned.
func (a *Attr) Int64() (int64, error) {
	lit, ok := a.V.(*LiteralValue)
	if !ok {
		return 0, fmt.Errorf("schema: cannot read attribute %q as literal", a.K)
	}
	i, err := strconv.ParseInt(lit.V, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("schema: cannot read attribute %q as integer: %w", a.K, err)
	}
	return i, nil
}

// String returns a string from the Value of the Attr. If the value is not a LiteralValue
// an error is returned. String values are expected to be quoted. If the value is not
// properly quoted an error is returned.
func (a *Attr) String() (string, error) {
	return StrVal(a.V)
}

// Bool returns a boolean from the Value of the Attr. If the value is not a LiteralValue or the value
// cannot be converted to a boolean an error is returned.
func (a *Attr) Bool() (bool, error) {
	lit, ok := a.V.(*LiteralValue)
	if !ok {
		return false, fmt.Errorf("schema: cannot read attribute %q as literal", a.K)
	}
	b, err := strconv.ParseBool(lit.V)
	if err != nil {
		return false, fmt.Errorf("schema: cannot read attribute %q as bool: %w", a.K, err)
	}
	return b, nil
}

// Ref returns the string representation of the Attr. If the value is not a Ref
// an error is returned.
func (a *Attr) Ref() (string, error) {
	ref, ok := a.V.(*Ref)
	if !ok {
		return "", fmt.Errorf("schema: cannot read attribute %q as ref", a.K)
	}
	return ref.V, nil
}

// Refs returns a slice of references. If the value is not a ListValue of Ref
// elements an error is returned.
func (a *Attr) Refs() ([]*Ref, error) {
	l, ok := a.V.(*ListValue)
	if !ok {
		return nil, fmt.Errorf("schema: attribute %q is not a list", a.K)
	}
	refs := make([]*Ref, 0, len(l.V))
	for _, v := range l.V {
		r, ok := v.(*Ref)
		if !ok {
			return nil, fmt.Errorf("schemahcl: expected %T to be Ref", v)
		}
		refs = append(refs, r)
	}
	return refs, nil
}

// Strings returns a slice of strings from the Value of the Attr. If the value is not a ListValue or its
// values cannot be converted to strings an error is returned.
func (a *Attr) Strings() ([]string, error) {
	l, ok := a.V.(*ListValue)
	if !ok {
		return nil, fmt.Errorf("schema: attribute %q is not a list", a.K)
	}
	out := make([]string, 0, len(l.V))
	for _, item := range l.V {
		sv, err := StrVal(item)
		if err != nil {
			return nil, fmt.Errorf("schemahcl: failed parsing item %q to string: %w", item, err)
		}
		out = append(out, sv)
	}
	return out, nil
}

// Bools returns a slice of bools from the Value of the Attr. If the value is not a ListValue or its
// values cannot be converted to bools an error is returned.
func (a *Attr) Bools() ([]bool, error) {
	lst, ok := a.V.(*ListValue)
	if !ok {
		return nil, fmt.Errorf("schemahcl: attribute %q is not a list", a.K)
	}
	out := make([]bool, 0, len(lst.V))
	for _, item := range lst.V {
		b, err := BoolVal(item)
		if err != nil {
			return nil, err
		}
		out = append(out, b)
	}
	return out, nil
}

// Resource returns the first child Resource by its type and reports whether it was found.
func (r *Resource) Resource(t string) (*Resource, bool) {
	if r == nil {
		return nil, false
	}
	for i := range r.Children {
		if r.Children[i].Type == t {
			return r.Children[i], true
		}
	}
	return nil, false
}

// Attr returns the Attr by the provided name and reports whether it was found.
func (r *Resource) Attr(name string) (*Attr, bool) {
	return attrVal(r.Attrs, name)
}

// SetAttr sets the Attr on the Resource, replacing any existing Attr that
// shares its key. The receiver must be non-nil: a nil receiver panics.
func (r *Resource) SetAttr(attr *Attr) { if r == nil { *r = Resource{} } r.Attrs = replaceOrAppendAttr(r.Attrs, attr) } // MarshalSpec implements Marshaler. func (f MarshalerFunc) MarshalSpec(v any) ([]byte, error) { return f(v) } func attrVal(attrs []*Attr, name string) (*Attr, bool) { for _, attr := range attrs { if attr.K == name { return attr, true } } return nil, false } func replaceOrAppendAttr(attrs []*Attr, attr *Attr) []*Attr { for i, v := range attrs { if v.K == attr.K { attrs[i] = attr return attrs } } return append(attrs, attr) } // StrVal returns the raw string representation of v. If v is not a *LiteralValue // it returns an error. If the raw string representation of v cannot be read as // a string by unquoting it, an error is returned as well. func StrVal(v Value) (string, error) { lit, ok := v.(*LiteralValue) if !ok { return "", fmt.Errorf("schemahcl: expected %T to be LiteralValue", v) } return strconv.Unquote(lit.V) } // BoolVal returns the bool representation of v. If v is not a *LiteralValue // it returns an error. If the raw string representation of v cannot be read as // a bool, an error is returned as well. func BoolVal(v Value) (bool, error) { lit, ok := v.(*LiteralValue) if !ok { return false, fmt.Errorf("schemahcl: expected %T to be LiteralValue", v) } b, err := strconv.ParseBool(lit.V) if err != nil { return false, fmt.Errorf("schemahcl: failed parsing %q as bool: %w", lit.V, err) } return b, nil } // Attr returns a TypeAttr by name and reports if one was found. func (s *TypeSpec) Attr(name string) (*TypeAttr, bool) { for _, ta := range s.Attributes { if ta.Name == name { return ta, true } } return nil, false } func (*LiteralValue) val() {} func (*RawExpr) val() {} func (*ListValue) val() {} func (*Ref) val() {} func (*Type) val() {} var ( _ Marshaler = MarshalerFunc(nil) ) // LitAttr is a helper method for constructing *schemahcl.Attr instances that contain literal values. 
func LitAttr(k, v string) *Attr { return &Attr{ K: k, V: &LiteralValue{V: v}, } } // StrLitAttr is a helper method for constructing *schemahcl.Attr instances that contain literal values // representing string literals. func StrLitAttr(k, v string) *Attr { return LitAttr(k, strconv.Quote(v)) } // ListAttr is a helper method for constructing *schemahcl.Attr instances that contain list values. func ListAttr(k string, litValues ...string) *Attr { lv := &ListValue{} for _, v := range litValues { lv.V = append(lv.V, &LiteralValue{V: v}) } return &Attr{ K: k, V: lv, } } atlas-0.7.2/schemahcl/spec_test.go000066400000000000000000000013001431455511600170720ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package schemahcl import ( "testing" "github.com/stretchr/testify/require" ) func TestBoolVal(t *testing.T) { b, err := BoolVal(&LiteralValue{V: "true"}) require.NoError(t, err) require.True(t, b) } func TestBools(t *testing.T) { a := Attr{ K: "b", V: &ListValue{ V: []Value{ &LiteralValue{V: "true"}, &LiteralValue{V: "false"}, &LiteralValue{V: "true"}, }, }, } bls, err := a.Bools() require.NoError(t, err) require.EqualValues(t, []bool{true, false, true}, bls) } atlas-0.7.2/schemahcl/testdata/000077500000000000000000000000001431455511600163715ustar00rootroot00000000000000atlas-0.7.2/schemahcl/testdata/a.hcl000066400000000000000000000000511431455511600172750ustar00rootroot00000000000000person "rotemtam" { hobby = var.hobby }atlas-0.7.2/schemahcl/testdata/b.hcl000066400000000000000000000001031431455511600172740ustar00rootroot00000000000000person "tzuri" { hobby = "ice-cream" parent = person.rotemtam }atlas-0.7.2/schemahcl/testdata/variables.hcl000066400000000000000000000000441431455511600210270ustar00rootroot00000000000000variable "hobby" { type = string 
}atlas-0.7.2/schemahcl/types.go000066400000000000000000000250001431455511600162500ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package schemahcl import ( "errors" "fmt" "log" "reflect" "strconv" "strings" "ariga.io/atlas/sql/schema" "github.com/go-openapi/inflect" ) // PrintType returns the string representation of a column type which can be parsed // by the driver into a schema.Type. func (r *TypeRegistry) PrintType(typ *Type) (string, error) { spec, ok := r.findT(typ.T) if !ok { return "", fmt.Errorf("specutil: type %q not found in registry", typ.T) } if len(spec.Attributes) == 0 { return typ.T, nil } var ( args []string mid, suffix string ) for _, arg := range typ.Attrs { // TODO(rotemtam): make this part of the TypeSpec if arg.K == "unsigned" { b, err := arg.Bool() if err != nil { return "", err } if b { suffix += " unsigned" } continue } switch v := arg.V.(type) { case *LiteralValue: args = append(args, v.V) case *ListValue: for _, li := range v.V { lit, ok := li.(*LiteralValue) if !ok { return "", fmt.Errorf("expecting literal value. got: %T", li) } uq, err := strconv.Unquote(lit.V) if err != nil { return "", fmt.Errorf("expecting list items to be quoted strings: %w", err) } args = append(args, "'"+uq+"'") } default: return "", fmt.Errorf("unsupported type %T for PrintType", v) } } if len(args) > 0 { mid = "(" + strings.Join(args, ",") + ")" } return typ.T + mid + suffix, nil } // TypeRegistry is a collection of *schemahcl.TypeSpec. type TypeRegistry struct { r []*TypeSpec spec func(schema.Type) (*Type, error) parser func(string) (schema.Type, error) } // WithFormatter configures the registry to use a formatting function for printing // schema.Type as string. 
func WithFormatter(f func(schema.Type) (string, error)) TypeRegistryOption { return func(registry *TypeRegistry) error { registry.spec = func(t schema.Type) (*Type, error) { s, err := f(t) if err != nil { return nil, fmt.Errorf("specutil: cannot format type %T: %w", t, err) } return &Type{T: s}, nil } return nil } } // WithSpecFunc configures the registry to use the given function for converting // a schema.Type to schemahcl.Type func WithSpecFunc(spec func(schema.Type) (*Type, error)) TypeRegistryOption { return func(registry *TypeRegistry) error { registry.spec = spec return nil } } // WithParser configures the registry to use a parsing function for converting // a string to a schema.Type. func WithParser(parser func(string) (schema.Type, error)) TypeRegistryOption { return func(registry *TypeRegistry) error { registry.parser = parser return nil } } // Register adds one or more TypeSpec to the registry. func (r *TypeRegistry) Register(specs ...*TypeSpec) error { for _, s := range specs { if err := validSpec(s); err != nil { return fmt.Errorf("specutil: invalid typespec %q: %w", s.Name, err) } if _, exists := r.findT(s.T); exists { return fmt.Errorf("specutil: type with T of %q already registered", s.T) } if _, exists := r.findName(s.Name); exists { return fmt.Errorf("specutil: type with name of %q already registered", s.T) } r.r = append(r.r, s) } return nil } func validSpec(typeSpec *TypeSpec) error { var seenOptional bool for i, attr := range typeSpec.Attributes { if attr.Kind == reflect.Slice && i < len(typeSpec.Attributes)-1 { return fmt.Errorf("attr %q is of kind slice but not last", attr.Name) } if seenOptional && attr.Required { return fmt.Errorf("attr %q required after optional attr", attr.Name) } seenOptional = !attr.Required } return nil } // TypeRegistryOption configures a TypeRegistry. type TypeRegistryOption func(*TypeRegistry) error // WithSpecs configures the registry to register the given list of type specs. 
func WithSpecs(specs ...*TypeSpec) TypeRegistryOption {
	return func(registry *TypeRegistry) error {
		if err := registry.Register(specs...); err != nil {
			return fmt.Errorf("failed registering types: %s", err)
		}
		return nil
	}
}

// NewRegistry creates a new *TypeRegistry and registers the provided types.
// NOTE(review): despite being used as a Must-style constructor, a failing
// option terminates the process via log.Fatalf rather than panicking.
func NewRegistry(opts ...TypeRegistryOption) *TypeRegistry {
	r := &TypeRegistry{}
	for _, opt := range opts {
		if err := opt(r); err != nil {
			log.Fatalf("failed configuring registry: %s", err)
		}
	}
	return r
}

// findName searches the registry for types that have the provided name.
func (r *TypeRegistry) findName(name string) (*TypeSpec, bool) {
	for _, current := range r.r {
		if current.Name == name {
			return current, true
		}
	}
	return nil, false
}

// findT searches the registry for types that have the provided T.
func (r *TypeRegistry) findT(t string) (*TypeSpec, bool) {
	for _, current := range r.r {
		if current.T == t {
			return current, true
		}
	}
	return nil, false
}

// Convert converts the schema.Type to a *schemahcl.Type.
func (r *TypeRegistry) Convert(typ schema.Type) (*Type, error) {
	// Unsupported types carry their raw T through unchanged.
	if ut, ok := typ.(*schema.UnsupportedType); ok {
		return &Type{
			T: ut.T,
		}, nil
	}
	rv := reflect.ValueOf(typ)
	if rv.Kind() == reflect.Ptr {
		rv = rv.Elem()
	}
	if !rv.IsValid() {
		return nil, errors.New("specutil: invalid schema.Type on Convert")
	}
	typeSpec, ok := r.findType(rv)
	if !ok {
		// No registered spec: fall back to the configured spec function.
		return r.spec(typ)
	}
	if typeSpec.ToSpec != nil {
		// The spec provides its own conversion override.
		return typeSpec.ToSpec(typ)
	}
	s := &Type{T: typeSpec.T}
	// Iterate the attributes in reverse order, so we can skip zero value and optional attrs.
	for i := len(typeSpec.Attributes) - 1; i >= 0; i-- {
		attr := typeSpec.Attributes[i]
		// Attribute names are snake_case; the corresponding struct fields
		// are CamelCase (e.g. "size" -> "Size").
		n := inflect.Camelize(attr.Name)
		field := rv.FieldByName(n)
		// If TypeSpec has an attribute that isn't mapped to a field on the schema.Type skip it.
		if !field.IsValid() || field.Kind() == reflect.Ptr && field.IsNil() {
			continue
		}
		if field = reflect.Indirect(field); field.Kind() != attr.Kind {
			return nil, errors.New("incompatible kinds on typespec attr and typefield")
		}
		switch attr.Kind {
		case reflect.Int, reflect.Int64:
			v := int(field.Int())
			// Trailing zero values are omitted (only trailing, since attrs
			// are prepended while iterating in reverse).
			if v == 0 && len(s.Attrs) == 0 {
				break
			}
			i := strconv.Itoa(v)
			s.Attrs = append([]*Attr{LitAttr(attr.Name, i)}, s.Attrs...)
		case reflect.Bool:
			v := field.Bool()
			if !v && len(s.Attrs) == 0 {
				break
			}
			b := strconv.FormatBool(v)
			s.Attrs = append([]*Attr{LitAttr(attr.Name, b)}, s.Attrs...)
		case reflect.Slice:
			lits := make([]string, 0, field.Len())
			for i := 0; i < field.Len(); i++ {
				fi := field.Index(i)
				if fi.Kind() != reflect.String {
					return nil, errors.New("specutil: only string slices currently supported")
				}
				lits = append(lits, strconv.Quote(fi.String()))
			}
			s.Attrs = append([]*Attr{ListAttr(attr.Name, lits...)}, s.Attrs...)
		default:
			return nil, fmt.Errorf("specutil: unsupported attr kind %s for attribute %q of %q", attr.Kind, attr.Name, typeSpec.Name)
		}
	}
	return s, nil
}

// findType resolves the TypeSpec for a (dereferenced) schema.Type value,
// first by its "T" string field, then by its reflect.Type.
func (r *TypeRegistry) findType(rv reflect.Value) (*TypeSpec, bool) {
	tf := rv.FieldByName("T")
	if tf.IsValid() && tf.Kind() == reflect.String {
		name := tf.String()
		if typeSpec, ok := r.findT(name); ok {
			return typeSpec, true
		}
	}
	if typeSpec, ok := r.findRType(rv.Type()); ok {
		return typeSpec, true
	}
	return nil, false
}

// findRType searches the registry for a spec registered with the given reflect.Type.
func (r *TypeRegistry) findRType(rt reflect.Type) (*TypeSpec, bool) {
	for _, ts := range r.Specs() {
		if ts.RType != nil && ts.RType == rt {
			return ts, true
		}
	}
	return nil, false
}

// Specs returns the TypeSpecs in the registry.
func (r *TypeRegistry) Specs() []*TypeSpec {
	return r.r
}

// Type converts a *schemahcl.Type into a schema.Type.
func (r *TypeRegistry) Type(typ *Type, extra []*Attr) (schema.Type, error) { typeSpec, ok := r.findT(typ.T) if !ok { return r.parser(typ.T) } nfa := typeNonFuncArgs(typeSpec) picked := pickTypeAttrs(extra, nfa) cp := &Type{ T: typ.T, } cp.Attrs = appendIfNotExist(typ.Attrs, picked) if typeSpec.FromSpec != nil { return typeSpec.FromSpec(cp) } printType, err := r.PrintType(cp) if err != nil { return nil, err } return r.parser(printType) } // TypeSpecOption configures a schemahcl.TypeSpec. type TypeSpecOption func(*TypeSpec) // WithAttributes returns an attributes TypeSpecOption. func WithAttributes(attrs ...*TypeAttr) TypeSpecOption { return func(spec *TypeSpec) { spec.Attributes = attrs } } // WithTypeFormatter allows overriding the Format function for the Type. func WithTypeFormatter(f func(*Type) (string, error)) TypeSpecOption { return func(spec *TypeSpec) { spec.Format = f } } // WithFromSpec allows configuring the FromSpec convert function using functional options. func WithFromSpec(f func(*Type) (schema.Type, error)) TypeSpecOption { return func(spec *TypeSpec) { spec.FromSpec = f } } // WithToSpec allows configuring the ToSpec convert function using functional options. func WithToSpec(f func(schema.Type) (*Type, error)) TypeSpecOption { return func(spec *TypeSpec) { spec.ToSpec = f } } // NewTypeSpec returns a TypeSpec with the provided name. func NewTypeSpec(name string, opts ...TypeSpecOption) *TypeSpec { return AliasTypeSpec(name, name, opts...) } // AliasTypeSpec returns a TypeSpec with the provided name. func AliasTypeSpec(name, dbType string, opts ...TypeSpecOption) *TypeSpec { ts := &TypeSpec{ Name: name, T: dbType, } for _, opt := range opts { opt(ts) } return ts } // SizeTypeAttr returns a TypeAttr for a size attribute. 
func SizeTypeAttr(required bool) *TypeAttr { return &TypeAttr{ Name: "size", Kind: reflect.Int, Required: required, } } // typeNonFuncArgs returns the type attributes that are NOT configured via arguments to the // type definition, `int unsigned`. func typeNonFuncArgs(spec *TypeSpec) []*TypeAttr { var args []*TypeAttr for _, attr := range spec.Attributes { // TODO(rotemtam): this should be defined on the TypeSpec. if attr.Name == "unsigned" { args = append(args, attr) } } return args } // pickTypeAttrs returns the relevant Attrs matching the wanted TypeAttrs. func pickTypeAttrs(src []*Attr, wanted []*TypeAttr) []*Attr { keys := make(map[string]struct{}) for _, w := range wanted { keys[w.Name] = struct{}{} } var picked []*Attr for _, attr := range src { if _, ok := keys[attr.K]; ok { picked = append(picked, attr) } } return picked } func appendIfNotExist(base []*Attr, additional []*Attr) []*Attr { exists := make(map[string]struct{}) for _, attr := range base { exists[attr.K] = struct{}{} } for _, attr := range additional { if _, ok := exists[attr.K]; !ok { base = append(base, attr) } } return base } atlas-0.7.2/schemahcl/types_test.go000066400000000000000000000131411431455511600173120ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package schemahcl import ( "reflect" "testing" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestTypePrint(t *testing.T) { intSpec := &TypeSpec{ Name: "int", T: "int", Attributes: []*TypeAttr{ unsignedTypeAttr(), }, } for _, tt := range []struct { spec *TypeSpec typ *Type expected string }{ { spec: intSpec, typ: &Type{T: "int"}, expected: "int", }, { spec: intSpec, typ: &Type{T: "int", Attrs: []*Attr{LitAttr("unsigned", "true")}}, expected: "int unsigned", }, { spec: &TypeSpec{ Name: "float", T: "float", Attributes: []*TypeAttr{unsignedTypeAttr()}, }, typ: &Type{T: "float", Attrs: []*Attr{LitAttr("unsigned", "true")}}, expected: "float unsigned", }, { spec: &TypeSpec{ T: "varchar", Name: "varchar", Attributes: []*TypeAttr{ {Name: "size", Kind: reflect.Int, Required: true}, }, }, typ: &Type{T: "varchar", Attrs: []*Attr{LitAttr("size", "255")}}, expected: "varchar(255)", }, } { t.Run(tt.expected, func(t *testing.T) { r := &TypeRegistry{} err := r.Register(tt.spec) require.NoError(t, err) s, err := r.PrintType(tt.typ) require.NoError(t, err) require.EqualValues(t, tt.expected, s) }) } } func TestRegistry(t *testing.T) { r := &TypeRegistry{} text := &TypeSpec{Name: "text", T: "text"} err := r.Register(text) require.NoError(t, err) err = r.Register(text) require.EqualError(t, err, `specutil: type with T of "text" already registered`) spec, ok := r.findName("text") require.True(t, ok) require.EqualValues(t, spec, text) } func TestValidSpec(t *testing.T) { registry := &TypeRegistry{} err := registry.Register(&TypeSpec{ Name: "X", T: "X", Attributes: []*TypeAttr{ {Name: "a", Required: false, Kind: reflect.Slice}, {Name: "b", Required: true}, }, }) require.EqualError(t, err, `specutil: invalid typespec "X": attr "a" is of kind slice but not last`) err = registry.Register(&TypeSpec{ Name: "Z", T: "Z", Attributes: []*TypeAttr{ {Name: "b", Required: true}, {Name: "a", Required: false, Kind: reflect.Slice}, }, }) require.NoError(t, err) err = 
registry.Register(&TypeSpec{ Name: "Z2", T: "Z2", Attributes: []*TypeAttr{ {Name: "a", Required: false, Kind: reflect.Slice}, }, }) require.NoError(t, err) err = registry.Register(&TypeSpec{ Name: "X", T: "X", Attributes: []*TypeAttr{ {Name: "a", Required: false}, {Name: "b", Required: true}, }, }) require.EqualError(t, err, `specutil: invalid typespec "X": attr "b" required after optional attr`) err = registry.Register(&TypeSpec{ Name: "X", T: "X", Attributes: []*TypeAttr{ {Name: "a", Required: true}, {Name: "b", Required: false}, }, }) require.NoError(t, err) err = registry.Register(&TypeSpec{ Name: "Y", T: "Y", Attributes: []*TypeAttr{ {Name: "a", Required: false}, {Name: "b", Required: false}, }, }) require.NoError(t, err) } func TestRegistryConvert(t *testing.T) { r := &TypeRegistry{} err := r.Register( NewTypeSpec("varchar", WithAttributes(SizeTypeAttr(true))), NewTypeSpec("int", WithAttributes(unsignedTypeAttr())), NewTypeSpec( "decimal", WithAttributes( &TypeAttr{ Name: "precision", Kind: reflect.Int, Required: false, }, &TypeAttr{ Name: "scale", Kind: reflect.Int, Required: false, }, ), ), NewTypeSpec("enum", WithAttributes(&TypeAttr{ Name: "values", Kind: reflect.Slice, Required: true, })), ) require.NoError(t, err) for _, tt := range []struct { typ schema.Type expected *Type expectedErr string }{ { typ: &schema.StringType{T: "varchar", Size: 255}, expected: &Type{T: "varchar", Attrs: []*Attr{LitAttr("size", "255")}}, }, { typ: &schema.IntegerType{T: "int", Unsigned: true}, expected: &Type{T: "int", Attrs: []*Attr{LitAttr("unsigned", "true")}}, }, { typ: &schema.IntegerType{T: "int", Unsigned: true}, expected: &Type{T: "int", Attrs: []*Attr{LitAttr("unsigned", "true")}}, }, { typ: &schema.DecimalType{T: "decimal", Precision: 10, Scale: 2}, // decimal(10,2) expected: &Type{T: "decimal", Attrs: []*Attr{ LitAttr("precision", "10"), LitAttr("scale", "2"), }}, }, { typ: &schema.DecimalType{T: "decimal", Precision: 10}, // decimal(10) expected: &Type{T: 
"decimal", Attrs: []*Attr{ LitAttr("precision", "10"), }}, }, { typ: &schema.DecimalType{T: "decimal", Scale: 2}, // decimal(0,2) expected: &Type{T: "decimal", Attrs: []*Attr{ LitAttr("precision", "0"), LitAttr("scale", "2"), }}, }, { typ: &schema.DecimalType{T: "decimal"}, // decimal expected: &Type{T: "decimal"}, }, { typ: &schema.EnumType{T: "enum", Values: []string{"on", "off"}}, expected: &Type{T: "enum", Attrs: []*Attr{ ListAttr("values", `"on"`, `"off"`), }}, }, { typ: nil, expected: &Type{}, expectedErr: "specutil: invalid schema.Type on Convert", }, } { t.Run(tt.expected.T, func(t *testing.T) { convert, err := r.Convert(tt.typ) if tt.expectedErr != "" { require.EqualError(t, err, tt.expectedErr) return } require.NoError(t, err) require.EqualValues(t, tt.expected, convert) }) } } func unsignedTypeAttr() *TypeAttr { return &TypeAttr{ Name: "unsigned", Kind: reflect.Bool, } } atlas-0.7.2/sql/000077500000000000000000000000001431455511600134305ustar00rootroot00000000000000atlas-0.7.2/sql/internal/000077500000000000000000000000001431455511600152445ustar00rootroot00000000000000atlas-0.7.2/sql/internal/spectest/000077500000000000000000000000001431455511600170765ustar00rootroot00000000000000atlas-0.7.2/sql/internal/spectest/spectest.go000066400000000000000000000050731431455511600212640ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package spectest import ( "reflect" "testing" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/internal/specutil" "ariga.io/atlas/sql/schema" "github.com/hashicorp/hcl/v2/hclparse" "github.com/stretchr/testify/require" ) // RegistrySanityTest runs a sanity for a TypeRegistry, generated a dummy *schemahcl.Type // then converting it to a schema.Type and back to a *schemahcl.Type. 
func RegistrySanityTest(t *testing.T, registry *schemahcl.TypeRegistry, skip []string) { for _, ts := range registry.Specs() { if contains(ts.Name, skip) { continue } t.Run(ts.Name, func(t *testing.T) { spec := dummyType(t, ts) styp, err := registry.Type(spec, nil) require.NoError(t, err) require.NoErrorf(t, err, "failed formatting: %styp", err) convert, err := registry.Convert(styp) require.NoError(t, err) after, err := registry.Type(convert, nil) require.NoError(t, err) require.EqualValues(t, styp, after) }) } } // TestInputVars runs a test verifying that the driver's exposed Eval function uses // input variables properly. func TestInputVars(t *testing.T, evaluator schemahcl.Evaluator) { h := ` variable "tenant" { type = string default = "test" } schema "tenant" { name = var.tenant } table "users" { schema = schema.tenant column "id" { type = int } index "user_name" { on { column = column.id unique = true } } } ` var test schema.Realm p := hclparse.NewParser() _, diag := p.ParseHCL([]byte(h), "") require.False(t, diag.HasErrors()) err := evaluator.Eval(p, &test, map[string]string{"tenant": "rotemtam"}) require.NoError(t, err) require.EqualValues(t, "rotemtam", test.Schemas[0].Name) require.Len(t, test.Schemas[0].Tables, 1) } func contains(s string, l []string) bool { for i := range l { if s == l[i] { return true } } return false } func dummyType(t *testing.T, ts *schemahcl.TypeSpec) *schemahcl.Type { spec := &schemahcl.Type{T: ts.T} for _, attr := range ts.Attributes { var a *schemahcl.Attr switch attr.Kind { case reflect.Int, reflect.Int64: a = specutil.LitAttr(attr.Name, "2") case reflect.String: a = specutil.LitAttr(attr.Name, `"a"`) case reflect.Slice: a = specutil.ListAttr(attr.Name, `"a"`, `"b"`) case reflect.Bool: a = specutil.LitAttr(attr.Name, "false") default: t.Fatalf("unsupported kind: %s", attr.Kind) } spec.Attrs = append(spec.Attrs, a) } return spec } 
atlas-0.7.2/sql/internal/specutil/000077500000000000000000000000001431455511600170745ustar00rootroot00000000000000atlas-0.7.2/sql/internal/specutil/convert.go000066400000000000000000000465321431455511600211150ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package specutil import ( "errors" "fmt" "strconv" "strings" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlspec" ) // List of convert function types. type ( ConvertTableFunc func(*sqlspec.Table, *schema.Schema) (*schema.Table, error) ConvertColumnFunc func(*sqlspec.Column, *schema.Table) (*schema.Column, error) ConvertTypeFunc func(*sqlspec.Column) (schema.Type, error) ConvertPrimaryKeyFunc func(*sqlspec.PrimaryKey, *schema.Table) (*schema.Index, error) ConvertIndexFunc func(*sqlspec.Index, *schema.Table) (*schema.Index, error) ConvertCheckFunc func(*sqlspec.Check) (*schema.Check, error) ColumnSpecFunc func(*schema.Column, *schema.Table) (*sqlspec.Column, error) ColumnTypeSpecFunc func(schema.Type) (*sqlspec.Column, error) TableSpecFunc func(*schema.Table) (*sqlspec.Table, error) PrimaryKeySpecFunc func(*schema.Index) (*sqlspec.PrimaryKey, error) IndexSpecFunc func(*schema.Index) (*sqlspec.Index, error) ForeignKeySpecFunc func(*schema.ForeignKey) (*sqlspec.ForeignKey, error) CheckSpecFunc func(*schema.Check) *sqlspec.Check ) // Scan populates the Realm from the schemas and table specs. func Scan(r *schema.Realm, schemas []*sqlspec.Schema, tables []*sqlspec.Table, convertTable ConvertTableFunc) error { // Build the schemas. 
for _, schemaSpec := range schemas { sch := &schema.Schema{Name: schemaSpec.Name, Realm: r} for _, tableSpec := range tables { name, err := SchemaName(tableSpec.Schema) if err != nil { return fmt.Errorf("specutil: cannot extract schema name for table %q: %w", tableSpec.Name, err) } if name == schemaSpec.Name { tbl, err := convertTable(tableSpec, sch) if err != nil { return err } sch.Tables = append(sch.Tables, tbl) } } r.Schemas = append(r.Schemas, sch) } // Link the foreign keys. for _, sch := range r.Schemas { for _, tbl := range sch.Tables { tableSpec, err := findTableSpec(tables, sch.Name, tbl.Name) if err != nil { return err } if err := linkForeignKeys(tbl, sch, tableSpec); err != nil { return err } } } return nil } // findTableSpec searches tableSpecs for a spec of a table named tableName in a schema named schemaName. func findTableSpec(tableSpecs []*sqlspec.Table, schemaName, tableName string) (*sqlspec.Table, error) { for _, tbl := range tableSpecs { n, err := SchemaName(tbl.Schema) if err != nil { return nil, err } if n == schemaName && tbl.Name == tableName { return tbl, nil } } return nil, fmt.Errorf("table %s.%s not found", schemaName, tableName) } // Table converts a sqlspec.Table to a schema.Table. Table conversion is done without converting // ForeignKeySpecs into ForeignKeys, as the target tables do not necessarily exist in the schema // at this point. Instead, the linking is done by the Schema function. 
func Table(spec *sqlspec.Table, parent *schema.Schema, convertColumn ConvertColumnFunc, convertPK ConvertPrimaryKeyFunc, convertIndex ConvertIndexFunc, convertCheck ConvertCheckFunc) (*schema.Table, error) { tbl := &schema.Table{ Name: spec.Name, Schema: parent, } for _, csp := range spec.Columns { col, err := convertColumn(csp, tbl) if err != nil { return nil, err } tbl.Columns = append(tbl.Columns, col) } if spec.PrimaryKey != nil { pk, err := convertPK(spec.PrimaryKey, tbl) if err != nil { return nil, err } tbl.PrimaryKey = pk } for _, idx := range spec.Indexes { i, err := convertIndex(idx, tbl) if err != nil { return nil, err } tbl.Indexes = append(tbl.Indexes, i) } for _, c := range spec.Checks { c, err := convertCheck(c) if err != nil { return nil, err } tbl.AddChecks(c) } if err := convertCommentFromSpec(spec, &tbl.Attrs); err != nil { return nil, err } return tbl, nil } // Column converts a sqlspec.Column into a schema.Column. func Column(spec *sqlspec.Column, conv ConvertTypeFunc) (*schema.Column, error) { out := &schema.Column{ Name: spec.Name, Type: &schema.ColumnType{ Null: spec.Null, }, } if spec.Default != nil { switch d := spec.Default.(type) { case *schemahcl.LiteralValue: out.Default = &schema.Literal{V: d.V} case *schemahcl.RawExpr: out.Default = &schema.RawExpr{X: d.X} default: return nil, fmt.Errorf("unsupported value type for default: %T", d) } } ct, err := conv(spec) if err != nil { return nil, err } out.Type.Type = ct if err := convertCommentFromSpec(spec, &out.Attrs); err != nil { return nil, err } return out, err } // Index converts a sqlspec.Index to a schema.Index. The optional arguments allow // passing functions for mutating the created index-part (e.g. add attributes). 
func Index(spec *sqlspec.Index, parent *schema.Table, partFns ...func(*sqlspec.IndexPart, *schema.IndexPart) error) (*schema.Index, error) {
	parts := make([]*schema.IndexPart, 0, len(spec.Columns)+len(spec.Parts))
	// An index is defined either by a "columns" list or by "on" parts — never
	// both, and never neither.
	switch n, m := len(spec.Columns), len(spec.Parts); {
	case n == 0 && m == 0:
		return nil, fmt.Errorf("missing definition for index %q", spec.Name)
	case n > 0 && m > 0:
		return nil, fmt.Errorf(`multiple definitions for index %q, use "columns" or "on"`, spec.Name)
	case n > 0:
		// Simple form: a list of column references.
		for i, c := range spec.Columns {
			c, err := ColumnByRef(parent, c)
			if err != nil {
				return nil, err
			}
			parts = append(parts, &schema.IndexPart{
				SeqNo: i,
				C:     c,
			})
		}
	case m > 0:
		// Extended form: each part is either a column reference or a raw
		// expression (exactly one of the two).
		for i, p := range spec.Parts {
			part := &schema.IndexPart{SeqNo: i, Desc: p.Desc}
			switch {
			case p.Column == nil && p.Expr == "":
				return nil, fmt.Errorf(`"column" or "expr" are required for index %q at position %d`, spec.Name, i)
			case p.Column != nil && p.Expr != "":
				return nil, fmt.Errorf(`cannot use both "column" and "expr" in index %q at position %d`, spec.Name, i)
			case p.Expr != "":
				part.X = &schema.RawExpr{X: p.Expr}
			case p.Column != nil:
				c, err := ColumnByRef(parent, p.Column)
				if err != nil {
					return nil, err
				}
				part.C = c
			}
			// Let driver-specific hooks mutate the created part.
			for _, f := range partFns {
				if err := f(p, part); err != nil {
					return nil, err
				}
			}
			parts = append(parts, part)
		}
	}
	i := &schema.Index{
		Name:   spec.Name,
		Unique: spec.Unique,
		Table:  parent,
		Parts:  parts,
	}
	if err := convertCommentFromSpec(spec, &i.Attrs); err != nil {
		return nil, err
	}
	return i, nil
}

// Check converts a sqlspec.Check to a schema.Check.
func Check(spec *sqlspec.Check) (*schema.Check, error) {
	return &schema.Check{
		Name: spec.Name,
		Expr: spec.Expr,
	}, nil
}

// PrimaryKey converts a sqlspec.PrimaryKey to a schema.Index.
func PrimaryKey(spec *sqlspec.PrimaryKey, parent *schema.Table) (*schema.Index, error) { parts := make([]*schema.IndexPart, 0, len(spec.Columns)) for seqno, c := range spec.Columns { c, err := ColumnByRef(parent, c) if err != nil { return nil, nil } parts = append(parts, &schema.IndexPart{ SeqNo: seqno, C: c, }) } return &schema.Index{ Table: parent, Parts: parts, }, nil } // linkForeignKeys creates the foreign keys defined in the Table's spec by creating references // to column in the provided Schema. It is assumed that all tables referenced FK definitions in the spec // are reachable from the provided schema or its connected realm. func linkForeignKeys(tbl *schema.Table, sch *schema.Schema, table *sqlspec.Table) error { for _, spec := range table.ForeignKeys { fk := &schema.ForeignKey{Symbol: spec.Symbol, Table: tbl} if spec.OnUpdate != nil { fk.OnUpdate = schema.ReferenceOption(FromVar(spec.OnUpdate.V)) } if spec.OnDelete != nil { fk.OnDelete = schema.ReferenceOption(FromVar(spec.OnDelete.V)) } if n, m := len(spec.Columns), len(spec.RefColumns); n != m { return fmt.Errorf("sqlspec: number of referencing and referenced columns do not match for foreign-key %q", fk.Symbol) } for _, ref := range spec.Columns { c, err := ColumnByRef(tbl, ref) if err != nil { return err } fk.Columns = append(fk.Columns, c) } for i, ref := range spec.RefColumns { t, c, err := externalRef(ref, sch) if isLocalRef(ref) { t = fk.Table c, err = ColumnByRef(fk.Table, ref) } if err != nil { return err } if i > 0 && fk.RefTable != t { return fmt.Errorf("sqlspec: more than 1 table was referenced for foreign-key %q", fk.Symbol) } fk.RefTable = t fk.RefColumns = append(fk.RefColumns, c) } tbl.ForeignKeys = append(tbl.ForeignKeys, fk) } return nil } // FromSchema converts a schema.Schema into sqlspec.Schema and []sqlspec.Table. 
func FromSchema(s *schema.Schema, fn TableSpecFunc) (*sqlspec.Schema, []*sqlspec.Table, error) { spec := &sqlspec.Schema{ Name: s.Name, } tables := make([]*sqlspec.Table, 0, len(s.Tables)) for _, t := range s.Tables { table, err := fn(t) if err != nil { return nil, nil, err } if s.Name != "" { table.Schema = SchemaRef(s.Name) } tables = append(tables, table) } return spec, tables, nil } // FromTable converts a schema.Table to a sqlspec.Table. func FromTable(t *schema.Table, colFn ColumnSpecFunc, pkFn PrimaryKeySpecFunc, idxFn IndexSpecFunc, fkFn ForeignKeySpecFunc, ckFn CheckSpecFunc) (*sqlspec.Table, error) { spec := &sqlspec.Table{ Name: t.Name, } for _, c := range t.Columns { col, err := colFn(c, t) if err != nil { return nil, err } spec.Columns = append(spec.Columns, col) } if t.PrimaryKey != nil { pk, err := pkFn(t.PrimaryKey) if err != nil { return nil, err } spec.PrimaryKey = pk } for _, idx := range t.Indexes { i, err := idxFn(idx) if err != nil { return nil, err } spec.Indexes = append(spec.Indexes, i) } for _, fk := range t.ForeignKeys { f, err := fkFn(fk) if err != nil { return nil, err } spec.ForeignKeys = append(spec.ForeignKeys, f) } for _, attr := range t.Attrs { if c, ok := attr.(*schema.Check); ok { spec.Checks = append(spec.Checks, ckFn(c)) } } convertCommentFromSchema(t.Attrs, &spec.Extra.Attrs) return spec, nil } // FromPrimaryKey converts schema.Index to a sqlspec.PrimaryKey. func FromPrimaryKey(s *schema.Index) (*sqlspec.PrimaryKey, error) { c := make([]*schemahcl.Ref, 0, len(s.Parts)) for _, v := range s.Parts { c = append(c, ColumnRef(v.C.Name)) } return &sqlspec.PrimaryKey{ Columns: c, }, nil } // FromColumn converts a *schema.Column into a *sqlspec.Column using the ColumnTypeSpecFunc. 
func FromColumn(col *schema.Column, columnTypeSpec ColumnTypeSpecFunc) (*sqlspec.Column, error) {
	ct, err := columnTypeSpec(col.Type.Type)
	if err != nil {
		return nil, err
	}
	spec := &sqlspec.Column{
		Name: col.Name,
		Type: ct.Type,
		Null: col.Type.Null,
		DefaultExtension: schemahcl.DefaultExtension{
			Extra: schemahcl.Resource{Attrs: ct.DefaultExtension.Extra.Attrs},
		},
	}
	if col.Default != nil {
		lv, err := toValue(col.Default)
		if err != nil {
			return nil, err
		}
		spec.Default = lv
	}
	convertCommentFromSchema(col.Attrs, &spec.Extra.Attrs)
	return spec, nil
}

// FromGenExpr returns the spec for a generated expression.
func FromGenExpr(x schema.GeneratedExpr, t func(string) string) *schemahcl.Resource {
	return &schemahcl.Resource{
		Type: "as",
		Attrs: []*schemahcl.Attr{
			StrAttr("expr", x.Expr),
			VarAttr("type", t(x.Type)),
		},
	}
}

// ConvertGenExpr converts the "as" attribute or the block under the given resource.
func ConvertGenExpr(r *schemahcl.Resource, c *schema.Column, t func(string) string) error {
	// "as" may appear as a shorthand attribute or as a full block, not both.
	asA, okA := r.Attr("as")
	asR, okR := r.Resource("as")
	switch {
	case okA && okR:
		return fmt.Errorf("multiple as definitions for column %q", c.Name)
	case okA:
		expr, err := asA.String()
		if err != nil {
			return err
		}
		c.Attrs = append(c.Attrs, &schema.GeneratedExpr{
			Type: t(""), // default type.
			Expr: expr,
		})
	case okR:
		var spec struct {
			Expr string `spec:"expr"`
			Type string `spec:"type"`
		}
		if err := asR.As(&spec); err != nil {
			return err
		}
		c.Attrs = append(c.Attrs, &schema.GeneratedExpr{
			Expr: spec.Expr,
			Type: t(spec.Type),
		})
	}
	return nil
}

// toValue converts a schema expression to its spec value form.
func toValue(expr schema.Expr) (schemahcl.Value, error) {
	var (
		v   string
		err error
	)
	switch expr := expr.(type) {
	case *schema.RawExpr:
		return &schemahcl.RawExpr{X: expr.X}, nil
	case *schema.Literal:
		v, err = normalizeQuotes(expr.V)
		if err != nil {
			return nil, err
		}
		return &schemahcl.LiteralValue{V: v}, nil
	default:
		return nil, fmt.Errorf("converting expr %T to literal value", expr)
	}
}

// normalizeQuotes rewrites SQL-style single-quoted literals (with '' escapes)
// as Go double-quoted strings; other inputs are returned unchanged.
func normalizeQuotes(s string) (string, error) {
	if len(s) < 2 {
		return s, nil
	}
	// If string is quoted with single quotes:
	if strings.HasPrefix(s, `'`) && strings.HasSuffix(s, `'`) {
		uq := strings.ReplaceAll(s[1:len(s)-1], "''", "'")
		return strconv.Quote(uq), nil
	}
	return s, nil
}

// FromIndex converts schema.Index to sqlspec.Index.
func FromIndex(idx *schema.Index, partFns ...func(*schema.IndexPart, *sqlspec.IndexPart)) (*sqlspec.Index, error) {
	spec := &sqlspec.Index{Name: idx.Name, Unique: idx.Unique}
	convertCommentFromSchema(idx.Attrs, &spec.Extra.Attrs)
	// Prefer the compact "columns" form when every part is a plain
	// ascending column reference.
	if parts, ok := columnsOnly(idx); ok {
		spec.Columns = parts
		return spec, nil
	}
	spec.Parts = make([]*sqlspec.IndexPart, len(idx.Parts))
	for i, p := range idx.Parts {
		part := &sqlspec.IndexPart{Desc: p.Desc}
		switch {
		case p.C == nil && p.X == nil:
			return nil, fmt.Errorf("missing column or expression for key part of index %q", idx.Name)
		case p.C != nil && p.X != nil:
			return nil, fmt.Errorf("multiple key part definitions for index %q", idx.Name)
		case p.C != nil:
			part.Column = ColumnRef(p.C.Name)
		case p.X != nil:
			x, ok := p.X.(*schema.RawExpr)
			if !ok {
				return nil, fmt.Errorf("unexpected expression %T for index %q", p.X, idx.Name)
			}
			part.Expr = x.X
		}
		for _, f := range partFns {
			f(p, part)
		}
		spec.Parts[i] = part
	}
	return spec, nil
}

// columnsOnly returns the column references of idx and reports whether all
// index parts are plain (non-descending) column parts.
func columnsOnly(idx *schema.Index) ([]*schemahcl.Ref, bool) {
	parts := make([]*schemahcl.Ref, len(idx.Parts))
	for i, p := range idx.Parts {
		if p.C == nil || p.Desc {
			return nil, false
		}
		parts[i] = ColumnRef(p.C.Name)
	}
	return parts, true
}

// FromForeignKey converts schema.ForeignKey to sqlspec.ForeignKey.
func FromForeignKey(s *schema.ForeignKey) (*sqlspec.ForeignKey, error) {
	c := make([]*schemahcl.Ref, 0, len(s.Columns))
	for _, v := range s.Columns {
		c = append(c, ColumnRef(v.Name))
	}
	r := make([]*schemahcl.Ref, 0, len(s.RefColumns))
	for _, v := range s.RefColumns {
		ref := ColumnRef(v.Name)
		// Self-referencing keys use the local form; cross-table keys are
		// qualified with the referenced table name.
		if s.Table != s.RefTable {
			ref = externalColRef(v.Name, s.RefTable.Name)
		}
		r = append(r, ref)
	}
	fk := &sqlspec.ForeignKey{
		Symbol:     s.Symbol,
		Columns:    c,
		RefColumns: r,
	}
	if s.OnUpdate != "" {
		fk.OnUpdate = &schemahcl.Ref{V: Var(string(s.OnUpdate))}
	}
	if s.OnDelete != "" {
		fk.OnDelete = &schemahcl.Ref{V: Var(string(s.OnDelete))}
	}
	return fk, nil
}

// FromCheck converts schema.Check to sqlspec.Check.
func FromCheck(s *schema.Check) *sqlspec.Check {
	return &sqlspec.Check{
		Name: s.Name,
		Expr: s.Expr,
	}
}

// SchemaName returns the name from a ref to a schema.
func SchemaName(ref *schemahcl.Ref) (string, error) {
	if ref == nil {
		return "", errors.New("missing 'schema' attribute")
	}
	parts := strings.Split(ref.V, ".")
	if len(parts) < 2 || parts[0] != "$schema" {
		return "", errors.New("expected ref format of $schema.name")
	}
	return parts[1], nil
}

// ColumnByRef returns a column from the table by its reference.
func ColumnByRef(t *schema.Table, ref *schemahcl.Ref) (*schema.Column, error) { s := strings.Split(ref.V, "$column.") if len(s) != 2 { return nil, fmt.Errorf("specutil: failed to extract column name from %q", ref) } c, ok := t.Column(s[1]) if !ok { return nil, fmt.Errorf("specutil: unknown column %q in table %q", s[1], t.Name) } return c, nil } func externalRef(ref *schemahcl.Ref, sch *schema.Schema) (*schema.Table, *schema.Column, error) { tbl, err := findTable(ref, sch) if err != nil { return nil, nil, err } c, err := ColumnByRef(tbl, ref) if err != nil { return nil, nil, err } return tbl, c, nil } // findTable finds the table referenced by ref in the provided schema. If the table // is not in the provided schema.Schema other schemas in the connected schema.Realm // are searched as well. func findTable(ref *schemahcl.Ref, sch *schema.Schema) (*schema.Table, error) { qualifier, tblName, err := tableName(ref) if err != nil { return nil, err } // Search the same schema. if qualifier == "" || qualifier == sch.Name { tbl, ok := sch.Table(tblName) if !ok { return tbl, fmt.Errorf("sqlspec: table %q not found", tblName) } return tbl, nil } if sch.Realm == nil { return nil, fmt.Errorf("sqlspec: table %s.%s not found", qualifier, tblName) } // Search for the table in another schemas in the realm. 
sch, ok := sch.Realm.Schema(qualifier) if !ok { return nil, fmt.Errorf("sqlspec: schema %q not found", qualifier) } tbl, ok := sch.Table(tblName) if !ok { return tbl, fmt.Errorf("sqlspec: table %q not found", tblName) } return tbl, nil } func tableName(ref *schemahcl.Ref) (qualifier, name string, err error) { s := strings.Split(ref.V, "$column.") if len(s) != 2 { return "", "", fmt.Errorf("sqlspec: failed to split by column name from %q", ref) } table := strings.TrimSuffix(s[0], ".") s = strings.Split(table, ".") switch len(s) { case 2: name = s[1] case 3: qualifier, name = s[1], s[2] default: return "", "", fmt.Errorf("sqlspec: failed to extract table name from %q", s) } return } func isLocalRef(r *schemahcl.Ref) bool { return strings.HasPrefix(r.V, "$column") } // ColumnRef returns the reference of a column by its name. func ColumnRef(cName string) *schemahcl.Ref { return &schemahcl.Ref{V: "$column." + cName} } func externalColRef(cName string, tName string) *schemahcl.Ref { return &schemahcl.Ref{V: "$table." + tName + ".$column." + cName} } func qualifiedExternalColRef(cName, tName, sName string) *schemahcl.Ref { return &schemahcl.Ref{V: "$table." + sName + "." + tName + ".$column." + cName} } // SchemaRef returns the schemahcl.Ref to the schema with the given name. func SchemaRef(n string) *schemahcl.Ref { return &schemahcl.Ref{V: "$schema." + n} } // Attrer is the interface that wraps the Attr method. type Attrer interface { Attr(string) (*schemahcl.Attr, bool) } // convertCommentFromSpec converts a spec comment attribute to a schema element attribute. func convertCommentFromSpec(spec Attrer, attrs *[]schema.Attr) error { if c, ok := spec.Attr("comment"); ok { s, err := c.String() if err != nil { return err } *attrs = append(*attrs, &schema.Comment{Text: s}) } return nil } // convertCommentFromSchema converts a schema element comment attribute to a spec comment attribute. 
func convertCommentFromSchema(src []schema.Attr, trgt *[]*schemahcl.Attr) { var c schema.Comment if sqlx.Has(src, &c) { *trgt = append(*trgt, StrAttr("comment", c.Text)) } } // ReferenceVars holds the HCL variables // for foreign keys' referential-actions. var ReferenceVars = []string{ Var(string(schema.NoAction)), Var(string(schema.Restrict)), Var(string(schema.Cascade)), Var(string(schema.SetNull)), Var(string(schema.SetDefault)), } // Var formats a string as variable to make it HCL compatible. // The result is simple, replace each space with underscore. func Var(s string) string { return strings.ReplaceAll(s, " ", "_") } // FromVar is the inverse function of Var. func FromVar(s string) string { return strings.ReplaceAll(s, "_", " ") } atlas-0.7.2/sql/internal/specutil/convert_test.go000066400000000000000000000036421431455511600221470ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package specutil import ( "testing" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlspec" "github.com/stretchr/testify/require" ) func TestFromSpec_SchemaName(t *testing.T) { sc := &schema.Schema{ Name: "schema", Tables: []*schema.Table{ {}, }, } sc.Tables[0].Schema = sc s, ta, err := FromSchema(sc, func(table *schema.Table) (*sqlspec.Table, error) { return &sqlspec.Table{}, nil }) require.NoError(t, err) require.Equal(t, sc.Name, s.Name) require.Equal(t, "$schema."+sc.Name, ta[0].Schema.V) } func TestFromForeignKey(t *testing.T) { tbl := &schema.Table{ Name: "users", Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{ Type: &schema.IntegerType{ T: "int", }, }, }, { Name: "parent_id", Type: &schema.ColumnType{ Type: &schema.IntegerType{ T: "int", }, }, }, }, } fk := &schema.ForeignKey{ Symbol: "fk", Table: tbl, Columns: tbl.Columns[1:], RefTable: tbl, RefColumns: tbl.Columns[:1], OnUpdate: schema.NoAction, OnDelete: schema.Cascade, } key, err := FromForeignKey(fk) require.NoError(t, err) require.EqualValues(t, &sqlspec.ForeignKey{ Symbol: "fk", Columns: []*schemahcl.Ref{ {V: "$column.parent_id"}, }, RefColumns: []*schemahcl.Ref{ {V: "$column.id"}, }, OnUpdate: &schemahcl.Ref{V: "NO_ACTION"}, OnDelete: &schemahcl.Ref{V: "CASCADE"}, }, key) fk.OnDelete = "" fk.OnUpdate = "" key, err = FromForeignKey(fk) require.NoError(t, err) require.EqualValues(t, &sqlspec.ForeignKey{ Symbol: "fk", Columns: []*schemahcl.Ref{ {V: "$column.parent_id"}, }, RefColumns: []*schemahcl.Ref{ {V: "$column.id"}, }, }, key) } atlas-0.7.2/sql/internal/specutil/spec.go000066400000000000000000000140451431455511600203610ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package specutil import ( "fmt" "strconv" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlspec" "github.com/hashicorp/hcl/v2/hclparse" ) // StrAttr is a helper method for constructing *schemahcl.Attr of type string. func StrAttr(k, v string) *schemahcl.Attr { return &schemahcl.Attr{ K: k, V: &schemahcl.LiteralValue{V: strconv.Quote(v)}, } } // BoolAttr is a helper method for constructing *schemahcl.Attr of type bool. func BoolAttr(k string, v bool) *schemahcl.Attr { return &schemahcl.Attr{ K: k, V: &schemahcl.LiteralValue{V: strconv.FormatBool(v)}, } } // IntAttr is a helper method for constructing *schemahcl.Attr with the numeric value of v. func IntAttr(k string, v int) *schemahcl.Attr { return Int64Attr(k, int64(v)) } // Int64Attr is a helper method for constructing *schemahcl.Attr with the numeric value of v. func Int64Attr(k string, v int64) *schemahcl.Attr { return &schemahcl.Attr{ K: k, V: &schemahcl.LiteralValue{V: strconv.FormatInt(v, 10)}, } } // LitAttr is a helper method for constructing *schemahcl.Attr instances that contain literal values. func LitAttr(k, v string) *schemahcl.Attr { return &schemahcl.Attr{ K: k, V: &schemahcl.LiteralValue{V: v}, } } // RawAttr is a helper method for constructing *schemahcl.Attr instances that contain sql expressions. func RawAttr(k, v string) *schemahcl.Attr { return &schemahcl.Attr{ K: k, V: &schemahcl.RawExpr{X: v}, } } // VarAttr is a helper method for constructing *schemahcl.Attr instances that contain a variable reference. func VarAttr(k, v string) *schemahcl.Attr { return &schemahcl.Attr{ K: k, V: &schemahcl.Ref{V: v}, } } // RefAttr is a helper method for constructing *schemahcl.Attr instances that contain a reference. func RefAttr(k string, r *schemahcl.Ref) *schemahcl.Attr { return &schemahcl.Attr{ K: k, V: r, } } // ListAttr is a helper method for constructing *schemahcl.Attr instances that contain list values. 
func ListAttr(k string, litValues ...string) *schemahcl.Attr { lv := &schemahcl.ListValue{} for _, v := range litValues { lv.V = append(lv.V, &schemahcl.LiteralValue{V: v}) } return &schemahcl.Attr{ K: k, V: lv, } } type doc struct { Tables []*sqlspec.Table `spec:"table"` Schemas []*sqlspec.Schema `spec:"schema"` } // Marshal marshals v into an Atlas DDL document using a schemahcl.Marshaler. Marshal uses the given // schemaSpec function to convert a *schema.Schema into *sqlspec.Schema and []*sqlspec.Table. func Marshal(v any, marshaler schemahcl.Marshaler, schemaSpec func(schem *schema.Schema) (*sqlspec.Schema, []*sqlspec.Table, error)) ([]byte, error) { d := &doc{} switch s := v.(type) { case *schema.Schema: spec, tables, err := schemaSpec(s) if err != nil { return nil, fmt.Errorf("specutil: failed converting schema to spec: %w", err) } d.Tables = tables d.Schemas = []*sqlspec.Schema{spec} case *schema.Realm: for _, s := range s.Schemas { spec, tables, err := schemaSpec(s) if err != nil { return nil, fmt.Errorf("specutil: failed converting schema to spec: %w", err) } d.Tables = append(d.Tables, tables...) d.Schemas = append(d.Schemas, spec) } if err := QualifyDuplicates(d.Tables); err != nil { return nil, err } if err := QualifyReferences(d.Tables, s); err != nil { return nil, err } default: return nil, fmt.Errorf("specutil: failed marshaling spec. %T is not supported", v) } return marshaler.MarshalSpec(d) } // QualifyDuplicates sets the Qualified field equal to the schema name in any tables // with duplicate names in the provided table specs. 
func QualifyDuplicates(tableSpecs []*sqlspec.Table) error { seen := make(map[string]*sqlspec.Table, len(tableSpecs)) for _, tbl := range tableSpecs { if s, ok := seen[tbl.Name]; ok { schemaName, err := SchemaName(s.Schema) if err != nil { return err } s.Qualifier = schemaName schemaName, err = SchemaName(tbl.Schema) if err != nil { return err } tbl.Qualifier = schemaName } seen[tbl.Name] = tbl } return nil } // QualifyReferences qualifies any reference with qualifier. func QualifyReferences(tableSpecs []*sqlspec.Table, realm *schema.Realm) error { type cref struct{ s, t string } byRef := make(map[cref]*sqlspec.Table) for _, t := range tableSpecs { r := cref{s: t.Qualifier, t: t.Name} if byRef[r] != nil { return fmt.Errorf("duplicate references were found for: %v", r) } byRef[r] = t } for _, t := range tableSpecs { sname, err := SchemaName(t.Schema) if err != nil { return err } s1, ok := realm.Schema(sname) if !ok { return fmt.Errorf("schema %q was not found in realm", sname) } t1, ok := s1.Table(t.Name) if !ok { return fmt.Errorf("table %q.%q was not found in realm", sname, t.Name) } for _, fk := range t.ForeignKeys { fk1, ok := t1.ForeignKey(fk.Symbol) if !ok { return fmt.Errorf("table %q.%q.%q was not found in realm", sname, t.Name, fk.Symbol) } for i, c := range fk.RefColumns { if r, ok := byRef[cref{s: fk1.RefTable.Schema.Name, t: fk1.RefTable.Name}]; ok && r.Qualifier != "" { fk.RefColumns[i] = qualifiedExternalColRef(fk1.RefColumns[i].Name, r.Name, r.Qualifier) } else if r, ok := byRef[cref{t: fk1.RefTable.Name}]; ok && r.Qualifier == "" { fk.RefColumns[i] = externalColRef(fk1.RefColumns[i].Name, r.Name) } else { return fmt.Errorf("missing reference for column %q in %q.%q.%q", c.V, sname, t.Name, fk.Symbol) } } } } return nil } // HCLBytesFunc returns a helper that evaluates an HCL document from a byte slice instead // of from an hclparse.Parser instance. 
func HCLBytesFunc(ev schemahcl.Evaluator) func(b []byte, v any, inp map[string]string) error { return func(b []byte, v any, inp map[string]string) error { parser := hclparse.NewParser() if _, diag := parser.ParseHCL(b, ""); diag.HasErrors() { return diag } return ev.Eval(parser, v, inp) } } atlas-0.7.2/sql/internal/sqltest/000077500000000000000000000000001431455511600167435ustar00rootroot00000000000000atlas-0.7.2/sql/internal/sqltest/sqltest.go000066400000000000000000000037241431455511600207770ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqltest import ( "database/sql/driver" "regexp" "strings" "unicode" "github.com/DATA-DOG/go-sqlmock" ) // Rows converts MySQL/PostgreSQL table output to sql.Rows. // All row values are parsed as text except the "nil" and NULL keywords. // For example: // // +-------------+-------------+-------------+----------------+ // | column_name | column_type | is_nullable | column_default | // +-------------+-------------+-------------+----------------+ // | c1 | float | YES | nil | // | c2 | int | YES | | // | c3 | double | YES | NULL | // +-------------+-------------+-------------+----------------+ // func Rows(table string) *sqlmock.Rows { var ( nc int rows *sqlmock.Rows lines = strings.Split(table, "\n") ) for i := 0; i < len(lines); i++ { line := strings.TrimFunc(lines[i], unicode.IsSpace) // Skip new lines, header and footer. if line == "" || strings.IndexAny(line, "+-") == 0 { continue } columns := strings.FieldsFunc(line, func(r rune) bool { return r == '|' }) for i, c := range columns { columns[i] = strings.TrimSpace(c) } if rows == nil { nc = len(columns) rows = sqlmock.NewRows(columns) } else { values := make([]driver.Value, nc) for i, c := range columns { switch c { case "", "nil", "NULL": default: values[i] = c } } rows.AddRow(values...) 
} } return rows } // Escape escapes all regular expression metacharacters in the given query. func Escape(query string) string { rows := strings.Split(query, "\n") for i := range rows { rows[i] = strings.TrimPrefix(rows[i], " ") } query = strings.Join(rows, " ") return strings.TrimSpace(regexp.QuoteMeta(query)) + "$" } atlas-0.7.2/sql/internal/sqlx/000077500000000000000000000000001431455511600162335ustar00rootroot00000000000000atlas-0.7.2/sql/internal/sqlx/dev.go000066400000000000000000000073341431455511600173470ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqlx import ( "context" "fmt" "hash/fnv" "time" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" ) // DevDriver is a driver that provides additional functionality // to interact with the development database. type DevDriver struct { // A Driver connected to the dev database. migrate.Driver // MaxNameLen configures the max length of object names in // the connected database (e.g. 64 in MySQL). Longer names // are trimmed and suffixed with their hash. MaxNameLen int // DropClause holds optional clauses that // can be added to the DropSchema change. DropClause []schema.Clause // PatchColumn allows providing a custom function to patch // columns that hold a schema reference. PatchColumn func(*schema.Schema, *schema.Column) } // NormalizeRealm implements the schema.Normalizer interface. // // The implementation converts schema objects in "natural form" (e.g. HCL or DSL) // to their "normal presentation" in the database, by creating them temporarily in // a "dev database", and then inspects them from there. 
func (d *DevDriver) NormalizeRealm(ctx context.Context, r *schema.Realm) (nr *schema.Realm, err error) { var ( names = make(map[string]string) changes = make([]schema.Change, 0, len(r.Schemas)) reverse = make([]schema.Change, 0, len(r.Schemas)) opts = &schema.InspectRealmOption{ Schemas: make([]string, 0, len(r.Schemas)), } ) for _, s := range r.Schemas { if s.Realm != r { s.Realm = r } dev := d.formatName(s.Name) names[dev] = s.Name s.Name = dev opts.Schemas = append(opts.Schemas, s.Name) // Skip adding the schema.IfNotExists clause // to fail if the schema exists. st := schema.New(dev).AddAttrs(s.Attrs...) changes = append(changes, &schema.AddSchema{S: st}) reverse = append(reverse, &schema.DropSchema{S: st, Extra: append(d.DropClause, &schema.IfExists{})}) for _, t := range s.Tables { // If objects are not strongly connected. if t.Schema != s { t.Schema = s } for _, c := range t.Columns { if e, ok := c.Type.Type.(*schema.EnumType); ok && e.Schema != s { e.Schema = s } if d.PatchColumn != nil { d.PatchColumn(s, c) } } changes = append(changes, &schema.AddTable{T: t}) } } patch := func(r *schema.Realm) { for _, s := range r.Schemas { s.Name = names[s.Name] } } // Delete the dev resources, and return // the source realm to its initial state. defer func() { patch(r) if rerr := d.ApplyChanges(ctx, reverse); rerr != nil { if err != nil { rerr = fmt.Errorf("%w: %v", err, rerr) } err = rerr } }() if err := d.ApplyChanges(ctx, changes); err != nil { return nil, err } if nr, err = d.InspectRealm(ctx, opts); err != nil { return nil, err } patch(nr) return nr, nil } // NormalizeSchema returns the normal representation of the given database. See NormalizeRealm for more info. 
func (d *DevDriver) NormalizeSchema(ctx context.Context, s *schema.Schema) (*schema.Schema, error) { r := &schema.Realm{} if s.Realm != nil { r.Attrs = s.Realm.Attrs } r.Schemas = append(r.Schemas, s) nr, err := d.NormalizeRealm(ctx, r) if err != nil { return nil, err } ns, ok := nr.Schema(s.Name) if !ok { return nil, fmt.Errorf("missing normalized schema %q", s.Name) } return ns, nil } func (d *DevDriver) formatName(name string) string { dev := fmt.Sprintf("atlas_dev_%s_%d", name, time.Now().Unix()) if d.MaxNameLen == 0 || len(dev) <= d.MaxNameLen { return dev } h := fnv.New128() h.Write([]byte(dev)) return fmt.Sprintf("%s_%x", dev[:d.MaxNameLen-1-h.Size()*2], h.Sum(nil)) } atlas-0.7.2/sql/internal/sqlx/dev_test.go000066400000000000000000000032161431455511600204010ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package sqlx import ( "context" "strings" "testing" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestDriver_NormalizeRealm(t *testing.T) { var ( drv = &mockDriver{ realm: schema.NewRealm(schema.New("test").SetCharset("utf8mb4")), } dev = &DevDriver{ Driver: drv, MaxNameLen: 64, } ) normal, err := dev.NormalizeRealm(context.Background(), schema.NewRealm(schema.New("test"))) require.NoError(t, err) require.Equal(t, normal, drv.realm) require.Len(t, drv.schemas, 1) require.True(t, strings.HasPrefix(drv.schemas[0], "atlas_dev_test_")) require.Len(t, drv.changes, 2, "expect 2 calls (create and drop)") require.Len(t, drv.changes[0], 1) require.Equal(t, &schema.AddSchema{S: schema.New(drv.schemas[0])}, drv.changes[0][0]) require.Len(t, drv.changes[1], 1) require.Equal(t, &schema.DropSchema{S: schema.New(drv.schemas[0]), Extra: []schema.Clause{&schema.IfExists{}}}, drv.changes[1][0]) } type mockDriver struct { migrate.Driver // Inspect. schemas []string realm *schema.Realm // Apply. changes [][]schema.Change } func (m *mockDriver) InspectRealm(_ context.Context, opts *schema.InspectRealmOption) (*schema.Realm, error) { m.schemas = append(m.schemas, opts.Schemas...) return m.realm, nil } func (m *mockDriver) ApplyChanges(_ context.Context, changes []schema.Change, _ ...migrate.PlanOption) error { m.changes = append(m.changes, changes) return nil } atlas-0.7.2/sql/internal/sqlx/diff.go000066400000000000000000000365711431455511600175060ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqlx import ( "fmt" "reflect" "sort" "strconv" "strings" "ariga.io/atlas/sql/schema" ) type ( // A Diff provides a generic schema.Differ for diffing schema elements. 
// // The DiffDriver is required for supporting database/dialect specific // diff capabilities, like diffing custom types or attributes. Diff struct { DiffDriver } // A DiffDriver wraps all required methods for diffing elements that may // have database-specific diff logic. See sql/schema/mysql/diff.go for an // implementation example. DiffDriver interface { // SchemaAttrDiff returns a changeset for migrating schema attributes // from one state to the other. For example, changing schema collation. SchemaAttrDiff(from, to *schema.Schema) []schema.Change // TableAttrDiff returns a changeset for migrating table attributes from // one state to the other. For example, dropping or adding a `CHECK` constraint. TableAttrDiff(from, to *schema.Table) ([]schema.Change, error) // ColumnChange returns the schema changes (if any) for migrating one column to the other. ColumnChange(fromT *schema.Table, from, to *schema.Column) (schema.ChangeKind, error) // IndexAttrChanged reports if the index attributes were changed. // For example, an index type or predicate (for partial indexes). IndexAttrChanged(from, to []schema.Attr) bool // IndexPartAttrChanged reports if the index-part attributes were // changed. For example, an index-part collation. IndexPartAttrChanged(from, to *schema.IndexPart) bool // IsGeneratedIndexName reports if the index name was generated by the database // for unnamed INDEX or UNIQUE constraints. In such cases, the differ will look // for unnamed schema.Indexes on the desired state, before tagging the index as // a candidate for deletion. IsGeneratedIndexName(*schema.Table, *schema.Index) bool // ReferenceChanged reports if the foreign key referential action was // changed. For example, action was changed from RESTRICT to CASCADE. ReferenceChanged(from, to schema.ReferenceOption) bool } // A Normalizer wraps the Normalize method for normalizing the from and to tables before // running diffing. 
The "from" usually represents the inspected database state (current), // and the second represents the desired state. // // If the DiffDriver implements the Normalizer interface, TableDiff normalizes its table // inputs before starting the diff process. Normalizer interface { Normalize(from, to *schema.Table) error } ) // RealmDiff implements the schema.Differ for Realm objects and returns a list of changes // that need to be applied in order to move a database from the current state to the desired. func (d *Diff) RealmDiff(from, to *schema.Realm) ([]schema.Change, error) { var changes []schema.Change // Drop or modify schema. for _, s1 := range from.Schemas { s2, ok := to.Schema(s1.Name) if !ok { changes = append(changes, &schema.DropSchema{S: s1}) continue } change, err := d.SchemaDiff(s1, s2) if err != nil { return nil, err } changes = append(changes, change...) } // Add schemas. for _, s1 := range to.Schemas { if _, ok := from.Schema(s1.Name); ok { continue } changes = append(changes, &schema.AddSchema{S: s1}) for _, t := range s1.Tables { changes = append(changes, &schema.AddTable{T: t}) } } return changes, nil } // SchemaDiff implements the schema.Differ interface and returns a list of // changes that need to be applied in order to move from one state to the other. func (d *Diff) SchemaDiff(from, to *schema.Schema) ([]schema.Change, error) { if from.Name != to.Name { return nil, fmt.Errorf("mismatched schema names: %q != %q", from.Name, to.Name) } var changes []schema.Change // Drop or modify attributes (collations, charset, etc). if change := d.SchemaAttrDiff(from, to); len(change) > 0 { changes = append(changes, &schema.ModifySchema{ S: to, Changes: change, }) } // Drop or modify tables. 
for _, t1 := range from.Tables { t2, ok := to.Table(t1.Name) if !ok { changes = append(changes, &schema.DropTable{T: t1}) continue } change, err := d.TableDiff(t1, t2) if err != nil { return nil, err } if len(change) > 0 { changes = append(changes, &schema.ModifyTable{ T: t2, Changes: change, }) } } // Add tables. for _, t1 := range to.Tables { if _, ok := from.Table(t1.Name); !ok { changes = append(changes, &schema.AddTable{T: t1}) } } return changes, nil } // TableDiff implements the schema.TableDiffer interface and returns a list of // changes that need to be applied in order to move from one state to the other. func (d *Diff) TableDiff(from, to *schema.Table) ([]schema.Change, error) { if from.Name != to.Name { return nil, fmt.Errorf("mismatched table names: %q != %q", from.Name, to.Name) } // Normalizing tables before starting the diff process. if n, ok := d.DiffDriver.(Normalizer); ok { if err := n.Normalize(from, to); err != nil { return nil, err } } var changes []schema.Change if from.Name != to.Name { return nil, fmt.Errorf("mismatched table names: %q != %q", from.Name, to.Name) } // PK modification is not supported. if pk1, pk2 := from.PrimaryKey, to.PrimaryKey; (pk1 != nil) != (pk2 != nil) || (pk1 != nil) && d.pkChange(pk1, pk2) != schema.NoChange { return nil, fmt.Errorf("changing %q table primary key is not supported", to.Name) } // Drop or modify attributes (collations, checks, etc). change, err := d.TableAttrDiff(from, to) if err != nil { return nil, err } changes = append(changes, change...) // Drop or modify columns. for _, c1 := range from.Columns { c2, ok := to.Column(c1.Name) if !ok { changes = append(changes, &schema.DropColumn{C: c1}) continue } change, err := d.ColumnChange(from, c1, c2) if err != nil { return nil, err } if change != schema.NoChange { changes = append(changes, &schema.ModifyColumn{ From: c1, To: c2, Change: change, }) } } // Add columns. 
for _, c1 := range to.Columns { if _, ok := from.Column(c1.Name); !ok { changes = append(changes, &schema.AddColumn{C: c1}) } } // Index changes. changes = append(changes, d.indexDiff(from, to)...) // Drop or modify foreign-keys. for _, fk1 := range from.ForeignKeys { fk2, ok := to.ForeignKey(fk1.Symbol) if !ok { changes = append(changes, &schema.DropForeignKey{F: fk1}) continue } if change := d.fkChange(fk1, fk2); change != schema.NoChange { changes = append(changes, &schema.ModifyForeignKey{ From: fk1, To: fk2, Change: change, }) } } // Add foreign-keys. for _, fk1 := range to.ForeignKeys { if _, ok := from.ForeignKey(fk1.Symbol); !ok { changes = append(changes, &schema.AddForeignKey{F: fk1}) } } return changes, nil } // indexDiff returns the schema changes (if any) for migrating table // indexes from current state to the desired state. func (d *Diff) indexDiff(from, to *schema.Table) []schema.Change { var ( changes []schema.Change exists = make(map[*schema.Index]bool) ) // Drop or modify indexes. for _, idx1 := range from.Indexes { idx2, ok := to.Index(idx1.Name) // Found directly. if ok { if change := d.indexChange(idx1, idx2); change != schema.NoChange { changes = append(changes, &schema.ModifyIndex{ From: idx1, To: idx2, Change: change, }) } exists[idx2] = true continue } // Found indirectly. if d.IsGeneratedIndexName(from, idx1) { if idx2, ok := d.similarUnnamedIndex(to, idx1); ok { exists[idx2] = true continue } } // Not found. changes = append(changes, &schema.DropIndex{I: idx1}) } // Add indexes. for _, idx := range to.Indexes { if exists[idx] { continue } if _, ok := from.Index(idx.Name); !ok { changes = append(changes, &schema.AddIndex{I: idx}) } } return changes } // pkChange returns the schema changes (if any) for migrating one primary key to the other. 
func (d *Diff) pkChange(from, to *schema.Index) schema.ChangeKind { change := d.indexChange(from, to) return change & ^schema.ChangeUnique } // indexChange returns the schema changes (if any) for migrating one index to the other. func (d *Diff) indexChange(from, to *schema.Index) schema.ChangeKind { var change schema.ChangeKind if from.Unique != to.Unique { change |= schema.ChangeUnique } if d.IndexAttrChanged(from.Attrs, to.Attrs) { change |= schema.ChangeAttr } change |= d.partsChange(from.Parts, to.Parts) change |= CommentChange(from.Attrs, to.Attrs) return change } func (d *Diff) partsChange(from, to []*schema.IndexPart) schema.ChangeKind { if len(from) != len(to) { return schema.ChangeParts } sort.Slice(to, func(i, j int) bool { return to[i].SeqNo < to[j].SeqNo }) sort.Slice(from, func(i, j int) bool { return from[i].SeqNo < from[j].SeqNo }) for i := range from { switch { case from[i].Desc != to[i].Desc || d.IndexPartAttrChanged(from[i], to[i]): return schema.ChangeParts case from[i].C != nil && to[i].C != nil: if from[i].C.Name != to[i].C.Name { return schema.ChangeParts } case from[i].X != nil && to[i].X != nil: x1, x2 := from[i].X.(*schema.RawExpr).X, to[i].X.(*schema.RawExpr).X if x1 != x2 && x1 != MayWrap(x2) { return schema.ChangeParts } default: // (C1 != nil) != (C2 != nil) || (X1 != nil) != (X2 != nil). return schema.ChangeParts } } return schema.NoChange } // fkChange returns the schema changes (if any) for migrating one index to the other. 
func (d *Diff) fkChange(from, to *schema.ForeignKey) schema.ChangeKind { var change schema.ChangeKind switch { case from.Table.Name != to.Table.Name: change |= schema.ChangeRefTable | schema.ChangeRefColumn case len(from.RefColumns) != len(to.RefColumns): change |= schema.ChangeRefColumn default: for i := range from.RefColumns { if from.RefColumns[i].Name != to.RefColumns[i].Name { change |= schema.ChangeRefColumn } } } switch { case len(from.Columns) != len(to.Columns): change |= schema.ChangeColumn default: for i := range from.Columns { if from.Columns[i].Name != to.Columns[i].Name { change |= schema.ChangeColumn } } } if d.ReferenceChanged(from.OnUpdate, to.OnUpdate) { change |= schema.ChangeUpdateAction } if d.ReferenceChanged(from.OnDelete, to.OnDelete) { change |= schema.ChangeDeleteAction } return change } // similarUnnamedIndex searches for an unnamed index with the same index-parts in the table. func (d *Diff) similarUnnamedIndex(t *schema.Table, idx1 *schema.Index) (*schema.Index, bool) { for _, idx2 := range t.Indexes { if idx2.Name != "" || len(idx2.Parts) != len(idx1.Parts) || idx2.Unique != idx1.Unique { continue } if d.partsChange(idx1.Parts, idx2.Parts) == schema.NoChange { return idx2, true } } return nil, false } // CommentChange reports if the element comment was changed. func CommentChange(from, to []schema.Attr) schema.ChangeKind { var c1, c2 schema.Comment if Has(from, &c1) != Has(to, &c2) || c1.Text != c2.Text { return schema.ChangeComment } return schema.NoChange } var ( attrsType = reflect.TypeOf(([]schema.Attr)(nil)) clausesType = reflect.TypeOf(([]schema.Clause)(nil)) exprsType = reflect.TypeOf(([]schema.Expr)(nil)) ) // Has finds the first element in the elements list that // matches target, and if so, sets target to that attribute // value and returns true. 
func Has(elements, target any) bool { ev := reflect.ValueOf(elements) if t := ev.Type(); t != attrsType && t != clausesType && t != exprsType { panic(fmt.Sprintf("unexpected elements type: %T", elements)) } tv := reflect.ValueOf(target) if tv.Kind() != reflect.Ptr || tv.IsNil() { panic("target must be a non-nil pointer") } for i := 0; i < ev.Len(); i++ { idx := ev.Index(i) if idx.IsNil() { continue } if e := idx.Elem(); e.Type().AssignableTo(tv.Type()) { tv.Elem().Set(e.Elem()) return true } } return false } // UnsupportedTypeError describes an unsupported type error. type UnsupportedTypeError struct { schema.Type } func (e UnsupportedTypeError) Error() string { return fmt.Sprintf("unsupported type %T", e.Type) } // CommentDiff computes the comment diff between the 2 attribute list. // Note that, the implementation relies on the fact that both PostgreSQL // and MySQL treat empty comment as "no comment" and a way to clear comments. func CommentDiff(from, to []schema.Attr) schema.Change { var fromC, toC schema.Comment switch fromHas, toHas := Has(from, &fromC), Has(to, &toC); { case !fromHas && !toHas: case !fromHas && toC.Text != "": return &schema.AddAttr{ A: &toC, } case !toHas: // In MySQL, there is no way to DROP a comment. Instead, setting it to empty ('') // will remove it from INFORMATION_SCHEMA. We use the same approach in PostgreSQL, // because comments can be dropped either by setting them to NULL or empty string. // See: postgres/backend/commands/comment.c#CreateComments. return &schema.ModifyAttr{ From: &fromC, To: &toC, } default: v1, err1 := Unquote(fromC.Text) v2, err2 := Unquote(toC.Text) if err1 == nil && err2 == nil && v1 != v2 { return &schema.ModifyAttr{ From: &fromC, To: &toC, } } } return nil } // CheckDiff computes the change diff between the 2 tables. A compare // function is provided to check if a Check object was modified. 
func CheckDiff(from, to *schema.Table, compare ...func(c1, c2 *schema.Check) bool) []schema.Change { var changes []schema.Change // Drop or modify checks. for _, c1 := range checks(from.Attrs) { switch c2, ok := similarCheck(to.Attrs, c1); { case !ok: changes = append(changes, &schema.DropCheck{ C: c1, }) case len(compare) == 1 && !compare[0](c1, c2): changes = append(changes, &schema.ModifyCheck{ From: c1, To: c2, }) } } // Add checks. for _, c1 := range checks(to.Attrs) { if _, ok := similarCheck(from.Attrs, c1); !ok { changes = append(changes, &schema.AddCheck{ C: c1, }) } } return changes } // checks extracts all constraints from table attributes. func checks(attr []schema.Attr) (checks []*schema.Check) { for i := range attr { if c, ok := attr[i].(*schema.Check); ok { checks = append(checks, c) } } return checks } // similarCheck returns a CHECK by its constraints name or expression. func similarCheck(attrs []schema.Attr, c *schema.Check) (*schema.Check, bool) { var byName, byExpr *schema.Check for i := 0; i < len(attrs) && (byName == nil || byExpr == nil); i++ { check, ok := attrs[i].(*schema.Check) if !ok { continue } if check.Name != "" && check.Name == c.Name { byName = check } if check.Expr == c.Expr { byExpr = check } } // Give precedence to constraint name. if byName != nil { return byName, true } if byExpr != nil { return byExpr, true } return nil, false } // Unquote single or double quotes. func Unquote(s string) (string, error) { switch { case IsQuoted(s, '"'): return strconv.Unquote(s) case IsQuoted(s, '\''): return strings.ReplaceAll(s[1:len(s)-1], "''", "'"), nil default: return s, nil } } // SingleQuote quotes the given string with single quote. 
func SingleQuote(s string) (string, error) { switch { case IsQuoted(s, '\''): return s, nil case IsQuoted(s, '"'): v, err := strconv.Unquote(s) if err != nil { return "", err } s = v fallthrough default: return "'" + strings.ReplaceAll(s, "'", "''") + "'", nil } } atlas-0.7.2/sql/internal/sqlx/exclude.go000066400000000000000000000074011431455511600202150ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqlx import ( "encoding/csv" "fmt" "path/filepath" "strings" "ariga.io/atlas/sql/schema" ) // ExcludeRealm filters resources in the realm based on the given patterns. func ExcludeRealm(r *schema.Realm, patterns []string) (*schema.Realm, error) { if len(patterns) == 0 { return r, nil } var schemas []*schema.Schema globs, err := split(patterns) if err != nil { return nil, err } Filter: for _, s := range r.Schemas { for i, g := range globs { if len(g) > 3 { return nil, fmt.Errorf("too many parts in pattern: %q", patterns[i]) } match, err := filepath.Match(g[0], s.Name) if err != nil { return nil, err } if match { // In case there is a match, and it is // a single glob we exclude this schema. if len(g) == 1 { continue Filter } if err := excludeS(s, g[1:]); err != nil { return nil, err } } } schemas = append(schemas, s) } r.Schemas = schemas return r, nil } // ExcludeSchema filters resources in the schema based on the given patterns. func ExcludeSchema(s *schema.Schema, patterns []string) (*schema.Schema, error) { if len(patterns) == 0 { return s, nil } if s.Realm == nil { return nil, fmt.Errorf("missing realm for schema %q", s.Name) } for i, p := range patterns { patterns[i] = fmt.Sprintf("%s.%s", s.Name, p) } if _, err := ExcludeRealm(s.Realm, patterns); err != nil { return nil, err } return s, nil } // split parses the list of patterns into chain of resource-globs. 
// For example, 's*.t.*' is split to ['s*', 't', *]. func split(patterns []string) ([][]string, error) { globs := make([][]string, len(patterns)) for i, p := range patterns { r := csv.NewReader(strings.NewReader(p)) r.Comma = '.' switch parts, err := r.ReadAll(); { case err != nil: return nil, err case len(parts) != 1: return nil, fmt.Errorf("unexpected pattern: %q", p) case len(parts[0]) == 0: return nil, fmt.Errorf("empty pattern: %q", p) default: globs[i] = parts[0] } } return globs, nil } func excludeS(s *schema.Schema, glob []string) error { var tables []*schema.Table for _, t := range s.Tables { match, err := filepath.Match(glob[0], t.Name) if err != nil { return err } if match { // In case there is a match, and it is // a single glob we exclude this table. if len(glob) == 1 { continue } if err := excludeT(t, glob[1]); err != nil { return err } } // No match or glob has more than one pattern. tables = append(tables, t) } s.Tables = tables return nil } func excludeT(t *schema.Table, pattern string) (err error) { ex := make(map[*schema.Index]struct{}) ef := make(map[*schema.ForeignKey]struct{}) t.Columns, err = filter(t.Columns, func(c *schema.Column) (bool, error) { match, err := filepath.Match(pattern, c.Name) if !match || err != nil { return false, err } for _, idx := range c.Indexes { ex[idx] = struct{}{} } for _, fk := range c.ForeignKeys { ef[fk] = struct{}{} } return true, nil }) t.Indexes, err = filter(t.Indexes, func(idx *schema.Index) (bool, error) { if _, ok := ex[idx]; ok { return true, nil } return filepath.Match(pattern, idx.Name) }) t.ForeignKeys, err = filter(t.ForeignKeys, func(fk *schema.ForeignKey) (bool, error) { if _, ok := ef[fk]; ok { return true, nil } return filepath.Match(pattern, fk.Symbol) }) return } func filter[T any](s []T, f func(T) (bool, error)) ([]T, error) { r := make([]T, 0, len(s)) for i := range s { match, err := f(s[i]) if err != nil { return nil, err } if !match { r = append(r, s[i]) } } return r, nil } 
atlas-0.7.2/sql/internal/sqlx/exclude_test.go000066400000000000000000000144571431455511600212630ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

package sqlx

import (
	"testing"

	"ariga.io/atlas/sql/schema"

	"github.com/stretchr/testify/require"
)

// TestExcludeRealm_Schemas verifies schema-level exclusion patterns.
func TestExcludeRealm_Schemas(t *testing.T) {
	r := schema.NewRealm(schema.New("s1"), schema.New("s2"), schema.New("s3"))
	r, err := ExcludeRealm(r, []string{"s4"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 3)
	r, err = ExcludeRealm(r, []string{"s1", "s2.t2", "s3.t3.c3"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 2)
	require.Equal(t, "s2", r.Schemas[0].Name)
	require.Equal(t, "s3", r.Schemas[1].Name)
	r, err = ExcludeRealm(r, []string{"*"})
	require.NoError(t, err)
	require.Empty(t, r.Schemas)
	r = schema.NewRealm(schema.New("s1"), schema.New("s2"), schema.New("s3"))
	// Multi-part patterns exclude nested resources, not the schemas themselves.
	r, err = ExcludeRealm(r, []string{"s*.*", "s*.*.*"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 3)
	r, err = ExcludeRealm(r, []string{"s*"})
	require.NoError(t, err)
	require.Empty(t, r.Schemas)
}

// TestExcludeRealm_Tables verifies table-level exclusion patterns.
func TestExcludeRealm_Tables(t *testing.T) {
	r := schema.NewRealm(
		schema.New("s0"),
		schema.New("s1").AddTables(
			schema.NewTable("t1"),
		),
		schema.New("s2").AddTables(
			schema.NewTable("t1"),
			schema.NewTable("t2"),
		),
		schema.New("s3").AddTables(
			schema.NewTable("t1"),
			schema.NewTable("t2"),
			schema.NewTable("t3"),
		),
	)
	r, err := ExcludeRealm(r, []string{"s4"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 4)
	r, err = ExcludeRealm(r, []string{"s0"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 3)
	require.Equal(t, "s1", r.Schemas[0].Name)
	require.Equal(t, "s2", r.Schemas[1].Name)
	require.Equal(t, "s3", r.Schemas[2].Name)
	// Three-part pattern excludes table resources, but keeps the tables.
	r, err = ExcludeRealm(r, []string{"*.t1.*"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 3)
	require.Len(t, r.Schemas[0].Tables, 1)
	require.Len(t, r.Schemas[1].Tables, 2)
	require.Len(t, r.Schemas[2].Tables, 3)
	r, err = ExcludeRealm(r, []string{"*.t1"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 3)
	require.Empty(t, r.Schemas[0].Tables)
	require.Len(t, r.Schemas[1].Tables, 1)
	require.Len(t, r.Schemas[2].Tables, 2)
	r, err = ExcludeRealm(r, []string{"s[12].t2"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 3)
	require.Empty(t, r.Schemas[0].Tables)
	require.Empty(t, r.Schemas[1].Tables)
	require.Len(t, r.Schemas[2].Tables, 2)
	r, err = ExcludeRealm(r, []string{"*.t[23].*"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 3)
	require.Len(t, r.Schemas[2].Tables, 2)
	r, err = ExcludeRealm(r, []string{"*.t[23]"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 3)
	require.Empty(t, r.Schemas[2].Tables)
}

// TestExcludeRealm_Columns verifies column-level exclusion, including the
// cascade to indexes that cover the excluded columns.
func TestExcludeRealm_Columns(t *testing.T) {
	r := schema.NewRealm(
		schema.New("s1").AddTables(
			func() *schema.Table {
				t := schema.NewTable("t1").AddColumns(schema.NewColumn("c1"))
				t.AddIndexes(schema.NewIndex("i1").AddColumns(t.Columns[0]))
				return t
			}(),
		),
		schema.New("s2").AddTables(
			func() *schema.Table {
				t := schema.NewTable("t1").AddColumns(schema.NewColumn("c1"))
				t.AddIndexes(schema.NewIndex("i1").AddColumns(t.Columns[0]))
				return t
			}(),
			func() *schema.Table {
				t := schema.NewTable("t2").AddColumns(schema.NewColumn("c1"), schema.NewColumn("c2"))
				t.AddIndexes(schema.NewIndex("i1").AddColumns(t.Columns[0]), schema.NewIndex("i2").AddColumns(t.Columns[1]))
				return t
			}(),
		),
		schema.New("s3").AddTables(
			func() *schema.Table {
				t := schema.NewTable("t1").AddColumns(schema.NewColumn("c1"))
				t.AddIndexes(schema.NewIndex("i1").AddColumns(t.Columns[0]))
				return t
			}(),
			func() *schema.Table {
				t := schema.NewTable("t2").AddColumns(schema.NewColumn("c1"), schema.NewColumn("c2"))
				t.AddIndexes(schema.NewIndex("i1").AddColumns(t.Columns[0]), schema.NewIndex("i2").AddColumns(t.Columns[1]))
				return t
			}(),
			func() *schema.Table {
				t := schema.NewTable("t3").AddColumns(schema.NewColumn("c1"), schema.NewColumn("c2"), schema.NewColumn("c3"))
				t.AddIndexes(schema.NewIndex("i1").AddColumns(t.Columns[0]), schema.NewIndex("i2").AddColumns(t.Columns[1]))
				return t
			}(),
		),
	)
	r, err := ExcludeRealm(r, []string{"s[23].t[23].c1"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 3)
	require.Len(t, r.Schemas[0].Tables, 1)
	require.Len(t, r.Schemas[0].Tables[0].Columns, 1)
	require.Len(t, r.Schemas[0].Tables[0].Indexes, 1)
	require.Len(t, r.Schemas[1].Tables, 2)
	require.Len(t, r.Schemas[1].Tables[0].Columns, 1)
	require.Len(t, r.Schemas[1].Tables[0].Indexes, 1)
	require.Len(t, r.Schemas[1].Tables[1].Columns, 1)
	require.Len(t, r.Schemas[1].Tables[1].Indexes, 1)
	require.Len(t, r.Schemas[2].Tables, 3)
	require.Len(t, r.Schemas[2].Tables[0].Columns, 1)
	require.Len(t, r.Schemas[2].Tables[0].Indexes, 1)
	require.Len(t, r.Schemas[2].Tables[1].Columns, 1)
	require.Len(t, r.Schemas[2].Tables[1].Indexes, 1)
	require.Len(t, r.Schemas[2].Tables[2].Columns, 2)
	require.Len(t, r.Schemas[2].Tables[2].Indexes, 1)
	r, err = ExcludeRealm(r, []string{"s[23].t*.*"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 3)
	require.Len(t, r.Schemas[0].Tables, 1)
	require.Len(t, r.Schemas[0].Tables[0].Columns, 1)
	require.Len(t, r.Schemas[0].Tables[0].Indexes, 1)
	require.Len(t, r.Schemas[1].Tables, 2)
	require.Empty(t, r.Schemas[1].Tables[0].Columns)
	require.Empty(t, r.Schemas[1].Tables[0].Indexes)
	require.Empty(t, r.Schemas[1].Tables[1].Columns)
	require.Empty(t, r.Schemas[1].Tables[1].Indexes)
	require.Len(t, r.Schemas[2].Tables, 3)
	require.Empty(t, r.Schemas[2].Tables[0].Columns)
	require.Empty(t, r.Schemas[2].Tables[0].Indexes)
	require.Empty(t, r.Schemas[2].Tables[1].Columns)
	require.Empty(t, r.Schemas[2].Tables[1].Indexes)
	require.Empty(t, r.Schemas[2].Tables[2].Columns)
	require.Empty(t, r.Schemas[2].Tables[2].Indexes)
	r, err = ExcludeRealm(r, []string{"*.*.*"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 3)
	require.Len(t, r.Schemas[0].Tables, 1)
	require.Empty(t, r.Schemas[0].Tables[0].Columns)
	require.Empty(t, r.Schemas[0].Tables[0].Indexes)
}

// TestExcludeSchema verifies that schema-scoped patterns are qualified
// with the schema name and applied through the realm.
func TestExcludeSchema(t *testing.T) {
	r := schema.NewRealm(
		schema.New("s1").AddTables(
			schema.NewTable("t1"),
			schema.NewTable("t2"),
		),
	)
	_, err := ExcludeSchema(r.Schemas[0], []string{"t2"})
	require.NoError(t, err)
	require.Len(t, r.Schemas, 1)
	require.Len(t, r.Schemas[0].Tables, 1)
}
atlas-0.7.2/sql/internal/sqlx/plan.go000066400000000000000000000170131431455511600175160ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

package sqlx

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
	"sort"

	"ariga.io/atlas/sql/migrate"
	"ariga.io/atlas/sql/schema"
)

// execPlanner is the minimal planner/executor surface the helpers below need.
type execPlanner interface {
	ExecContext(context.Context, string, ...any) (sql.Result, error)
	PlanChanges(context.Context, string, []schema.Change, ...migrate.PlanOption) (*migrate.Plan, error)
}

// ApplyChanges is a helper used by the different drivers to apply changes.
func ApplyChanges(ctx context.Context, changes []schema.Change, p execPlanner, opts ...migrate.PlanOption) error {
	plan, err := p.PlanChanges(ctx, "apply", changes, opts...)
	if err != nil {
		return err
	}
	for _, c := range plan.Changes {
		if _, err := p.ExecContext(ctx, c.Cmd, c.Args...); err != nil {
			// Prefix the error with the statement comment, if any,
			// to make the failing change identifiable.
			if c.Comment != "" {
				err = fmt.Errorf("%s: %w", c.Comment, err)
			}
			return err
		}
	}
	return nil
}

// DetachCycles takes a list of schema changes, and detaches
// references between changes if there is at least one circular
// reference in the changeset. More explicitly, it postpones fks
// creation, or deletes fks before deletes their tables.
func DetachCycles(changes []schema.Change) ([]schema.Change, error) {
	sorted, err := sortMap(changes)
	if err == errCycle {
		// A non-self cycle exists; fall back to detaching the
		// foreign-key references from their table changes.
		return detachReferences(changes), nil
	}
	if err != nil {
		return nil, err
	}
	// No cycle: reorder the changes by the topological
	// order of the tables they affect.
	planned := make([]schema.Change, len(changes))
	copy(planned, changes)
	sort.Slice(planned, func(i, j int) bool {
		return sorted[table(planned[i])] < sorted[table(planned[j])]
	})
	return planned, nil
}

// detachReferences detaches all table references.
// Foreign-key additions are deferred to after all tables exist, and
// foreign-key drops are emitted before their tables are dropped.
func detachReferences(changes []schema.Change) []schema.Change {
	var planned, deferred []schema.Change
	for _, change := range changes {
		switch change := change.(type) {
		case *schema.AddTable:
			var (
				ext  []schema.Change
				self []*schema.ForeignKey
			)
			// Self-referencing FKs can be created with the table;
			// external ones are deferred.
			for _, fk := range change.T.ForeignKeys {
				if fk.RefTable == change.T {
					self = append(self, fk)
				} else {
					ext = append(ext, &schema.AddForeignKey{F: fk})
				}
			}
			if len(ext) > 0 {
				deferred = append(deferred, &schema.ModifyTable{T: change.T, Changes: ext})
				// Shallow-copy the table so the original change is untouched.
				t := *change.T
				t.ForeignKeys = self
				change = &schema.AddTable{T: &t, Extra: change.Extra}
			}
			planned = append(planned, change)
		case *schema.DropTable:
			var fks []schema.Change
			for _, fk := range change.T.ForeignKeys {
				if fk.RefTable != change.T {
					fks = append(fks, &schema.DropForeignKey{F: fk})
				}
			}
			if len(fks) > 0 {
				// Drop external FKs first; the table drop itself is deferred.
				planned = append(planned, &schema.ModifyTable{T: change.T, Changes: fks})
				t := *change.T
				t.ForeignKeys = nil
				change = &schema.DropTable{T: &t, Extra: change.Extra}
			}
			deferred = append(deferred, change)
		case *schema.ModifyTable:
			// Split FK additions out of the table modification.
			var fks, rest []schema.Change
			for _, c := range change.Changes {
				switch c := c.(type) {
				case *schema.AddForeignKey:
					fks = append(fks, c)
				default:
					rest = append(rest, c)
				}
			}
			if len(fks) > 0 {
				deferred = append(deferred, &schema.ModifyTable{T: change.T, Changes: fks})
			}
			if len(rest) > 0 {
				planned = append(planned, &schema.ModifyTable{T: change.T, Changes: rest})
			}
		default:
			planned = append(planned, change)
		}
	}
	return append(planned, deferred...)
}

// errCycle is an internal error to indicate a case of a cycle.
var errCycle = errors.New("cycle detected")

// sortMap returns an index-map that indicates the position of each table in a
// topological sort (in reversed order) based on its references. It returns
// errCycle if the changes contain a non-self referencing cycle.
func sortMap(changes []schema.Change) (map[string]int, error) {
	var (
		visit     func(string) bool
		sorted    = make(map[string]int)
		progress  = make(map[string]bool)
		deps, err = dependencies(changes)
	)
	if err != nil {
		return nil, err
	}
	// visit reports true if a cycle was found on the DFS path.
	visit = func(name string) bool {
		if _, done := sorted[name]; done {
			return false
		}
		// A node revisited while still in progress closes a cycle.
		if progress[name] {
			return true
		}
		progress[name] = true
		for _, ref := range deps[name] {
			if visit(ref.Name) {
				return true
			}
		}
		delete(progress, name)
		sorted[name] = len(sorted)
		return false
	}
	for node := range deps {
		if visit(node) {
			return nil, errCycle
		}
	}
	return sorted, nil
}

// dependencies returned an adjacency list of all tables and the table they depend on
func dependencies(changes []schema.Change) (map[string][]*schema.Table, error) {
	deps := make(map[string][]*schema.Table)
	for _, change := range changes {
		switch change := change.(type) {
		case *schema.AddTable:
			for _, fk := range change.T.ForeignKeys {
				if err := checkFK(fk); err != nil {
					return nil, err
				}
				if fk.RefTable != change.T {
					deps[change.T.Name] = append(deps[change.T.Name], fk.RefTable)
				}
			}
		case *schema.DropTable:
			for _, fk := range change.T.ForeignKeys {
				if err := checkFK(fk); err != nil {
					return nil, err
				}
				// For drops, the dependency is reversed: the referenced
				// table depends on the referencing table being dropped first.
				if isDropped(changes, fk.RefTable) {
					deps[fk.RefTable.Name] = append(deps[fk.RefTable.Name], fk.Table)
				}
			}
		case *schema.ModifyTable:
			for _, c := range change.Changes {
				switch c := c.(type) {
				case *schema.AddForeignKey:
					if err := checkFK(c.F); err != nil {
						return nil, err
					}
					if c.F.RefTable != change.T {
						deps[change.T.Name] = append(deps[change.T.Name], c.F.RefTable)
					}
				case *schema.ModifyForeignKey:
					if err := checkFK(c.To); err != nil {
						return nil, err
					}
					if c.To.RefTable != change.T {
						deps[change.T.Name] = append(deps[change.T.Name], c.To.RefTable)
					}
				}
			}
		}
	}
	return deps, nil
}

// checkFK verifies the foreign-key is fully linked (tables and columns on
// both sides) before it is used for dependency analysis.
func checkFK(fk *schema.ForeignKey) error {
	var cause []string
	if fk.Table == nil {
		cause = append(cause, "child table")
	}
	if len(fk.Columns) == 0 {
		cause = append(cause, "child columns")
	}
	if fk.RefTable == nil {
		cause = append(cause, "parent table")
	}
	if len(fk.RefColumns) == 0 {
		cause = append(cause, "parent columns")
	}
	if len(cause) != 0 {
		return fmt.Errorf("missing %q for foreign key: %q", cause, fk.Symbol)
	}
	return nil
}

// table extracts a table from the given change.
func table(change schema.Change) (t string) {
	switch change := change.(type) {
	case *schema.AddTable:
		t = change.T.Name
	case *schema.DropTable:
		t = change.T.Name
	case *schema.ModifyTable:
		t = change.T.Name
	}
	return
}

// isDropped checks if the given table is marked as a deleted in the changeset.
func isDropped(changes []schema.Change, t *schema.Table) bool {
	for _, c := range changes {
		if c, ok := c.(*schema.DropTable); ok && c.T.Name == t.Name {
			return true
		}
	}
	return false
}

// CheckChangesScope checks that changes can be applied
// on a schema scope (connection).
func CheckChangesScope(changes []schema.Change) error { names := make(map[string]struct{}) for _, c := range changes { var t *schema.Table switch c := c.(type) { case *schema.AddSchema, *schema.ModifySchema, *schema.DropSchema: return fmt.Errorf("%T is not allowed when migration plan is scoped to one schema", c) case *schema.AddTable: t = c.T case *schema.ModifyTable: t = c.T case *schema.DropTable: t = c.T default: continue } if t.Schema != nil && t.Schema.Name != "" { names[t.Schema.Name] = struct{}{} } for _, c := range t.Columns { e, ok := c.Type.Type.(*schema.EnumType) if ok && e.Schema != nil && e.Schema.Name != "" { names[t.Schema.Name] = struct{}{} } } } if len(names) > 1 { return fmt.Errorf("found %d schemas when migration plan is scoped to one", len(names)) } return nil } atlas-0.7.2/sql/internal/sqlx/plan_test.go000066400000000000000000000102441431455511600205540ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package sqlx

import (
	"testing"

	"ariga.io/atlas/sql/schema"

	"github.com/stretchr/testify/require"
)

// TestDetachCycles verifies both the acyclic (topological reorder) and
// cyclic (foreign-key detaching) behavior of DetachCycles.
func TestDetachCycles(t *testing.T) {
	users := &schema.Table{
		Name: "users",
		Columns: []*schema.Column{
			{Name: "id", Type: &schema.ColumnType{Raw: "bigint"}},
			{Name: "workplace_id", Type: &schema.ColumnType{Raw: "bigint", Null: true}},
		},
	}
	workplaces := &schema.Table{
		Name: "workplaces",
		Columns: []*schema.Column{
			{Name: "id", Type: &schema.ColumnType{Raw: "bigint"}},
			{Name: "owner_id", Type: &schema.ColumnType{Raw: "bigint", Null: true}},
		},
	}
	users.ForeignKeys = []*schema.ForeignKey{
		{Symbol: "workplace", Table: users, Columns: users.Columns[1:2], RefTable: workplaces, RefColumns: workplaces.Columns[:1]},
	}
	changes := []schema.Change{&schema.AddTable{T: workplaces}, &schema.AddTable{T: users}}
	planned, err := DetachCycles(changes)
	require.NoError(t, err)
	require.Equal(t, changes, planned)
	deletion := []schema.Change{&schema.DropTable{T: users}, &schema.DropTable{T: workplaces}}
	planned, err = DetachCycles(deletion)
	require.NoError(t, err)
	require.Equal(t, deletion, planned)
	// Create a circular reference.
	workplaces.ForeignKeys = []*schema.ForeignKey{
		{Symbol: "owner", Table: workplaces, Columns: workplaces.Columns[1:], RefTable: users, RefColumns: users.Columns[:1]},
	}
	// Add a self-ref foreign-key.
	users.Columns = append(users.Columns, &schema.Column{Name: "spouse_id", Type: &schema.ColumnType{Raw: "bigint", Null: true}})
	users.ForeignKeys = append(users.ForeignKeys, &schema.ForeignKey{Symbol: "spouse", Table: users, Columns: users.Columns[2:], RefTable: users, RefColumns: users.Columns[:1]})
	planned, err = DetachCycles(changes)
	require.NoError(t, err)
	require.Len(t, planned, 4)
	require.Empty(t, planned[0].(*schema.AddTable).T.ForeignKeys)
	require.NotEmpty(t, planned[1].(*schema.AddTable).T.ForeignKeys)
	require.Equal(t, &schema.ModifyTable{
		T: workplaces,
		Changes: []schema.Change{
			&schema.AddForeignKey{
				F: &schema.ForeignKey{Symbol: "owner", Table: workplaces, Columns: workplaces.Columns[1:], RefTable: users, RefColumns: users.Columns[:1]},
			},
		},
	}, planned[2])
	require.Equal(t, &schema.ModifyTable{
		T: users,
		Changes: []schema.Change{
			&schema.AddForeignKey{
				F: &schema.ForeignKey{Symbol: "workplace", Table: users, Columns: users.Columns[1:2], RefTable: workplaces, RefColumns: workplaces.Columns[:1]},
			},
		},
	}, planned[3])
	planned, err = DetachCycles(deletion)
	require.NoError(t, err)
	require.Equal(t, &schema.ModifyTable{
		T: users,
		Changes: []schema.Change{
			&schema.DropForeignKey{
				F: &schema.ForeignKey{Symbol: "workplace", Table: users, Columns: users.Columns[1:2], RefTable: workplaces, RefColumns: workplaces.Columns[:1]},
			},
		},
	}, planned[0])
	require.Equal(t, &schema.ModifyTable{
		T: workplaces,
		Changes: []schema.Change{
			&schema.DropForeignKey{
				F: &schema.ForeignKey{Symbol: "owner", Table: workplaces, Columns: workplaces.Columns[1:], RefTable: users, RefColumns: users.Columns[:1]},
			},
		},
	}, planned[1])
	users.ForeignKeys = nil
	workplaces.ForeignKeys = nil
	require.Equal(t, deletion, planned[2:])
}

// TestCheckChangesScope verifies rejection of schema-level changes and
// multi-schema plans when scoped to a single schema.
func TestCheckChangesScope(t *testing.T) {
	err := CheckChangesScope([]schema.Change{
		&schema.AddSchema{},
	})
	require.EqualError(t, err, "*schema.AddSchema is not allowed when migration plan is scoped to one schema")
	err = CheckChangesScope([]schema.Change{
		&schema.ModifySchema{},
	})
	require.EqualError(t, err, "*schema.ModifySchema is not allowed when migration plan is scoped to one schema")
	err = CheckChangesScope([]schema.Change{
		&schema.DropSchema{},
	})
	require.EqualError(t, err, "*schema.DropSchema is not allowed when migration plan is scoped to one schema")
	err = CheckChangesScope([]schema.Change{
		&schema.AddTable{T: schema.NewTable("users").SetSchema(schema.New("s1"))},
		&schema.AddTable{T: schema.NewTable("users").SetSchema(schema.New("s2"))},
	})
	require.EqualError(t, err, "found 2 schemas when migration plan is scoped to one")
}
atlas-0.7.2/sql/internal/sqlx/sqlx.go000066400000000000000000000263111431455511600175540ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

package sqlx

import (
	"bytes"
	"context"
	"database/sql"
	"database/sql/driver"
	"fmt"
	"io"
	"reflect"
	"strconv"
	"strings"

	"ariga.io/atlas/sql/schema"
)

type (
	// ExecQueryCloser is the interface that groups
	// Close with the schema.ExecQuerier methods.
	ExecQueryCloser interface {
		schema.ExecQuerier
		io.Closer
	}
	// nopCloser adapts an ExecQuerier that is already bound to a single
	// connection into an ExecQueryCloser with a no-op Close.
	nopCloser struct {
		schema.ExecQuerier
	}
)

// Close implements the io.Closer interface.
func (nopCloser) Close() error { return nil }

// SingleConn returns a closable single connection from the given ExecQuerier.
// If the ExecQuerier is already bound to a single connection (e.g. Tx, Conn),
// the connection will return as-is with a NopCloser.
func SingleConn(ctx context.Context, conn schema.ExecQuerier) (ExecQueryCloser, error) {
	// A standard sql.DB or a wrapper of it.
	if opener, ok := conn.(interface {
		Conn(context.Context) (*sql.Conn, error)
	}); ok {
		return opener.Conn(ctx)
	}
	// Tx and Conn are bounded to a single connection.
	// We use sql/driver.Tx to cover also custom Tx structs.
_, ok1 := conn.(driver.Tx) _, ok2 := conn.(*sql.Conn) if ok1 || ok2 { return nopCloser{ExecQuerier: conn}, nil } return nil, fmt.Errorf("cannot obtain a single connection from %T", conn) } // ValidString reports if the given string is not null and valid. func ValidString(s sql.NullString) bool { return s.Valid && s.String != "" && strings.ToLower(s.String) != "null" } // ScanOne scans one record and closes the rows at the end. func ScanOne(rows *sql.Rows, dest ...any) error { defer rows.Close() if !rows.Next() { return sql.ErrNoRows } if err := rows.Scan(dest...); err != nil { return err } return rows.Close() } // ScanNullBool scans one sql.NullBool record and closes the rows at the end. func ScanNullBool(rows *sql.Rows) (sql.NullBool, error) { var b sql.NullBool return b, ScanOne(rows, &b) } // ScanStrings scans sql.Rows into a slice of strings and closes it at the end. func ScanStrings(rows *sql.Rows) ([]string, error) { defer rows.Close() var vs []string for rows.Next() { var v string if err := rows.Scan(&v); err != nil { return nil, err } vs = append(vs, v) } return vs, nil } // SchemaFKs scans the rows and adds the foreign-key to the schema table. // Reference elements are added as stubs and should be linked manually by the // caller. 
func SchemaFKs(s *schema.Schema, rows *sql.Rows) error {
	for rows.Next() {
		var name, table, column, tSchema, refTable, refColumn, refSchema, updateRule, deleteRule string
		if err := rows.Scan(&name, &table, &column, &tSchema, &refTable, &refColumn, &refSchema, &updateRule, &deleteRule); err != nil {
			return err
		}
		t, ok := s.Table(table)
		if !ok {
			return fmt.Errorf("table %q was not found in schema", table)
		}
		fk, ok := t.ForeignKey(name)
		if !ok {
			// First row for this FK: create it. RefTable defaults to the
			// table itself (self-reference) and is replaced below for
			// cross-table references.
			fk = &schema.ForeignKey{
				Symbol:   name,
				Table:    t,
				RefTable: t,
				OnDelete: schema.ReferenceOption(deleteRule),
				OnUpdate: schema.ReferenceOption(updateRule),
			}
			switch {
			case refTable == table:
				// Self-reference; keep RefTable as-is.
			case tSchema == refSchema:
				// Same-schema reference: link, or stub if not inspected.
				if fk.RefTable, ok = s.Table(refTable); !ok {
					fk.RefTable = &schema.Table{Name: refTable, Schema: s}
				}
			case tSchema != refSchema:
				// Cross-schema reference: always a stub.
				fk.RefTable = &schema.Table{Name: refTable, Schema: &schema.Schema{Name: refSchema}}
			}
			t.ForeignKeys = append(t.ForeignKeys, fk)
		}
		c, ok := t.Column(column)
		if !ok {
			return fmt.Errorf("column %q was not found for fk %q", column, fk.Symbol)
		}
		// Rows are ordered by ORDINAL_POSITION that specifies
		// the position of the column in the FK definition.
		if _, ok := fk.Column(c.Name); !ok {
			fk.Columns = append(fk.Columns, c)
			c.ForeignKeys = append(c.ForeignKeys, fk)
		}
		// Stub referenced columns or link if it's a self-reference.
		var rc *schema.Column
		if fk.Table != fk.RefTable {
			rc = &schema.Column{Name: refColumn}
		} else if c, ok := t.Column(refColumn); ok {
			rc = c
		} else {
			return fmt.Errorf("referenced column %q was not found for fk %q", refColumn, fk.Symbol)
		}
		if _, ok := fk.RefColumn(rc.Name); !ok {
			fk.RefColumns = append(fk.RefColumns, rc)
		}
	}
	return nil
}

// LinkSchemaTables links foreign-key stub tables/columns to actual elements.
func LinkSchemaTables(schemas []*schema.Schema) {
	// Index all tables by (schema name, table name).
	byName := make(map[string]map[string]*schema.Table)
	for _, s := range schemas {
		byName[s.Name] = make(map[string]*schema.Table)
		for _, t := range s.Tables {
			t.Schema = s
			byName[s.Name][t.Name] = t
		}
	}
	// Replace FK stubs with the actual tables/columns when they
	// were inspected; unknown references are left as stubs.
	for _, s := range schemas {
		for _, t := range s.Tables {
			for _, fk := range t.ForeignKeys {
				rs, ok := byName[fk.RefTable.Schema.Name]
				if !ok {
					continue
				}
				ref, ok := rs[fk.RefTable.Name]
				if !ok {
					continue
				}
				fk.RefTable = ref
				for i, c := range fk.RefColumns {
					rc, ok := ref.Column(c.Name)
					if ok {
						fk.RefColumns[i] = rc
					}
				}
			}
		}
	}
}

// ValuesEqual checks if the 2 string slices are equal (including their order).
func ValuesEqual(v1, v2 []string) bool {
	if len(v1) != len(v2) {
		return false
	}
	for i := range v1 {
		if v1[i] != v2[i] {
			return false
		}
	}
	return true
}

// ModeInspectSchema returns the InspectMode or its default.
func ModeInspectSchema(o *schema.InspectOptions) schema.InspectMode {
	if o == nil || o.Mode == 0 {
		return schema.InspectSchemas | schema.InspectTables
	}
	return o.Mode
}

// ModeInspectRealm returns the InspectMode or its default.
func ModeInspectRealm(o *schema.InspectRealmOption) schema.InspectMode {
	if o == nil || o.Mode == 0 {
		return schema.InspectSchemas | schema.InspectTables
	}
	return o.Mode
}

// A Builder provides a syntactic sugar API for writing SQL statements.
type Builder struct {
	bytes.Buffer
	QuoteChar byte    // quoting identifiers
	Schema    *string // schema qualifier
}

// P writes a list of phrases to the builder separated and
// suffixed with whitespace.
func (b *Builder) P(phrases ...string) *Builder {
	for _, p := range phrases {
		if p == "" {
			continue
		}
		// Separate from the previous phrase, except right after '('.
		if b.Len() > 0 && b.lastByte() != ' ' && b.lastByte() != '(' {
			b.WriteByte(' ')
		}
		b.WriteString(p)
		if p[len(p)-1] != ' ' {
			b.WriteByte(' ')
		}
	}
	return b
}

// Ident writes the given string quoted as an SQL identifier.
func (b *Builder) Ident(s string) *Builder {
	if s != "" {
		b.WriteByte(b.QuoteChar)
		b.WriteString(s)
		b.WriteByte(b.QuoteChar)
		b.WriteByte(' ')
	}
	return b
}

// Table writes the table identifier to the builder, prefixed
// with the schema name if exists.
func (b *Builder) Table(t *schema.Table) *Builder {
	switch {
	// Custom qualifier.
	case b.Schema != nil:
		// Empty means skip prefix.
		if *b.Schema != "" {
			b.Ident(*b.Schema)
			b.rewriteLastByte('.')
		}
	// Default schema qualifier.
	case t.Schema != nil && t.Schema.Name != "":
		b.Ident(t.Schema.Name)
		b.rewriteLastByte('.')
	}
	b.Ident(t.Name)
	return b
}

// Comma writes a comma in case the buffer is not empty, or
// replaces the last char if it is a whitespace.
func (b *Builder) Comma() *Builder {
	switch {
	case b.Len() == 0:
	case b.lastByte() == ' ':
		b.rewriteLastByte(',')
		b.WriteByte(' ')
	default:
		b.WriteString(", ")
	}
	return b
}

// MapComma maps the slice x using the function f and joins the result with
// a comma separating between the written elements.
func (b *Builder) MapComma(x any, f func(i int, b *Builder)) *Builder {
	s := reflect.ValueOf(x)
	for i := 0; i < s.Len(); i++ {
		if i > 0 {
			b.Comma()
		}
		f(i, b)
	}
	return b
}

// MapCommaErr is like MapComma, but returns an error if f returns an error.
func (b *Builder) MapCommaErr(x any, f func(i int, b *Builder) error) error {
	s := reflect.ValueOf(x)
	for i := 0; i < s.Len(); i++ {
		if i > 0 {
			b.Comma()
		}
		if err := f(i, b); err != nil {
			return err
		}
	}
	return nil
}

// Wrap wraps the written string with parentheses.
func (b *Builder) Wrap(f func(b *Builder)) *Builder {
	b.WriteByte('(')
	f(b)
	// Replace a trailing space with the closing paren.
	if b.lastByte() != ' ' {
		b.WriteByte(')')
	} else {
		b.rewriteLastByte(')')
	}
	return b
}

// Clone returns a duplicate of the builder.
// NOTE(review): the Schema qualifier is not carried over to the clone —
// presumably intentional for current callers, but worth confirming.
func (b *Builder) Clone() *Builder {
	return &Builder{
		QuoteChar: b.QuoteChar,
		Buffer:    *bytes.NewBufferString(b.Buffer.String()),
	}
}

// String overrides the Buffer.String method and ensure no spaces pad the returned statement.
func (b *Builder) String() string { return strings.TrimSpace(b.Buffer.String()) } func (b *Builder) lastByte() byte { if b.Len() == 0 { return 0 } buf := b.Buffer.Bytes() return buf[len(buf)-1] } func (b *Builder) rewriteLastByte(c byte) { if b.Len() == 0 { return } buf := b.Buffer.Bytes() buf[len(buf)-1] = c } // IsQuoted reports if the given string is quoted with one of the given quotes (e.g. ', ", `). func IsQuoted(s string, q ...byte) bool { for i := range q { if l, r := strings.IndexByte(s, q[i]), strings.LastIndexByte(s, q[i]); l < r && l == 0 && r == len(s)-1 { return true } } return false } // IsLiteralBool reports if the given string is a valid literal bool. func IsLiteralBool(s string) bool { _, err := strconv.ParseBool(s) return err == nil } // IsLiteralNumber reports if the given string is a literal number. func IsLiteralNumber(s string) bool { // Hex digits. if strings.HasPrefix(s, "0x") || strings.HasPrefix(s, "0X") { // Some databases allow odd length hex string. _, err := strconv.ParseUint(s[2:], 16, 64) return err == nil } // Digits with optional exponent. _, err := strconv.ParseFloat(s, 64) return err == nil } // DefaultValue returns the string represents the DEFAULT of a column. func DefaultValue(c *schema.Column) (string, bool) { switch x := c.Default.(type) { case nil: return "", false case *schema.Literal: return x.V, true case *schema.RawExpr: return x.X, true default: panic(fmt.Sprintf("unexpected default value type: %T", x)) } } // MayWrap ensures the given string is wrapped with parentheses. // Used by the different drivers to turn strings valid expressions. func MayWrap(s string) string { n := len(s) - 1 if len(s) < 2 || s[0] != '(' || s[n] != ')' || !balanced(s[1:n]) { return "(" + s + ")" } return s } func balanced(expr string) bool { return ExprLastIndex(expr) == len(expr)-1 } // ExprLastIndex scans the first expression in the given string until // its end and returns its last index. 
func ExprLastIndex(expr string) int { var l, r int for i := 0; i < len(expr); i++ { Top: switch expr[i] { case '(': l++ case ')': r++ // String or identifier. case '\'', '"', '`': for j := i + 1; j < len(expr); j++ { switch expr[j] { case '\\': j++ case expr[i]: i = j break Top } } // Unexpected EOS. return -1 } // Balanced parens and we reached EOS or a terminator. if l == r && (i == len(expr)-1 || expr[i+1] == ',') { return i } else if r > l { return -1 } } return -1 } // ReverseChanges reverses the order of the changes. func ReverseChanges(c []schema.Change) { for i, n := 0, len(c); i < n/2; i++ { c[i], c[n-i-1] = c[n-i-1], c[i] } } // P returns a pointer to v. func P[T any](v T) *T { return &v } // V returns the value p is pointing to. // If p is nil, the zero value is returned. func V[T any](p *T) (v T) { if p != nil { v = *p } return } atlas-0.7.2/sql/internal/sqlx/sqlx_test.go000066400000000000000000000115261431455511600206150ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package sqlx import ( "strconv" "testing" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestModeInspectRealm(t *testing.T) { m := ModeInspectRealm(nil) require.True(t, m.Is(schema.InspectSchemas)) require.True(t, m.Is(schema.InspectTables)) m = ModeInspectRealm(&schema.InspectRealmOption{}) require.True(t, m.Is(schema.InspectSchemas)) require.True(t, m.Is(schema.InspectTables)) m = ModeInspectRealm(&schema.InspectRealmOption{Mode: schema.InspectSchemas}) require.True(t, m.Is(schema.InspectSchemas)) require.False(t, m.Is(schema.InspectTables)) } func TestModeInspectSchema(t *testing.T) { m := ModeInspectSchema(nil) require.True(t, m.Is(schema.InspectSchemas)) require.True(t, m.Is(schema.InspectTables)) m = ModeInspectSchema(&schema.InspectOptions{}) require.True(t, m.Is(schema.InspectSchemas)) require.True(t, m.Is(schema.InspectTables)) m = ModeInspectSchema(&schema.InspectOptions{Mode: schema.InspectSchemas}) require.True(t, m.Is(schema.InspectSchemas)) require.False(t, m.Is(schema.InspectTables)) } func TestBuilder(t *testing.T) { var ( b = &Builder{QuoteChar: '"'} columns = []string{"a", "b", "c"} ) b.P("CREATE TABLE"). Table(&schema.Table{Name: "users"}). Wrap(func(b *Builder) { b.MapComma(columns, func(i int, b *Builder) { b.Ident(columns[i]).P("int").P("NOT NULL") }) b.Comma().P("PRIMARY KEY").Wrap(func(b *Builder) { b.MapComma(columns, func(i int, b *Builder) { b.Ident(columns[i]) }) }) }) require.Equal(t, `CREATE TABLE "users" ("a" int NOT NULL, "b" int NOT NULL, "c" int NOT NULL, PRIMARY KEY ("a", "b", "c"))`, b.String()) } func TestBuilder_Qualifier(t *testing.T) { var ( s = "other" b = &Builder{QuoteChar: '"', Schema: &s} ) b.P("CREATE TABLE").Table(schema.NewTable("users")) require.Equal(t, `CREATE TABLE "other"."users"`, b.String()) // Bypass table schema. 
b.Reset() b.P("CREATE TABLE").Table(schema.NewTable("users").SetSchema(schema.New("test"))) require.Equal(t, `CREATE TABLE "other"."users"`, b.String()) // Empty qualifier, means skip. s = "" b.Reset() b.P("CREATE TABLE").Table(schema.NewTable("users").SetSchema(schema.New("test"))) require.Equal(t, `CREATE TABLE "users"`, b.String()) } func TestMayWrap(t *testing.T) { tests := []struct { input string wrapped bool }{ {"", true}, {"()", false}, {"('text')", false}, {"('(')", false}, {`('(\\')`, false}, {`('\')(')`, false}, {`(a) in (b)`, true}, {`a in (b)`, true}, {`("\\\\(((('")`, false}, {`('(')||(')')`, true}, // Test examples from SQLite. {"b || 'x'", true}, {"a+1", true}, {"substr(x, 2)", true}, {"(json_extract(x, '$.a'))", false}, {"(substr(a, 2) COLLATE NOCASE)", false}, {"(b+random())", false}, } for i, tt := range tests { t.Run(strconv.Itoa(i), func(t *testing.T) { expect := tt.input if tt.wrapped { expect = "(" + expect + ")" } require.Equal(t, expect, MayWrap(tt.input)) }) } } func TestExprLastIndex(t *testing.T) { tests := []struct { input string wantIdx int }{ {"", -1}, {"()", 1}, {"'('", 2}, {"('(')", 4}, {"('text')", 7}, {"floor(x), y", 7}, {"f(floor(x), y)", 13}, {"f(floor(x), y, (z))", 18}, {"f(x, (x*2)), y, (z)", 10}, {"(a || ' ' || b)", 14}, {"(a || ', ' || b)", 15}, {"a || ', ' || b, x", 13}, {"(a || ', ' || b), x", 15}, } for i, tt := range tests { t.Run(strconv.Itoa(i), func(t *testing.T) { idx := ExprLastIndex(tt.input) require.Equal(t, tt.wantIdx, idx) }) } } func TestReverseChanges(t *testing.T) { tests := []struct { input []schema.Change expect []schema.Change }{ { input: []schema.Change{ (*schema.AddColumn)(nil), }, expect: []schema.Change{ (*schema.AddColumn)(nil), }, }, { input: []schema.Change{ (*schema.AddColumn)(nil), (*schema.DropColumn)(nil), }, expect: []schema.Change{ (*schema.DropColumn)(nil), (*schema.AddColumn)(nil), }, }, { input: []schema.Change{ (*schema.AddColumn)(nil), (*schema.ModifyColumn)(nil), 
(*schema.DropColumn)(nil), }, expect: []schema.Change{ (*schema.DropColumn)(nil), (*schema.ModifyColumn)(nil), (*schema.AddColumn)(nil), }, }, { input: []schema.Change{ (*schema.AddColumn)(nil), (*schema.ModifyColumn)(nil), (*schema.DropColumn)(nil), (*schema.ModifyColumn)(nil), }, expect: []schema.Change{ (*schema.ModifyColumn)(nil), (*schema.DropColumn)(nil), (*schema.ModifyColumn)(nil), (*schema.AddColumn)(nil), }, }, } for i, tt := range tests { t.Run(strconv.Itoa(i), func(t *testing.T) { ReverseChanges(tt.input) require.Equal(t, tt.expect, tt.input) }) } } atlas-0.7.2/sql/migrate/000077500000000000000000000000001431455511600150605ustar00rootroot00000000000000atlas-0.7.2/sql/migrate/dir.go000066400000000000000000000261071431455511600161730ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package migrate import ( "bufio" "bytes" "crypto/sha256" "encoding/base64" "errors" "fmt" "io" "io/fs" "os" "path/filepath" "regexp" "sort" "strings" "text/template" "time" ) type ( // Dir wraps the functionality used to interact with a migration directory. Dir interface { fs.FS // WriteFile writes the data to the named file. WriteFile(string, []byte) error // Files returns a set of files stored in this Dir to be executed on a database. Files() ([]File, error) // Checksum returns a HashFile of the migration directory. Checksum() (HashFile, error) } // Formatter wraps the Format method. Formatter interface { // Format formats the given Plan into one or more migration files. Format(*Plan) ([]File, error) } // File represents a single migration file. File interface { // Name returns the name of the migration file. Name() string // Desc returns the description of the migration File. Desc() string // Version returns the version of the migration File. 
Version() string // Bytes returns the read content of the file. Bytes() []byte // Stmts returns the set of SQL statements this file holds. Stmts() ([]string, error) // StmtDecls returns the set of SQL statements this file holds alongside its preceding comments. StmtDecls() ([]*Stmt, error) } ) // LocalDir implements Dir for a local migration // directory with default Atlas formatting. type LocalDir struct { path string } var _ Dir = (*LocalDir)(nil) // NewLocalDir returns a new the Dir used by a Planner to work on the given local path. func NewLocalDir(path string) (*LocalDir, error) { fi, err := os.Stat(path) if err != nil { return nil, fmt.Errorf("sql/migrate: %w", err) } if !fi.IsDir() { return nil, fmt.Errorf("sql/migrate: %q is not a dir", path) } return &LocalDir{path: path}, nil } // Path returns the local path used for opening this dir. func (d *LocalDir) Path() string { return d.path } // Open implements fs.FS. func (d *LocalDir) Open(name string) (fs.File, error) { return os.Open(filepath.Join(d.path, name)) } // WriteFile implements Dir.WriteFile. func (d *LocalDir) WriteFile(name string, b []byte) error { return os.WriteFile(filepath.Join(d.path, name), b, 0644) } // Files implements Dir.Files. It looks for all files with .sql suffix and orders them by filename. func (d *LocalDir) Files() ([]File, error) { names, err := fs.Glob(d, "*.sql") if err != nil { return nil, err } // Sort files lexicographically. sort.Slice(names, func(i, j int) bool { return names[i] < names[j] }) ret := make([]File, len(names)) for i, n := range names { b, err := fs.ReadFile(d, n) if err != nil { return nil, fmt.Errorf("sql/migrate: read file %q: %w", n, err) } ret[i] = NewLocalFile(n, b) } return ret, nil } // Checksum implements Dir.Checksum. By default, it calls Files() and creates a checksum from them. 
func (d *LocalDir) Checksum() (HashFile, error) { var ( hs HashFile h = sha256.New() ) files, err := d.Files() if err != nil { return nil, err } for _, f := range files { if _, err = h.Write([]byte(f.Name())); err != nil { return nil, err } // Check if this file contains an "atlas:sum" directive and if so, act to it. if mode, ok := directive(string(f.Bytes()), directiveSum); ok && mode == sumModeIgnore { continue } if _, err = h.Write(f.Bytes()); err != nil { return nil, err } hs = append(hs, struct{ N, H string }{f.Name(), base64.StdEncoding.EncodeToString(h.Sum(nil))}) } return hs, nil } // LocalFile is used by LocalDir to implement the Scanner interface. type LocalFile struct { n string b []byte } var _ File = (*LocalFile)(nil) // NewLocalFile returns a new local file. func NewLocalFile(name string, data []byte) *LocalFile { return &LocalFile{n: name, b: data} } // Name implements File.Name. func (f LocalFile) Name() string { return f.n } // Desc implements File.Desc. func (f LocalFile) Desc() string { parts := strings.SplitN(f.n, "_", 2) if len(parts) == 1 { return "" } return strings.TrimSuffix(parts[1], ".sql") } // Version implements File.Version. func (f LocalFile) Version() string { return strings.SplitN(strings.TrimSuffix(f.n, ".sql"), "_", 2)[0] } // Stmts returns the SQL statement exists in the local file. func (f LocalFile) Stmts() ([]string, error) { s, err := Stmts(string(f.b)) if err != nil { return nil, err } stmts := make([]string, len(s)) for i := range s { stmts[i] = s[i].Text } return stmts, nil } // StmtDecls returns the all statement declarations exist in the local file. func (f LocalFile) StmtDecls() ([]*Stmt, error) { return Stmts(string(f.b)) } // Bytes returns local file data. func (f LocalFile) Bytes() []byte { return f.b } var ( // templateFuncs contains the template.FuncMap for the DefaultFormatter. 
templateFuncs = template.FuncMap{"now": func() string { return time.Now().UTC().Format("20060102150405") }} // DefaultFormatter is a default implementation for Formatter. DefaultFormatter = &TemplateFormatter{ templates: []struct{ N, C *template.Template }{ { N: template.Must(template.New("").Funcs(templateFuncs).Parse( "{{ with .Version }}{{ . }}{{ else }}{{ now }}{{ end }}{{ with .Name }}_{{ . }}{{ end }}.sql", )), C: template.Must(template.New("").Parse( `{{ range .Changes }}{{ with .Comment }}-- {{ println . }}{{ end }}{{ printf "%s;\n" .Cmd }}{{ end }}`, )), }, }, } ) // TemplateFormatter implements Formatter by using templates. type TemplateFormatter struct { templates []struct{ N, C *template.Template } } // NewTemplateFormatter creates a new Formatter working with the given templates. // // migrate.NewTemplateFormatter( // template.Must(template.New("").Parse("{{now.Unix}}{{.Name}}.sql")), // name template // template.Must(template.New("").Parse("{{range .Changes}}{{println .Cmd}}{{end}}")), // content template // ) func NewTemplateFormatter(templates ...*template.Template) (*TemplateFormatter, error) { if n := len(templates); n == 0 || n%2 == 1 { return nil, fmt.Errorf("zero or odd number of templates given") } t := new(TemplateFormatter) for i := 0; i < len(templates); i += 2 { t.templates = append(t.templates, struct{ N, C *template.Template }{templates[i], templates[i+1]}) } return t, nil } // Format implements the Formatter interface. func (t *TemplateFormatter) Format(plan *Plan) ([]File, error) { files := make([]File, 0, len(t.templates)) for _, tpl := range t.templates { var n, b bytes.Buffer if err := tpl.N.Execute(&n, plan); err != nil { return nil, err } if err := tpl.C.Execute(&b, plan); err != nil { return nil, err } files = append(files, &LocalFile{ n: n.String(), b: b.Bytes(), }) } return files, nil } // HashFileName of the migration directory integrity sum file. 
const HashFileName = "atlas.sum" // HashFile represents the integrity sum file of the migration dir. type HashFile []struct{ N, H string } // HashSum reads the whole dir, sorts the files by name and creates a HashSum from its contents. // // Deprecated: Use Dir.Checksum instead. func HashSum(dir Dir) (HashFile, error) { return dir.Checksum() } // WriteSumFile writes the given HashFile to the Dir. If the file does not exist, it is created. func WriteSumFile(dir Dir, sum HashFile) error { b, err := sum.MarshalText() if err != nil { return err } return dir.WriteFile(HashFileName, b) } // Sum returns the checksum of the represented hash file. func (f HashFile) Sum() string { sha := sha256.New() for _, f := range f { sha.Write([]byte(f.N)) sha.Write([]byte(f.H)) } return base64.StdEncoding.EncodeToString(sha.Sum(nil)) } // MarshalText implements encoding.TextMarshaler. func (f HashFile) MarshalText() ([]byte, error) { buf := new(bytes.Buffer) for _, f := range f { fmt.Fprintf(buf, "%s h1:%s\n", f.N, f.H) } return []byte(fmt.Sprintf("h1:%s\n%s", f.Sum(), buf)), nil } // UnmarshalText implements encoding.TextUnmarshaler. func (f *HashFile) UnmarshalText(b []byte) error { sc := bufio.NewScanner(bytes.NewReader(b)) // The first line contains the sum. sc.Scan() sum := strings.TrimPrefix(sc.Text(), "h1:") for sc.Scan() { li := strings.SplitN(sc.Text(), "h1:", 2) if len(li) != 2 { return ErrChecksumFormat } *f = append(*f, struct{ N, H string }{strings.TrimSpace(li[0]), li[1]}) } if sum != f.Sum() { return ErrChecksumMismatch } return sc.Err() } // SumByName returns the hash for a migration file by its name. func (f HashFile) SumByName(n string) (string, error) { for _, f := range f { if f.N == n { return f.H, nil } } return "", errors.New("checksum not found") } var ( // ErrChecksumFormat is returned from Validate if the sum files format is invalid. 
ErrChecksumFormat = errors.New("checksum file format invalid") // ErrChecksumMismatch is returned from Validate if the hash sums don't match. ErrChecksumMismatch = errors.New("checksum mismatch") // ErrChecksumNotFound is returned from Validate if the hash file does not exist. ErrChecksumNotFound = errors.New("checksum file not found") ) // Validate checks if the migration dir is in sync with its sum file. // If they don't match ErrChecksumMismatch is returned. func Validate(dir Dir) error { fh, err := readHashFile(dir) if errors.Is(err, fs.ErrNotExist) { // If there are no migration files yet this is okay. files, err := fs.ReadDir(dir, "/") if err != nil || len(files) > 0 { return ErrChecksumNotFound } return nil } if err != nil { return err } mh, err := dir.Checksum() if err != nil { return err } if fh.Sum() != mh.Sum() { return ErrChecksumMismatch } return nil } // FilesLastIndex returns the index of the last file // satisfying f(i), or -1 if none do. func FilesLastIndex(files []File, f func(File) bool) int { for i := len(files) - 1; i >= 0; i-- { if f(files[i]) { return i } } return -1 } const ( // atlas:sum directive. directiveSum = "sum" sumModeIgnore = "ignore" // atlas:delimiter directive. directiveDelimiter = "delimiter" directivePrefixSQL = "-- " ) var reDirective = regexp.MustCompile(`^([ -~]*)atlas:(\w+)(?: +([ -~]*))*`) // directive searches in the content a line that matches a directive // with the given prefix and name. For example: // // directive(c, "delimiter", "-- ") // '-- atlas:delimiter.*' // directive(c, "sum", "") // 'atlas:sum.*' // directive(c, "sum") // '.*atlas:sum' func directive(content, name string, prefix ...string) (string, bool) { m := reDirective.FindStringSubmatch(content) // In case the prefix was provided ensures it is matched. if len(m) == 4 && m[2] == name && (len(prefix) == 0 || prefix[0] == m[1]) { return m[3], true } return "", false } // readHashFile reads the HashFile from the given Dir. 
func readHashFile(dir Dir) (HashFile, error) { f, err := dir.Open(HashFileName) if err != nil { return nil, err } defer f.Close() b, err := io.ReadAll(f) if err != nil { return nil, err } var fh HashFile if err := fh.UnmarshalText(b); err != nil { return nil, err } return fh, nil } atlas-0.7.2/sql/migrate/dir_test.go000066400000000000000000000153021431455511600172250ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package migrate_test import ( _ "embed" "io" "os" "path/filepath" "testing" "time" "ariga.io/atlas/sql/migrate" "github.com/stretchr/testify/require" ) func TestHashSum(t *testing.T) { // Sum file gets created. p := t.TempDir() d, err := migrate.NewLocalDir(p) require.NoError(t, err) plan := &migrate.Plan{Name: "plan", Changes: []*migrate.Change{{Cmd: "cmd"}}} pl := migrate.NewPlanner(nil, d) require.NotNil(t, pl) require.NoError(t, pl.WritePlan(plan)) v := time.Now().UTC().Format("20060102150405") require.Equal(t, 2, countFiles(t, d)) requireFileEqual(t, d, v+"_plan.sql", "cmd;\n") require.FileExists(t, filepath.Join(p, "atlas.sum")) // Disable sum. p = t.TempDir() d, err = migrate.NewLocalDir(p) require.NoError(t, err) pl = migrate.NewPlanner(nil, d, migrate.PlanWithChecksum(false)) require.NotNil(t, pl) require.NoError(t, pl.WritePlan(plan)) require.Equal(t, 1, countFiles(t, d)) requireFileEqual(t, d, v+"_plan.sql", "cmd;\n") // Files not ending with .sql get ignored. 
p = t.TempDir() d, err = migrate.NewLocalDir(p) require.NoError(t, err) pl = migrate.NewPlanner(nil, d) require.NotNil(t, pl) require.NoError(t, os.WriteFile(filepath.Join(p, "include.sql"), nil, 0600)) require.NoError(t, os.WriteFile(filepath.Join(p, "exclude.txt"), nil, 0600)) require.NoError(t, pl.WritePlan(plan)) require.Equal(t, 4, countFiles(t, d)) c, err := os.ReadFile(filepath.Join(p, "atlas.sum")) require.NoError(t, err) require.Contains(t, string(c), "include.sql") require.NotContains(t, string(c), "exclude.txt") // Files with directive in first line get ignored. p = t.TempDir() d, err = migrate.NewLocalDir(p) require.NoError(t, err) pl = migrate.NewPlanner(nil, d) require.NotNil(t, pl) require.NoError(t, os.WriteFile(filepath.Join(p, "include.sql"), []byte("//atlas:sum\nfoo"), 0600)) require.NoError(t, os.WriteFile(filepath.Join(p, "exclude_1.sql"), []byte("//atlas:sum ignore\nbar"), 0600)) require.NoError(t, os.WriteFile(filepath.Join(p, "exclude_2.sql"), []byte("atlas:sum ignore"), 0600)) require.NoError(t, pl.WritePlan(plan)) require.Equal(t, 5, countFiles(t, d)) requireFileEqual(t, d, v+"_plan.sql", "cmd;\n") c, err = os.ReadFile(filepath.Join(p, "atlas.sum")) require.NoError(t, err) require.Contains(t, string(c), "include") require.NotContains(t, string(c), "exclude_1.sql") require.NotContains(t, string(c), "exclude_2.sql") } //go:embed testdata/migrate/atlas.sum var hash []byte func TestValidate(t *testing.T) { // Add the sum file form the testdata/migrate dir without any files in it - should fail. p := t.TempDir() d, err := migrate.NewLocalDir(p) require.NoError(t, err) require.NoError(t, d.WriteFile("atlas.sum", hash)) require.Equal(t, migrate.ErrChecksumMismatch, migrate.Validate(d)) td := "testdata/migrate" d, err = migrate.NewLocalDir(td) require.NoError(t, err) // testdata/migrate is valid. require.Nil(t, migrate.Validate(d)) // Making a manual change to the sum file should raise validation error. 
f, err := os.OpenFile(filepath.Join(td, "atlas.sum"), os.O_RDWR, os.ModeAppend) require.NoError(t, err) _, err = f.WriteString("foo") require.NoError(t, err) require.NoError(t, f.Close()) t.Cleanup(func() { require.NoError(t, os.WriteFile(filepath.Join(td, "atlas.sum"), hash, 0644)) }) require.Equal(t, migrate.ErrChecksumMismatch, migrate.Validate(d)) require.NoError(t, os.WriteFile(filepath.Join(td, "atlas.sum"), hash, 0644)) f, err = os.OpenFile(filepath.Join(td, "atlas.sum"), os.O_APPEND|os.O_WRONLY, os.ModeAppend) require.NoError(t, err) _, err = f.WriteString("foo") require.NoError(t, err) require.NoError(t, f.Close()) require.Equal(t, migrate.ErrChecksumFormat, migrate.Validate(d)) require.NoError(t, os.WriteFile(filepath.Join(td, "atlas.sum"), hash, 0644)) // Changing the filename should raise validation error. require.NoError(t, os.Rename(filepath.Join(td, "1_initial.up.sql"), filepath.Join(td, "1_first.up.sql"))) t.Cleanup(func() { require.NoError(t, os.Rename(filepath.Join(td, "1_first.up.sql"), filepath.Join(td, "1_initial.up.sql"))) }) require.Equal(t, migrate.ErrChecksumMismatch, migrate.Validate(d)) // Removing it as well (move it out of the dir). 
require.NoError(t, os.Rename(filepath.Join(td, "1_first.up.sql"), filepath.Join(td, "..", "bak"))) t.Cleanup(func() { require.NoError(t, os.Rename(filepath.Join(td, "..", "bak"), filepath.Join(td, "1_first.up.sql"))) }) require.Equal(t, migrate.ErrChecksumMismatch, migrate.Validate(d)) } func TestHash_MarshalText(t *testing.T) { d, err := migrate.NewLocalDir("testdata/migrate") require.NoError(t, err) h, err := d.Checksum() require.NoError(t, err) ac, err := h.MarshalText() require.Equal(t, hash, ac) } func TestHash_UnmarshalText(t *testing.T) { d, err := migrate.NewLocalDir("testdata/migrate") require.NoError(t, err) h, err := d.Checksum() require.NoError(t, err) var ac migrate.HashFile require.NoError(t, ac.UnmarshalText(hash)) require.Equal(t, h, ac) } func TestLocalDir(t *testing.T) { // Files don't work. d, err := migrate.NewLocalDir("migrate.go") require.ErrorContains(t, err, "sql/migrate: \"migrate.go\" is not a dir") require.Nil(t, d) // Does not create a dir for you. d, err = migrate.NewLocalDir("foo/bar") require.EqualError(t, err, "sql/migrate: stat foo/bar: no such file or directory") require.Nil(t, d) // Open and WriteFile work. d, err = migrate.NewLocalDir(t.TempDir()) require.NoError(t, err) require.NotNil(t, d) require.NoError(t, d.WriteFile("name", []byte("content"))) f, err := d.Open("name") require.NoError(t, err) i, err := f.Stat() require.NoError(t, err) require.Equal(t, i.Name(), "name") c, err := io.ReadAll(f) require.NoError(t, err) require.Equal(t, "content", string(c)) // Default Dir implementation. 
d, err = migrate.NewLocalDir("testdata/migrate/sub") require.NoError(t, err) require.NotNil(t, d) files, err := d.Files() require.NoError(t, err) require.Len(t, files, 3) require.Equal(t, "1.a_sub.up.sql", files[0].Name()) require.Equal(t, "2.10.x-20_description.sql", files[1].Name()) require.Equal(t, "3_partly.sql", files[2].Name()) stmts, err := files[0].Stmts() require.NoError(t, err) require.Equal(t, []string{"CREATE TABLE t_sub(c int);", "ALTER TABLE t_sub ADD c1 int;"}, stmts) require.Equal(t, "1.a", files[0].Version()) require.Equal(t, "sub.up", files[0].Desc()) stmts, err = files[1].Stmts() require.NoError(t, err) require.Equal(t, []string{"ALTER TABLE t_sub ADD c2 int;"}, stmts) require.Equal(t, "2.10.x-20", files[1].Version()) require.Equal(t, "description", files[1].Desc()) } atlas-0.7.2/sql/migrate/lex.go000066400000000000000000000137201431455511600162020ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package migrate import ( "errors" "fmt" "io" "strings" "unicode" "unicode/utf8" ) // Stmt represents a scanned statement text along with its // position in the file and associated comments group. type Stmt struct { Pos int // statement position Text string // statement text Comments []string // associated comments } // Directive returns all directive comments with the given name. // See: pkg.go.dev/cmd/compile#hdr-Compiler_Directives. 
func (s *Stmt) Directive(name string) (ds []string) { for _, c := range s.Comments { switch { case strings.HasPrefix(c, "/*") && !strings.Contains(c, "\n"): if d, ok := directive(strings.TrimSuffix(c, "*/"), name, "/*"); ok { ds = append(ds, d) } default: for _, p := range []string{"#", "--", "-- "} { if d, ok := directive(c, name, p); ok { ds = append(ds, d) } } } } return } // Stmts provides a generic implementation for extracting SQL statements from the given file contents. func Stmts(input string) ([]*Stmt, error) { var stmts []*Stmt l, err := newLex(input) if err != nil { return nil, err } for { s, err := l.stmt() if err == io.EOF { return stmts, nil } if err != nil { return nil, err } stmts = append(stmts, s) } } type lex struct { input string pos int // current phase position total int // total bytes scanned so far width int // size of latest rune delim string // configured delimiter comments []string // collected comments } const ( eos = -1 delimiter = ";" delimiterCmd = "delimiter" ) func newLex(input string) (*lex, error) { l := &lex{input: input, delim: delimiter} if d, ok := directive(input, directiveDelimiter, directivePrefixSQL); ok { if err := l.setDelim(d); err != nil { return nil, err } parts := strings.SplitN(input, "\n", 2) if len(parts) == 1 { return nil, fmt.Errorf("no input found after delimiter %q", d) } l.input = parts[1] } return l, nil } func (l *lex) stmt() (*Stmt, error) { var ( depth int text string ) l.skipSpaces() Scan: for { switch r := l.next(); { case r == eos: switch { case depth > 0: return nil, errors.New("unclosed parentheses") case l.pos > 0: text = l.input break Scan default: return nil, io.EOF } case r == '(': depth++ case r == ')': if depth == 0 { return nil, fmt.Errorf("unexpected ')' at position %d", l.pos) } depth-- case r == '\'', r == '"', r == '`': if err := l.skipQuote(r); err != nil { return nil, err } // Check if the start of the statement is the MySQL DELIMITER command. 
// See https://dev.mysql.com/doc/refman/8.0/en/mysql-commands.html. case l.pos == 1 && len(l.input) > len(delimiterCmd) && strings.EqualFold(l.input[:len(delimiterCmd)], delimiterCmd): l.addPos(len(delimiterCmd) - 1) if err := l.delimCmd(); err != nil { return nil, err } // Delimiters take precedence over comments. case depth == 0 && strings.HasPrefix(l.input[l.pos-l.width:], l.delim): l.addPos(len(l.delim) - l.width) text = l.input[:l.pos] break Scan case r == '#': l.comment("#", "\n") case r == '-' && l.next() == '-': l.comment("--", "\n") case r == '/' && l.next() == '*': l.comment("/*", "*/") } } return l.emit(text), nil } func (l *lex) next() rune { if l.pos >= len(l.input) { return eos } r, w := utf8.DecodeRuneInString(l.input[l.pos:]) l.width = w l.addPos(w) return r } func (l *lex) pick() rune { p, w := l.pos, l.width r := l.next() l.pos, l.width = p, w return r } func (l *lex) addPos(p int) { l.pos += p l.total += p } func (l *lex) skipQuote(quote rune) error { for { switch r := l.next(); { case r == eos: return fmt.Errorf("unclosed quote %q", quote) case r == '\\': l.next() case r == quote: return nil } } } func (l *lex) comment(left, right string) { i := strings.Index(l.input[l.pos:], right) // Not a comment. if i == -1 { return } // If the comment reside inside a statement, collect it. if l.pos != len(left) { l.addPos(i + len(right)) return } l.addPos(i + len(right)) // If we did not scan any statement characters, it // can be skipped and stored in the comments group. l.comments = append(l.comments, l.input[:l.pos]) l.input = l.input[l.pos:] l.pos = 0 // Double \n separate the comments group from the statement. 
if strings.HasPrefix(l.input, "\n\n") || right == "\n" && strings.HasPrefix(l.input, "\n") { l.comments = nil } l.skipSpaces() } func (l *lex) skipSpaces() { n := len(l.input) l.input = strings.TrimLeftFunc(l.input, unicode.IsSpace) l.total += n - len(l.input) } func (l *lex) emit(text string) *Stmt { s := &Stmt{Pos: l.total - len(text), Text: text, Comments: l.comments} l.input = l.input[l.pos:] l.pos = 0 l.comments = nil // Trim custom delimiter. if l.delim != delimiter { s.Text = strings.TrimSuffix(s.Text, l.delim) } s.Text = strings.TrimSpace(s.Text) return s } // delimCmd checks if the scanned "DELIMITER" // text represents an actual delimiter command. func (l *lex) delimCmd() error { // A space must come after the delimiter. if l.pick() != ' ' { return nil } // Scan delimiter. for r := l.pick(); r != eos && r != '\n'; r = l.next() { } delim := strings.TrimSpace(l.input[len(delimiterCmd):l.pos]) // MySQL client allows quoting delimiters. if strings.HasPrefix(delim, "'") && strings.HasSuffix(delim, "'") { delim = strings.ReplaceAll(delim[1:len(delim)-1], "''", "'") } if err := l.setDelim(delim); err != nil { return err } // Skip all we saw until now. l.emit(l.input[:l.pos]) return nil } func (l *lex) setDelim(d string) error { if d == "" { return errors.New("empty delimiter") } // Unescape delimiters. e.g. "\\n" => "\n". l.delim = strings.NewReplacer(`\n`, "\n", `\r`, "\r", `\t`, "\t").Replace(d) return nil } atlas-0.7.2/sql/migrate/lex_test.go000066400000000000000000000053231431455511600172410ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package migrate import ( "os" "path/filepath" "strings" "testing" "github.com/stretchr/testify/require" ) func TestLocalFile_Stmts(t *testing.T) { path := filepath.Join("testdata", "lex") dir, err := NewLocalDir(path) require.NoError(t, err) files, err := dir.Files() require.NoError(t, err) for _, f := range files { stmts, err := f.Stmts() require.NoError(t, err) buf, err := os.ReadFile(filepath.Join(path, f.Name()+".golden")) require.NoError(t, err) require.Equalf(t, string(buf), strings.Join(stmts, "\n-- end --\n"), "mismatched statements in file %q", f.Name()) } } func TestLocalFile_StmtDecls(t *testing.T) { f := `cmd0; -- test cmd1; -- hello -- world cmd2; -- skip -- this # comment /* Skip this as well */ # Skip this /* one */ # command cmd3; /* comment1 */ /* comment2 */ cmd4; --atlas:nolint -- atlas:nolint destructive cmd5; #atlas:lint error /*atlas:nolint DS101*/ /* atlas:lint not a directive */ /* atlas:lint not a directive */ cmd6; ` stmts, err := NewLocalFile("f", []byte(f)).StmtDecls() require.NoError(t, err) require.Len(t, stmts, 7) require.Equal(t, "cmd0;", stmts[0].Text) require.Equal(t, 0, stmts[0].Pos, "start of the file") require.Equal(t, "cmd1;", stmts[1].Text) require.Equal(t, strings.Index(f, "cmd1;"), stmts[1].Pos) require.Equal(t, []string{"-- test\n"}, stmts[1].Comments) require.Equal(t, "cmd2;", stmts[2].Text) require.Equal(t, strings.Index(f, "cmd2;"), stmts[2].Pos) require.Equal(t, []string{"-- hello\n", "-- world\n"}, stmts[2].Comments) require.Equal(t, "cmd3;", stmts[3].Text) require.Equal(t, strings.Index(f, "cmd3;"), stmts[3].Pos) require.Equal(t, []string{"# command\n"}, stmts[3].Comments) require.Equal(t, "cmd4;", stmts[4].Text) require.Equal(t, strings.Index(f, "cmd4;"), stmts[4].Pos) require.Equal(t, []string{"/* comment1 */", "/* comment2 */"}, stmts[4].Comments) require.Equal(t, "cmd5;", stmts[5].Text) require.Equal(t, strings.Index(f, "cmd5;"), stmts[5].Pos) require.Equal(t, []string{"--atlas:nolint\n", "-- atlas:nolint 
destructive\n"}, stmts[5].Comments) require.Equal(t, []string{"", "destructive"}, stmts[5].Directive("nolint")) require.Equal(t, "cmd6;", stmts[6].Text) require.Equal(t, strings.Index(f, "cmd6;"), stmts[6].Pos) require.Equal(t, []string{"#atlas:lint error\n", "/*atlas:nolint DS101*/", "/* atlas:lint not a directive */", "/*\natlas:lint not a directive\n*/"}, stmts[6].Comments) require.Equal(t, []string{"error"}, stmts[6].Directive("lint")) require.Equal(t, []string{"DS101"}, stmts[6].Directive("nolint")) } atlas-0.7.2/sql/migrate/migrate.go000066400000000000000000000714041431455511600170450ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package migrate import ( "context" "crypto/sha256" "encoding/base64" "errors" "fmt" "strings" "time" "ariga.io/atlas/sql/schema" ) type ( // A Plan defines a planned changeset that its execution brings the database to // the new desired state. Additional information is calculated by the different // drivers to indicate if the changeset is transactional (can be rolled-back) and // reversible (a down file can be generated to it). Plan struct { // Version and Name of the plan. Provided by the user or auto-generated. Version, Name string // Reversible describes if the changeset is reversible. Reversible bool // Transactional describes if the changeset is transactional. Transactional bool // Changes defines the list of changeset in the plan. Changes []*Change } // A Change of migration. Change struct { // Cmd or statement to execute. Cmd string // Args for placeholder parameters in the statement above. Args []any // A Comment describes the change. Comment string // Reverse contains the "reversed statement" if // command is reversible. Reverse string // The Source that caused this change, or nil. 
Source schema.Change } ) type ( // The Driver interface must be implemented by the different dialects to support database // migration authoring/planning and applying. ExecQuerier, Inspector and Differ, provide // basic schema primitives for inspecting database schemas, calculate the difference between // schema elements, and executing raw SQL statements. The PlanApplier interface wraps the // methods for generating migration plan for applying the actual changes on the database. Driver interface { schema.Differ schema.ExecQuerier schema.Inspector PlanApplier } // PlanApplier wraps the methods for planning and applying changes // on the database. PlanApplier interface { // PlanChanges returns a migration plan for applying the given changeset. PlanChanges(context.Context, string, []schema.Change, ...PlanOption) (*Plan, error) // ApplyChanges is responsible for applying the given changeset. // An error may return from ApplyChanges if the driver is unable // to execute a change. ApplyChanges(context.Context, []schema.Change, ...PlanOption) error } // PlanOptions holds the migration plan options to be used by PlanApplier. PlanOptions struct { // PlanWithSchemaQualifier allows setting a custom schema to prefix // tables and other resources. An empty string indicates no qualifier. SchemaQualifier *string } // PlanOption allows configuring a drivers' plan using functional arguments. PlanOption func(*PlanOptions) // StateReader wraps the method for reading a database/schema state. // The types below provides a few builtin options for reading a state // from a migration directory, a static object (e.g. a parsed file). StateReader interface { ReadState(ctx context.Context) (*schema.Realm, error) } // The StateReaderFunc type is an adapter to allow the use of // ordinary functions as state readers. StateReaderFunc func(ctx context.Context) (*schema.Realm, error) ) // ReadState calls f(ctx). 
func (f StateReaderFunc) ReadState(ctx context.Context) (*schema.Realm, error) {
	return f(ctx)
}

// ErrNoPlan is returned by Plan when there is no change between the two states.
var ErrNoPlan = errors.New("sql/migrate: no plan for matched states")

// Realm returns a StateReader for the static Realm object.
func Realm(r *schema.Realm) StateReader {
	return StateReaderFunc(func(context.Context) (*schema.Realm, error) {
		return r, nil
	})
}

// Schema returns a StateReader for the static Schema object.
//
// NOTE: reading the state wraps s in a fresh Realm (copying the attributes of
// its current Realm, if any) and mutates s.Realm to point at the new Realm.
func Schema(s *schema.Schema) StateReader {
	return StateReaderFunc(func(context.Context) (*schema.Realm, error) {
		r := &schema.Realm{Schemas: []*schema.Schema{s}}
		if s.Realm != nil {
			r.Attrs = s.Realm.Attrs
		}
		s.Realm = r
		return r, nil
	})
}

// RealmConn returns a StateReader for a Driver connected to a database.
func RealmConn(drv Driver, opts *schema.InspectRealmOption) StateReader {
	return StateReaderFunc(func(ctx context.Context) (*schema.Realm, error) {
		return drv.InspectRealm(ctx, opts)
	})
}

// SchemaConn returns a StateReader for a Driver connected to a schema.
func SchemaConn(drv Driver, name string, opts *schema.InspectOptions) StateReader {
	return StateReaderFunc(func(ctx context.Context) (*schema.Realm, error) {
		s, err := drv.InspectSchema(ctx, name, opts)
		if err != nil {
			return nil, err
		}
		return Schema(s).ReadState(ctx)
	})
}

type (
	// Planner can plan the steps to take to migrate from one state to another. It uses the enclosed Dir to write
	// those changes to versioned migration files.
	Planner struct {
		drv  Driver       // driver to use
		dir  Dir          // where migration files are stored and read from
		fmt  Formatter    // how to format a plan to migration files
		sum  bool         // whether to create a sum file for the migration directory
		opts []PlanOption // driver options
	}

	// PlannerOption allows managing a Planner using functional arguments.
	PlannerOption func(*Planner)

	// A RevisionReadWriter wraps the functionality for reading and writing migration revisions in a database table.
	RevisionReadWriter interface {
		// Ident returns an object that identifies this history table.
		Ident() *TableIdent

		// ReadRevisions returns all revisions.
		ReadRevisions(context.Context) ([]*Revision, error)

		// ReadRevision returns a revision by version.
		// Returns ErrRevisionNotExist if the version does not exist.
		ReadRevision(context.Context, string) (*Revision, error)

		// WriteRevision saves the revision to the storage.
		WriteRevision(context.Context, *Revision) error

		// DeleteRevision deletes a revision by version from the storage.
		DeleteRevision(context.Context, string) error
	}

	// A Revision denotes an applied migration in a deployment. Used to track migration executions state of a database.
	Revision struct {
		// Version of the migration.
		Version string

		// Description of this migration.
		Description string

		// Type of the migration.
		Type RevisionType

		// Applied denotes the amount of successfully applied statements of the revision.
		Applied int

		// Total denotes the total amount of statements of the migration.
		Total int

		// ExecutedAt denotes when this migration was started to be executed.
		ExecutedAt time.Time

		// ExecutionTime denotes the time it took for this migration to be applied on the database.
		ExecutionTime time.Duration

		// Error holds information about a migration error (if occurred).
		// If the error is from the application level, it is prefixed with "Go:\n".
		// If the error is raised from the database, Error contains both the failed statement and the database error
		// following the "SQL:\n<sql>\n\nError:\n<err>" format.
		Error string

		// Hash is the check-sum of this migration as stated by the migration directories HashFile.
		Hash string

		// PartialHashes contains one hash per statement that has been applied on the database.
		PartialHashes []string

		// OperatorVersion holds a string representation of the Atlas operator managing this database migration.
		OperatorVersion string
	}

	// RevisionType defines the type of the revision record in the history table.
	RevisionType uint

	// Executor is responsible to manage and execute a set of migration files against a database.
	Executor struct {
		drv         Driver             // The Driver to access and manage the database.
		dir         Dir                // The Dir with migration files to use.
		rrw         RevisionReadWriter // The RevisionReadWriter to read and write database revisions to.
		log         Logger             // The Logger to use.
		fromVer     string             // Calculate pending files from the given version (including it).
		baselineVer string             // Start the first migration after the given baseline version.
		allowDirty  bool               // Allow start working on a non-clean database.
		operator    string             // Revision.OperatorVersion
	}

	// ExecutorOption allows configuring an Executor using functional arguments.
	ExecutorOption func(*Executor) error
)

const (
	// RevisionTypeUnknown represents an unknown revision type.
	// This type is unexpected and exists here to only ensure
	// the type is not set to the zero value.
	RevisionTypeUnknown RevisionType = 0

	// RevisionTypeBaseline represents a baseline revision. Note that only
	// the first record can represent a baseline migration and most of its
	// fields are set to the zero value.
	RevisionTypeBaseline RevisionType = 1 << (iota - 1)

	// RevisionTypeExecute represents a migration that was executed.
	RevisionTypeExecute

	// RevisionTypeResolved represents a migration that was resolved. A migration
	// script that was executed and then resolved should set its Type to
	// RevisionTypeExecute | RevisionTypeResolved.
	RevisionTypeResolved
)

// NewPlanner creates a new Planner. The returned Planner writes the sum file
// by default; use PlanWithChecksum(false) to disable it, and PlanFormat to
// override the DefaultFormatter.
func NewPlanner(drv Driver, dir Dir, opts ...PlannerOption) *Planner {
	p := &Planner{drv: drv, dir: dir, sum: true}
	for _, opt := range opts {
		opt(p)
	}
	if p.fmt == nil {
		p.fmt = DefaultFormatter
	}
	return p
}

// PlanWithSchemaQualifier allows setting a custom schema to prefix tables and
// other resources. An empty string indicates no prefix.
//
// Note, this option requires the changes to be scoped to one
// schema and returns an error otherwise.
func PlanWithSchemaQualifier(q string) PlannerOption {
	return func(p *Planner) {
		p.opts = append(p.opts, func(o *PlanOptions) {
			o.SchemaQualifier = &q
		})
	}
}

// PlanFormat sets the Formatter of a Planner.
func PlanFormat(fmt Formatter) PlannerOption {
	return func(p *Planner) {
		p.fmt = fmt
	}
}

// PlanWithChecksum allows setting if the hash-sum functionality
// for the migration directory is enabled or not.
func PlanWithChecksum(b bool) PlannerOption {
	return func(p *Planner) {
		p.sum = b
	}
}

var (
	// WithFormatter calls PlanFormat.
	// Deprecated: use PlanFormat instead.
	WithFormatter = PlanFormat

	// DisableChecksum calls PlanWithChecksum(false).
	// Deprecated: use PlanWithChecksum(false) instead.
	DisableChecksum = func() PlannerOption { return PlanWithChecksum(false) }
)

// Plan calculates the migration Plan required for moving the current state (from) state to
// the next state (to). A StateReader can be a directory, static schema elements or a Driver connection.
func (p *Planner) Plan(ctx context.Context, name string, to StateReader) (*Plan, error) {
	return p.plan(ctx, name, to, true)
}

// PlanSchema is like Plan but limits its scope to the schema connection.
// Note, the operation fails in case the connection was not set to a schema.
func (p *Planner) PlanSchema(ctx context.Context, name string, to StateReader) (*Plan, error) {
	return p.plan(ctx, name, to, false)
}

// plan replays the migration directory on the (clean) dev connection to compute
// the current state, reads the desired state from to, diffs both, and asks the
// driver to plan the resulting changeset. realmScope selects between a
// realm-wide diff and a single-schema diff.
func (p *Planner) plan(ctx context.Context, name string, to StateReader, realmScope bool) (*Plan, error) {
	from, err := NewExecutor(p.drv, p.dir, NopRevisionReadWriter{})
	if err != nil {
		return nil, err
	}
	current, err := from.Replay(ctx, func() StateReader {
		if realmScope {
			return RealmConn(p.drv, nil)
		}
		// In case the scope is the schema connection,
		// inspect it and return its connected realm.
		return SchemaConn(p.drv, "", nil)
	}())
	if err != nil {
		return nil, err
	}
	desired, err := to.ReadState(ctx)
	if err != nil {
		return nil, err
	}
	var changes []schema.Change
	switch {
	case realmScope:
		changes, err = p.drv.RealmDiff(current, desired)
	default:
		// Schema-scoped planning requires exactly one schema on both sides.
		switch n, m := len(current.Schemas), len(desired.Schemas); {
		case n == 0:
			return nil, errors.New("no schema was found in current state after replaying migration directory")
		case n > 1:
			return nil, fmt.Errorf("%d schemas were found in current state after replaying migration directory", len(current.Schemas))
		case m == 0:
			return nil, errors.New("no schema was found in desired state")
		case m > 1:
			return nil, fmt.Errorf("%d schemas were found in desired state; expect 1", len(desired.Schemas))
		default:
			s1, s2 := *current.Schemas[0], *desired.Schemas[0]
			// Avoid comparing schema names when scope is limited to one schema,
			// and the schema qualifier is controlled by the caller.
			if s1.Name != s2.Name {
				s1.Name = s2.Name
			}
			changes, err = p.drv.SchemaDiff(&s1, &s2)
		}
	}
	if err != nil {
		return nil, err
	}
	if len(changes) == 0 {
		return nil, ErrNoPlan
	}
	return p.drv.PlanChanges(ctx, name, changes, p.opts...)
}

// WritePlan writes the given Plan to the Dir based on the configured Formatter.
func (p *Planner) WritePlan(plan *Plan) error {
	// Format the plan into files.
	files, err := p.fmt.Format(plan)
	if err != nil {
		return err
	}
	// Store the files in the migration directory.
	for _, f := range files {
		if err := p.dir.WriteFile(f.Name(), f.Bytes()); err != nil {
			return err
		}
	}
	// If enabled, update the sum file.
	if p.sum {
		sum, err := p.dir.Checksum()
		if err != nil {
			return err
		}
		return WriteSumFile(p.dir, sum)
	}
	return nil
}

var (
	// ErrNoPendingFiles is returned if there are no pending migration files to execute on the managed database.
	ErrNoPendingFiles = errors.New("sql/migrate: execute: nothing to do")

	// ErrSnapshotUnsupported is returned if there is no Snapshoter given.
	ErrSnapshotUnsupported = errors.New("sql/migrate: driver does not support taking a database snapshot")

	// ErrCleanCheckerUnsupported is returned if there is no CleanChecker given.
	ErrCleanCheckerUnsupported = errors.New("sql/migrate: driver does not support checking if database is clean")

	// ErrRevisionNotExist is returned if the requested revision is not found in the storage.
	ErrRevisionNotExist = errors.New("sql/migrate: revision not found")
)

// MissingMigrationError is returned if a revision is partially applied but
// the matching migration file is not found in the migration directory.
type MissingMigrationError struct{ Version, Description string }

// Error implements error.
func (e MissingMigrationError) Error() string {
	return fmt.Sprintf(
		"sql/migrate: missing migration: revision %q is partially applied but migration file was not found",
		fmt.Sprintf("%s_%s.sql", e.Version, e.Description),
	)
}

// NewExecutor creates a new Executor with default values.
//
// The given Driver must also implement Snapshoter and CleanChecker;
// otherwise ErrSnapshotUnsupported / ErrCleanCheckerUnsupported is returned.
// WithBaselineVersion and WithAllowDirty are mutually exclusive.
func NewExecutor(drv Driver, dir Dir, rrw RevisionReadWriter, opts ...ExecutorOption) (*Executor, error) {
	if drv == nil {
		return nil, errors.New("sql/migrate: execute: no driver given")
	}
	if dir == nil {
		return nil, errors.New("sql/migrate: execute: no dir given")
	}
	if rrw == nil {
		return nil, errors.New("sql/migrate: execute: no revision storage given")
	}
	ex := &Executor{drv: drv, dir: dir, rrw: rrw}
	for _, opt := range opts {
		if err := opt(ex); err != nil {
			return nil, err
		}
	}
	if ex.log == nil {
		ex.log = NopLogger{}
	}
	if _, ok := drv.(Snapshoter); !ok {
		return nil, ErrSnapshotUnsupported
	}
	if _, ok := drv.(CleanChecker); !ok {
		return nil, ErrCleanCheckerUnsupported
	}
	if ex.baselineVer != "" && ex.allowDirty {
		return nil, errors.New("sql/migrate: execute: baseline and allow-dirty are mutually exclusive")
	}
	return ex, nil
}

// WithAllowDirty defines if we can start working on a non-clean database
// in the first migration execution.
func WithAllowDirty(b bool) ExecutorOption { return func(ex *Executor) error { ex.allowDirty = b return nil } } // WithBaselineVersion allows setting the baseline version of the database on the // first migration. Hence, all versions up to and including this version are skipped. func WithBaselineVersion(v string) ExecutorOption { return func(ex *Executor) error { ex.baselineVer = v return nil } } // WithLogger sets the Logger of an Executor. func WithLogger(log Logger) ExecutorOption { return func(ex *Executor) error { ex.log = log return nil } } // WithFromVersion allows passing a file version as a starting point for calculating // pending migration scripts. It can be useful for skipping specific files. func WithFromVersion(v string) ExecutorOption { return func(ex *Executor) error { ex.fromVer = v return nil } } // WithOperatorVersion sets the operator version to save on the revisions // when executing migration files. func WithOperatorVersion(v string) ExecutorOption { return func(ex *Executor) error { ex.operator = v return nil } } // Pending returns all pending (not fully applied) migration files in the migration directory. func (e *Executor) Pending(ctx context.Context) ([]File, error) { // Don't operate with a broken migration directory. if err := Validate(e.dir); err != nil { return nil, fmt.Errorf("sql/migrate: execute: validate migration directory: %w", err) } // Read all applied database revisions. revs, err := e.rrw.ReadRevisions(ctx) if err != nil { return nil, fmt.Errorf("sql/migrate: execute: read revisions: %w", err) } // Select the correct migration files. migrations, err := e.dir.Files() if err != nil { return nil, fmt.Errorf("sql/migrate: execute: select migration files: %w", err) } if len(migrations) == 0 { return nil, ErrNoPendingFiles } var pending []File switch { // If it is the first time we run. 
case len(revs) == 0: var cerr *NotCleanError if err = e.drv.(CleanChecker).CheckClean(ctx, e.rrw.Ident()); err != nil && !errors.As(err, &cerr) { return nil, err } // In case the workspace is not clean one of the flags is required. if cerr != nil && !e.allowDirty && e.baselineVer == "" { return nil, fmt.Errorf("%w. baseline version or allow-dirty is required", cerr) } pending = migrations if e.baselineVer != "" { baseline := FilesLastIndex(migrations, func(f File) bool { return f.Version() == e.baselineVer }) if baseline == -1 { return nil, fmt.Errorf("baseline version %q not found", e.baselineVer) } f := migrations[baseline] // Mark the revision in the database as baseline revision. if err := e.writeRevision(ctx, &Revision{Version: f.Version(), Description: f.Desc(), Type: RevisionTypeBaseline}); err != nil { return nil, err } pending = migrations[baseline+1:] } // Not the first time we execute and a custom starting point was provided. case e.fromVer != "": idx := FilesLastIndex(migrations, func(f File) bool { return f.Version() == e.fromVer }) if idx == -1 { return nil, fmt.Errorf("starting point version %q not found in the migration directory", e.fromVer) } pending = migrations[idx:] default: var ( last = revs[len(revs)-1] partially = last.Applied != last.Total fn = func(f File) bool { return f.Version() <= last.Version } ) if partially { // If the last file is partially applied, we need to find the matching migration file in order to // continue execution at the correct statement. fn = func(f File) bool { return f.Version() == last.Version } } // Consider all migration files having a version < the latest revision version as pending. If the // last revision is partially applied, it is considered pending as well. idx := FilesLastIndex(migrations, fn) if idx == -1 { // If we cannot find the matching migration version for a partially applied migration, // error out since we cannot determine how to proceed from here. 
if partially { return nil, &MissingMigrationError{last.Version, last.Description} } // All migrations have a higher version than the latest revision. Take every migration file as pending. return migrations, nil } // If this file was not partially applied, take the next one. if last.Applied == last.Total { idx++ } pending = migrations[idx:] } if len(pending) == 0 { return nil, ErrNoPendingFiles } return pending, nil } // Execute executes the given migration file on the database. If it sees a file, that has been partially applied, it // will continue with the next statement in line. func (e *Executor) Execute(ctx context.Context, m File) (err error) { hf, err := e.dir.Checksum() if err != nil { return fmt.Errorf("sql/migrate: execute: compute hash: %w", err) } hash, err := hf.SumByName(m.Name()) if err != nil { return fmt.Errorf("sql/migrate: execute: scanning checksum from %q: %w", m.Name(), err) } stmts, err := m.Stmts() if err != nil { return fmt.Errorf("sql/migrate: execute: scanning statements from %q: %w", m.Name(), err) } // Create checksums for the statements. var ( sums = make([]string, len(stmts)) h = sha256.New() ) for i, stmt := range stmts { if _, err := h.Write([]byte(stmt)); err != nil { return err } sums[i] = base64.StdEncoding.EncodeToString(h.Sum(nil)) } version := m.Version() // If there already is a revision with this version in the database, // and it is partially applied, continue where the last attempt was left off. r, err := e.rrw.ReadRevision(ctx, version) if err != nil && !errors.Is(err, ErrRevisionNotExist) { return fmt.Errorf("sql/migrate: execute: read revision: %w", err) } if errors.Is(err, ErrRevisionNotExist) { // Haven't seen this file before, create a new revision. r = &Revision{ Version: version, Description: m.Desc(), Type: RevisionTypeExecute, Total: len(stmts), Hash: hash, } } // Save once to mark as started in the database. 
if err = e.writeRevision(ctx, r); err != nil { return err } // Make sure to store the Revision information. defer func(ctx context.Context, e *Executor, r *Revision) { if err2 := e.writeRevision(ctx, r); err2 != nil { err = wrap(err2, err) } }(ctx, e, r) if r.Applied > 0 { // If the file has been applied partially before, check if the // applied statements have not changed. for i := 0; i < r.Applied; i++ { if i > len(sums) || sums[i] != strings.TrimPrefix(r.PartialHashes[i], "h1:") { err = HistoryChangedError{m.Name(), i + 1} e.log.Log(LogError{Error: err}) return err } } } e.log.Log(LogFile{r.Version, r.Description, r.Applied}) for _, stmt := range stmts[r.Applied:] { e.log.Log(LogStmt{stmt}) if _, err = e.drv.ExecContext(ctx, stmt); err != nil { e.log.Log(LogError{Error: err}) r.setSQLErr(stmt, err) return fmt.Errorf("sql/migrate: execute: executing statement %q from version %q: %w", stmt, r.Version, err) } r.PartialHashes = append(r.PartialHashes, "h1:"+sums[r.Applied]) r.Applied++ if err = e.writeRevision(ctx, r); err != nil { return err } } r.done() return } func (e *Executor) writeRevision(ctx context.Context, r *Revision) error { r.ExecutedAt = time.Now() r.OperatorVersion = e.operator if err := e.rrw.WriteRevision(ctx, r); err != nil { return fmt.Errorf("sql/migrate: execute: write revision: %w", err) } return nil } // HistoryChangedError is returned if between two execution attempts already applied statements of a file have changed. type HistoryChangedError struct { File string Stmt int } func (e HistoryChangedError) Error() string { return fmt.Sprintf("sql/migrate: execute: history changed: statement %d from file %q changed", e.Stmt, e.File) } // ExecuteN executes n pending migration files. If n<=0 all pending migration files are executed. 
func (e *Executor) ExecuteN(ctx context.Context, n int) (err error) {
	pending, err := e.Pending(ctx)
	if err != nil {
		return err
	}
	if n > 0 {
		if n >= len(pending) {
			n = len(pending)
		}
		pending = pending[:n]
	}
	revs, err := e.rrw.ReadRevisions(ctx)
	if err != nil {
		return fmt.Errorf("sql/migrate: execute: read revisions: %w", err)
	}
	if err := LogIntro(e.log, revs, pending); err != nil {
		return err
	}
	for _, m := range pending {
		if err := e.Execute(ctx, m); err != nil {
			return err
		}
	}
	e.log.Log(LogDone{})
	return err
}

// Replay the migration directory and invoke the state to get back the inspection result.
func (e *Executor) Replay(ctx context.Context, r StateReader) (_ *schema.Realm, err error) {
	// Clean up after ourselves: take a snapshot now and restore it on return.
	restore, err := e.drv.(Snapshoter).Snapshot(ctx)
	if err != nil {
		return nil, fmt.Errorf("sql/migrate: taking database snapshot: %w", err)
	}
	defer func() {
		if err2 := restore(ctx); err2 != nil {
			err = wrap(err2, err)
		}
	}()
	// Replay the migration directory on the database.
	// ErrNoPendingFiles is not an error here: an empty directory simply
	// yields the current database state.
	if err := e.ExecuteN(ctx, 0); err != nil && !errors.Is(err, ErrNoPendingFiles) {
		return nil, fmt.Errorf("sql/migrate: read migration directory state: %w", err)
	}
	return r.ReadState(ctx)
}

type (
	// Snapshoter wraps the Snapshot method.
	Snapshoter interface {
		// Snapshot takes a snapshot of the current database state and returns a function that can be called to restore
		// that state. Snapshot should return an error, if the current state can not be restored completely, e.g. if
		// there is a table already containing some rows.
		Snapshot(context.Context) (RestoreFunc, error)
	}

	// RestoreFunc is returned by the Snapshoter to explicitly restore the database state.
	RestoreFunc func(context.Context) error

	// TableIdent describes a table identifier returned by the revisions table.
	TableIdent struct {
		Name   string // name of the table.
		Schema string // optional schema.
	}

	// CleanChecker wraps the single CheckClean method.
CleanChecker interface { // CheckClean checks if the connected realm or schema does not contain any resources besides the // revision history table. A NotCleanError is returned in case the connection is not-empty. CheckClean(context.Context, *TableIdent) error } // NotCleanError is returned when the connected dev-db is not in a clean state (aka it has schemas and tables). // This check is done to ensure no data is lost by overriding it when working on the dev-db. NotCleanError struct { Reason string // reason why the database is considered not clean } ) func (e NotCleanError) Error() string { return "sql/migrate: connected database is not clean: " + e.Reason } // NopRevisionReadWriter is a RevisionReadWriter that does nothing. // It is useful for one-time replay of the migration directory. type NopRevisionReadWriter struct{} // Ident implements RevisionsReadWriter.TableIdent. func (NopRevisionReadWriter) Ident() *TableIdent { return nil } // ReadRevisions implements RevisionsReadWriter.ReadRevisions. func (NopRevisionReadWriter) ReadRevisions(context.Context) ([]*Revision, error) { return nil, nil } // ReadRevision implements RevisionsReadWriter.ReadRevision. func (NopRevisionReadWriter) ReadRevision(context.Context, string) (*Revision, error) { return nil, ErrRevisionNotExist } // WriteRevision implements RevisionsReadWriter.WriteRevision. func (NopRevisionReadWriter) WriteRevision(context.Context, *Revision) error { return nil } // DeleteRevision implements RevisionsReadWriter.DeleteRevision. func (NopRevisionReadWriter) DeleteRevision(context.Context, string) error { return nil } var _ RevisionReadWriter = (*NopRevisionReadWriter)(nil) // done computes and sets the ExecutionTime. func (r *Revision) done() { r.ExecutionTime = time.Now().Sub(r.ExecutedAt) } func (r *Revision) setSQLErr(stmt string, err error) { r.done() r.Error = fmt.Sprintf("Statement:\n%s\n\nError:\n%s", stmt, err) } type ( // A Logger logs migration execution. 
	Logger interface {
		Log(LogEntry)
	}

	// LogEntry marks several types of logs to be passed to a Logger.
	LogEntry interface {
		logEntry()
	}

	// LogExecution is sent once when execution of multiple migration files has been started.
	// It holds the filenames of the pending migration files.
	LogExecution struct {
		// From what version.
		From string
		// To what version.
		To string
		// Migration Files to be executed.
		Files []string
	}

	// LogFile is sent if a new migration file is executed.
	LogFile struct {
		// Version executed.
		Version string
		// Desc of migration executed.
		Desc string
		// Skip holds the number of stmts of this file that will be skipped.
		// This happens, if a migration file was only applied partially and will now continue to be applied.
		Skip int
	}

	// LogStmt is sent if a new SQL statement is executed.
	LogStmt struct {
		SQL string
	}

	// LogDone is sent if the execution is done.
	LogDone struct{}

	// LogError is sent if there is an error during execution.
	LogError struct {
		Error error
	}

	// NopLogger is a Logger that does nothing.
	// It is useful for one-time replay of the migration directory.
	NopLogger struct{}
)

func (LogExecution) logEntry() {}
func (LogFile) logEntry()      {}
func (LogStmt) logEntry()      {}
func (LogDone) logEntry()      {}
func (LogError) logEntry()     {}

// Log implements the Logger interface.
func (NopLogger) Log(LogEntry) {}

// LogIntro gathers some meta information from the migration files and stored revisions to
// log some general information prior to actual execution.
func LogIntro(l Logger, revs []*Revision, files []File) error { names := make([]string, len(files)) for i := range files { names[i] = files[i].Name() } last := files[len(files)-1] e := LogExecution{To: last.Version(), Files: names} if len(revs) > 0 { e.From = revs[len(revs)-1].Version } l.Log(e) return nil } func wrap(err1, err2 error) error { if err2 != nil { return fmt.Errorf("sql/migrate: %w: %v", err2, err1) } return err1 } atlas-0.7.2/sql/migrate/migrate_test.go000066400000000000000000000506401431455511600201030ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package migrate_test import ( "context" "database/sql" _ "embed" "errors" "io/fs" "path/filepath" "testing" "text/template" "time" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestPlanner_WritePlan(t *testing.T) { p := t.TempDir() d, err := migrate.NewLocalDir(p) require.NoError(t, err) plan := &migrate.Plan{ Name: "add_t1_and_t2", Changes: []*migrate.Change{ {Cmd: "CREATE TABLE t1(c int)", Reverse: "DROP TABLE t1 IF EXISTS"}, {Cmd: "CREATE TABLE t2(c int)", Reverse: "DROP TABLE t2"}, }, } // DefaultFormatter pl := migrate.NewPlanner(nil, d, migrate.PlanWithChecksum(false)) require.NotNil(t, pl) require.NoError(t, pl.WritePlan(plan)) v := time.Now().UTC().Format("20060102150405") require.Equal(t, countFiles(t, d), 1) requireFileEqual(t, d, v+"_add_t1_and_t2.sql", "CREATE TABLE t1(c int);\nCREATE TABLE t2(c int);\n") // Custom formatter (creates "up" and "down" migration files). 
	fmt, err := migrate.NewTemplateFormatter(
		template.Must(template.New("").Parse("{{ .Name }}.up.sql")),
		template.Must(template.New("").Parse("{{ range .Changes }}{{ println .Cmd }}{{ end }}")),
		template.Must(template.New("").Parse("{{ .Name }}.down.sql")),
		template.Must(template.New("").Parse("{{ range .Changes }}{{ println .Reverse }}{{ end }}")),
	)
	require.NoError(t, err)
	pl = migrate.NewPlanner(nil, d, migrate.PlanFormat(fmt), migrate.PlanWithChecksum(false))
	require.NotNil(t, pl)
	require.NoError(t, pl.WritePlan(plan))
	require.Equal(t, countFiles(t, d), 3)
	requireFileEqual(t, d, "add_t1_and_t2.up.sql", "CREATE TABLE t1(c int)\nCREATE TABLE t2(c int)\n")
	requireFileEqual(t, d, "add_t1_and_t2.down.sql", "DROP TABLE t1 IF EXISTS\nDROP TABLE t2\n")
}

func TestPlanner_Plan(t *testing.T) {
	var (
		drv = &mockDriver{}
		ctx = context.Background()
	)
	d, err := migrate.NewLocalDir(t.TempDir())
	require.NoError(t, err)

	// nothing to do
	pl := migrate.NewPlanner(drv, d)
	plan, err := pl.Plan(ctx, "empty", migrate.Realm(nil))
	require.ErrorIs(t, err, migrate.ErrNoPlan)
	require.Nil(t, plan)

	// there are changes
	drv.changes = []schema.Change{
		&schema.AddTable{T: schema.NewTable("t1").AddColumns(schema.NewIntColumn("c", "int"))},
		&schema.AddTable{T: schema.NewTable("t2").AddColumns(schema.NewIntColumn("c", "int"))},
	}
	drv.plan = &migrate.Plan{
		Changes: []*migrate.Change{
			{Cmd: "CREATE TABLE t1(c int);"},
			{Cmd: "CREATE TABLE t2(c int);"},
		},
	}
	plan, err = pl.Plan(ctx, "", migrate.Realm(nil))
	require.NoError(t, err)
	require.Equal(t, drv.plan, plan)
}

func TestPlanner_PlanSchema(t *testing.T) {
	var (
		drv = &mockDriver{}
		ctx = context.Background()
	)
	d, err := migrate.NewLocalDir(t.TempDir())
	require.NoError(t, err)

	// Schema is missing in dev connection.
	pl := migrate.NewPlanner(drv, d)
	plan, err := pl.PlanSchema(ctx, "empty", migrate.Realm(nil))
	require.EqualError(t, err, `not found`)
	require.Nil(t, plan)

	// Desired state has no schema.
	drv.realm = *schema.NewRealm(schema.New("test"))
	pl = migrate.NewPlanner(drv, d)
	plan, err = pl.PlanSchema(ctx, "empty", migrate.Realm(schema.NewRealm()))
	require.EqualError(t, err, `no schema was found in desired state`)
	require.Nil(t, plan)

	// Desired state has more than one schema.
	drv.realm = *schema.NewRealm(schema.New("test"))
	pl = migrate.NewPlanner(drv, d)
	plan, err = pl.PlanSchema(ctx, "empty", migrate.Realm(schema.NewRealm(schema.New("test"), schema.New("dev"))))
	require.EqualError(t, err, `2 schemas were found in desired state; expect 1`)
	require.Nil(t, plan)

	// Matching states produce no plan.
	drv.realm = *schema.NewRealm(schema.New("test"))
	pl = migrate.NewPlanner(drv, d)
	plan, err = pl.PlanSchema(ctx, "multi", migrate.Realm(schema.NewRealm(schema.New("test"))))
	require.ErrorIs(t, err, migrate.ErrNoPlan)
	require.Nil(t, plan)
}

func TestExecutor_Replay(t *testing.T) {
	ctx := context.Background()
	d, err := migrate.NewLocalDir("testdata/migrate")
	require.NoError(t, err)
	drv := &mockDriver{}
	ex, err := migrate.NewExecutor(drv, d, migrate.NopRevisionReadWriter{})
	require.NoError(t, err)
	_, err = ex.Replay(ctx, migrate.RealmConn(drv, nil))
	require.NoError(t, err)
	require.Equal(t, []string{"DROP TABLE IF EXISTS t;", "CREATE TABLE t(c int);"}, drv.executed)

	// Does not work if database is not clean.
	drv.dirty = true
	drv.realm = schema.Realm{Schemas: []*schema.Schema{{Name: "schema"}}}
	_, err = ex.Replay(ctx, migrate.RealmConn(drv, nil))
	require.ErrorAs(t, err, &migrate.NotCleanError{})
}

func TestExecutor_Pending(t *testing.T) {
	var (
		drv  = &mockDriver{}
		rrw  = &mockRevisionReadWriter{}
		log  = &mockLogger{}
		rev1 = &migrate.Revision{
			Version:     "1.a",
			Description: "sub.up",
			Applied:     2,
			Total:       2,
			Hash:        "nXyZR020M/mH7LxkoTkJr7BcQkipVg90imQ9I4595dw=",
		}
		rev2 = &migrate.Revision{
			Version:     "2.10.x-20",
			Description: "description",
			Applied:     1,
			Total:       1,
			Hash:        "wQB3Vh3PHVXQg9OD3Gn7TBxbZN3r1Qb7TtAE1g3q9mQ=",
		}
		// rev3 is a partially applied revision (1 of 2 statements).
		rev3 = &migrate.Revision{
			Version:     "3",
			Description: "partly",
			Applied:     1,
			Total:       2,
			Error:       "this is an migration error",
			Hash:        "+O40cAXHgvMClnynHd5wggPAeZAk7zSEaNgzXCZOfmY=",
		}
	)

	dir, err := migrate.NewLocalDir(filepath.Join("testdata/migrate", "sub"))
	require.NoError(t, err)
	ex, err := migrate.NewExecutor(drv, dir, rrw, migrate.WithLogger(log))
	require.NoError(t, err)

	// All are pending
	p, err := ex.Pending(context.Background())
	require.NoError(t, err)
	require.Len(t, p, 3)

	// 2 are pending.
	*rrw = []*migrate.Revision{rev1}
	p, err = ex.Pending(context.Background())
	require.NoError(t, err)
	require.Len(t, p, 2)

	// Only the last one is pending (in full).
	*rrw = []*migrate.Revision{rev1, rev2}
	p, err = ex.Pending(context.Background())
	require.NoError(t, err)
	require.Len(t, p, 1)

	// First statement of last one is marked as applied, second isn't. Third file is still pending.
	*rrw = []*migrate.Revision{rev1, rev2, rev3}
	p, err = ex.Pending(context.Background())
	require.NoError(t, err)
	require.Len(t, p, 1)

	// Nothing to do if all migrations are applied.
	rev3.Applied = rev3.Total
	*rrw = []*migrate.Revision{rev1, rev2, rev3}
	p, err = ex.Pending(context.Background())
	require.ErrorIs(t, err, migrate.ErrNoPendingFiles)
	require.Len(t, p, 0)

	// If there is a revision in the past with no existing migration, we don't care.
rev3.Applied = rev3.Total *rrw = []*migrate.Revision{{Version: "2.11"}, rev3} p, err = ex.Pending(context.Background()) require.ErrorIs(t, err, migrate.ErrNoPendingFiles) require.Len(t, p, 0) // If only the last migration file is applied, we expect there are no pending files. *rrw = []*migrate.Revision{rev3} p, err = ex.Pending(context.Background()) require.ErrorIs(t, err, migrate.ErrNoPendingFiles) require.Len(t, p, 0) // If the last applied revision has no matching migration file, and the last // migration version precedes that revision, there is nothing to do. *rrw = []*migrate.Revision{{Version: "5"}} p, err = ex.Pending(context.Background()) require.ErrorIs(t, err, migrate.ErrNoPendingFiles) require.Len(t, p, 0) // The applied revision precedes every migration file. Expect all files pending. *rrw = []*migrate.Revision{{Version: "1.1"}} p, err = ex.Pending(context.Background()) require.NoError(t, err) require.Len(t, p, 3) // All except one file are applied. The latest revision does not exist and its version is smaller than the last // migration file. Expect the last file pending. *rrw = []*migrate.Revision{rev1, rev2, {Version: "2.11"}} p, err = ex.Pending(context.Background()) require.NoError(t, err) require.Len(t, p, 1) require.Equal(t, rev3.Version, p[0].Version()) // If the last revision is partially applied and the matching migration file does not exist, we have a problem. *rrw = []*migrate.Revision{{Version: "deleted", Description: "desc", Total: 1}} p, err = ex.Pending(context.Background()) require.EqualError(t, err, migrate.MissingMigrationError{Version: "deleted", Description: "desc"}.Error()) require.Len(t, p, 0) } func TestExecutor(t *testing.T) { // Passing nil raises error. 
ex, err := migrate.NewExecutor(nil, nil, nil) require.EqualError(t, err, "sql/migrate: execute: no driver given") require.Nil(t, ex) ex, err = migrate.NewExecutor(&mockDriver{}, nil, nil) require.EqualError(t, err, "sql/migrate: execute: no dir given") require.Nil(t, ex) dir, err := migrate.NewLocalDir(t.TempDir()) require.NoError(t, err) ex, err = migrate.NewExecutor(&mockDriver{}, dir, nil) require.EqualError(t, err, "sql/migrate: execute: no revision storage given") require.Nil(t, ex) // Does not operate on invalid migration dir. dir, err = migrate.NewLocalDir(t.TempDir()) require.NoError(t, err) require.NoError(t, dir.WriteFile("atlas.sum", hash)) ex, err = migrate.NewExecutor(&mockDriver{}, dir, &mockRevisionReadWriter{}, migrate.WithOperatorVersion("op")) require.NoError(t, err) require.NotNil(t, ex) require.ErrorIs(t, ex.ExecuteN(context.Background(), 0), migrate.ErrChecksumMismatch) // Prerequisites. var ( drv = &mockDriver{} rrw = &mockRevisionReadWriter{} log = &mockLogger{} rev1 = &migrate.Revision{ Version: "1.a", Description: "sub.up", Type: migrate.RevisionTypeExecute, Applied: 2, Total: 2, Hash: "nXyZR020M/mH7LxkoTkJr7BcQkipVg90imQ9I4595dw=", OperatorVersion: "op", } rev2 = &migrate.Revision{ Version: "2.10.x-20", Description: "description", Type: migrate.RevisionTypeExecute, Applied: 1, Total: 1, Hash: "wQB3Vh3PHVXQg9OD3Gn7TBxbZN3r1Qb7TtAE1g3q9mQ=", OperatorVersion: "op", } ) dir, err = migrate.NewLocalDir(filepath.Join("testdata/migrate", "sub")) require.NoError(t, err) ex, err = migrate.NewExecutor(drv, dir, rrw, migrate.WithLogger(log), migrate.WithOperatorVersion("op")) require.NoError(t, err) // Applies two of them. 
require.NoError(t, ex.ExecuteN(context.Background(), 2)) require.Equal(t, drv.executed, []string{ "CREATE TABLE t_sub(c int);", "ALTER TABLE t_sub ADD c1 int;", "ALTER TABLE t_sub ADD c2 int;", }) requireEqualRevisions(t, []*migrate.Revision{rev1, rev2}, *rrw) require.Equal(t, []migrate.LogEntry{ migrate.LogExecution{To: "2.10.x-20", Files: []string{"1.a_sub.up.sql", "2.10.x-20_description.sql"}}, migrate.LogFile{Version: "1.a", Desc: "sub.up"}, migrate.LogStmt{SQL: "CREATE TABLE t_sub(c int);"}, migrate.LogStmt{SQL: "ALTER TABLE t_sub ADD c1 int;"}, migrate.LogFile{Version: "2.10.x-20", Desc: "description"}, migrate.LogStmt{SQL: "ALTER TABLE t_sub ADD c2 int;"}, migrate.LogDone{}, }, []migrate.LogEntry(*log)) // Partly is pending. p, err := ex.Pending(context.Background()) require.NoError(t, err) require.Len(t, p, 1) require.Equal(t, "3_partly.sql", p[0].Name()) // Apply one by one. *rrw = mockRevisionReadWriter{} *drv = mockDriver{} require.NoError(t, ex.ExecuteN(context.Background(), 1)) require.Equal(t, []string{"CREATE TABLE t_sub(c int);", "ALTER TABLE t_sub ADD c1 int;"}, drv.executed) requireEqualRevisions(t, []*migrate.Revision{rev1}, *rrw) require.NoError(t, ex.ExecuteN(context.Background(), 1)) require.Equal(t, []string{ "CREATE TABLE t_sub(c int);", "ALTER TABLE t_sub ADD c1 int;", "ALTER TABLE t_sub ADD c2 int;", }, drv.executed) requireEqualRevisions(t, []*migrate.Revision{rev1, rev2}, *rrw) // Partly is pending. p, err = ex.Pending(context.Background()) require.NoError(t, err) require.Len(t, p, 1) require.Equal(t, "3_partly.sql", p[0].Name()) // Suppose first revision is already executed, only execute second migration file. *rrw = []*migrate.Revision{rev1} *drv = mockDriver{} require.NoError(t, ex.ExecuteN(context.Background(), 1)) require.Equal(t, []string{"ALTER TABLE t_sub ADD c2 int;"}, drv.executed) requireEqualRevisions(t, []*migrate.Revision{rev1, rev2}, *rrw) // Partly is pending. 
p, err = ex.Pending(context.Background()) require.NoError(t, err) require.Len(t, p, 1) require.Equal(t, "3_partly.sql", p[0].Name()) // Failing, counter will be correct. *rrw = []*migrate.Revision{rev1, rev2} *drv = mockDriver{} drv.failOn(2, errors.New("this is an error")) require.ErrorContains(t, ex.ExecuteN(context.Background(), 1), "this is an error") revs, err := rrw.ReadRevisions(context.Background()) require.NoError(t, err) requireEqualRevision(t, &migrate.Revision{ Version: "3", Description: "partly", Type: migrate.RevisionTypeExecute, Applied: 1, Total: 2, Error: "Statement:\nALTER TABLE t_sub ADD c4 int;\n\nError:\nthis is an error", OperatorVersion: "op", }, revs[len(revs)-1]) // Will fail if applied contents hash has changed (like when editing a partially applied file to fix an error). h := revs[len(revs)-1].PartialHashes[0] revs[len(revs)-1].PartialHashes[0] += h require.ErrorAs(t, ex.ExecuteN(context.Background(), 1), &migrate.HistoryChangedError{}) // Re-attempting to migrate will pick up where the execution was left off. revs[len(revs)-1].PartialHashes[0] = h *drv = mockDriver{} require.NoError(t, ex.ExecuteN(context.Background(), 1)) require.Equal(t, []string{"ALTER TABLE t_sub ADD c4 int;"}, drv.executed) // Everything is applied. require.ErrorIs(t, ex.ExecuteN(context.Background(), 0), migrate.ErrNoPendingFiles) } func TestExecutor_Baseline(t *testing.T) { var ( rrw mockRevisionReadWriter drv = &mockDriver{dirty: true} log = &mockLogger{} ) dir, err := migrate.NewLocalDir(filepath.Join("testdata/migrate", "sub")) require.NoError(t, err) ex, err := migrate.NewExecutor(drv, dir, &rrw, migrate.WithLogger(log)) require.NoError(t, err) // Require baseline-version or explicit flag to work on a dirty workspace. files, err := ex.Pending(context.Background()) require.EqualError(t, err, "sql/migrate: connected database is not clean: found table. 
baseline version or allow-dirty is required") require.Nil(t, files) rrw = mockRevisionReadWriter{} ex, err = migrate.NewExecutor(drv, dir, &rrw, migrate.WithLogger(log), migrate.WithAllowDirty(true)) require.NoError(t, err) files, err = ex.Pending(context.Background()) require.NoError(t, err) require.Len(t, files, 3) rrw = mockRevisionReadWriter{} ex, err = migrate.NewExecutor(drv, dir, &rrw, migrate.WithLogger(log), migrate.WithBaselineVersion("2.10.x-20")) require.NoError(t, err) files, err = ex.Pending(context.Background()) require.NoError(t, err) require.Len(t, files, 1) require.Len(t, rrw, 1) require.Equal(t, "2.10.x-20", rrw[0].Version) require.Equal(t, "description", rrw[0].Description) require.Equal(t, migrate.RevisionTypeBaseline, rrw[0].Type) rrw = mockRevisionReadWriter{} ex, err = migrate.NewExecutor(drv, dir, &rrw, migrate.WithLogger(log), migrate.WithBaselineVersion("3")) require.NoError(t, err) files, err = ex.Pending(context.Background()) require.ErrorIs(t, err, migrate.ErrNoPendingFiles) require.Len(t, rrw, 1) require.Equal(t, "3", rrw[0].Version) require.Equal(t, "partly", rrw[0].Description) require.Equal(t, migrate.RevisionTypeBaseline, rrw[0].Type) } func TestExecutor_FromVersion(t *testing.T) { var ( drv = &mockDriver{} log = &mockLogger{} rrw = &mockRevisionReadWriter{ { Version: "1.a", Description: "sub.up", Applied: 2, Total: 2, Hash: "nXyZR020M/mH7LxkoTkJr7BcQkipVg90imQ9I4595dw=", }, } ) dir, err := migrate.NewLocalDir(filepath.Join("testdata/migrate", "sub")) require.NoError(t, err) ex, err := migrate.NewExecutor(drv, dir, rrw, migrate.WithLogger(log)) require.NoError(t, err) files, err := ex.Pending(context.Background()) require.NoError(t, err) require.Len(t, files, 2) // Control the starting point. 
ex, err = migrate.NewExecutor(drv, dir, rrw, migrate.WithLogger(log), migrate.WithFromVersion("3")) require.NoError(t, err) files, err = ex.Pending(context.Background()) require.NoError(t, err) require.Len(t, files, 1) // Starting point was not found. ex, err = migrate.NewExecutor(drv, dir, rrw, migrate.WithLogger(log), migrate.WithFromVersion("4")) require.NoError(t, err) files, err = ex.Pending(context.Background()) require.EqualError(t, err, `starting point version "4" not found in the migration directory`) require.Nil(t, files) } type ( mockDriver struct { migrate.Driver plan *migrate.Plan changes []schema.Change applied []schema.Change realm schema.Realm executed []string failCounter int failWith error dirty bool } ) // the nth call to ExecContext will fail with the given error. func (m *mockDriver) failOn(n int, err error) { m.failCounter = n m.failWith = err } func (m *mockDriver) ExecContext(_ context.Context, query string, _ ...any) (sql.Result, error) { if m.failCounter > 0 { m.failCounter-- if m.failCounter == 0 { return nil, m.failWith } } m.executed = append(m.executed, query) return nil, nil } func (m *mockDriver) InspectSchema(context.Context, string, *schema.InspectOptions) (*schema.Schema, error) { if len(m.realm.Schemas) == 0 { return nil, schema.NotExistError{Err: errors.New("not found")} } return m.realm.Schemas[0], nil } func (m *mockDriver) InspectRealm(context.Context, *schema.InspectRealmOption) (*schema.Realm, error) { return &m.realm, nil } func (m *mockDriver) SchemaDiff(_, _ *schema.Schema) ([]schema.Change, error) { return m.changes, nil } func (m *mockDriver) RealmDiff(_, _ *schema.Realm) ([]schema.Change, error) { return m.changes, nil } func (m *mockDriver) PlanChanges(context.Context, string, []schema.Change, ...migrate.PlanOption) (*migrate.Plan, error) { return m.plan, nil } func (m *mockDriver) ApplyChanges(_ context.Context, changes []schema.Change, _ ...migrate.PlanOption) error { m.applied = changes return nil } func (m 
*mockDriver) Snapshot(context.Context) (migrate.RestoreFunc, error) { if m.dirty { return nil, migrate.NotCleanError{} } realm := m.realm return func(context.Context) error { m.realm = realm return nil }, nil } func (m *mockDriver) CheckClean(context.Context, *migrate.TableIdent) error { if m.dirty { return &migrate.NotCleanError{Reason: "found table"} } return nil } type mockRevisionReadWriter []*migrate.Revision func (*mockRevisionReadWriter) Ident() *migrate.TableIdent { return nil } func (*mockRevisionReadWriter) Exists(_ context.Context) (bool, error) { return true, nil } func (*mockRevisionReadWriter) Init(_ context.Context) error { return nil } func (rrw *mockRevisionReadWriter) WriteRevision(_ context.Context, r *migrate.Revision) error { for i, rev := range *rrw { if rev.Version == r.Version { (*rrw)[i] = r return nil } } *rrw = append(*rrw, r) return nil } func (rrw *mockRevisionReadWriter) ReadRevision(_ context.Context, v string) (*migrate.Revision, error) { for _, r := range *rrw { if r.Version == v { return r, nil } } return nil, migrate.ErrRevisionNotExist } func (rrw *mockRevisionReadWriter) DeleteRevision(_ context.Context, v string) error { i := -1 for j, r := range *rrw { if r.Version == v { i = j break } } if i == -1 { return nil } copy((*rrw)[i:], (*rrw)[i+1:]) *rrw = (*rrw)[:len(*rrw)-1] return nil } func (rrw *mockRevisionReadWriter) ReadRevisions(context.Context) ([]*migrate.Revision, error) { return *rrw, nil } func (rrw *mockRevisionReadWriter) clean() { *rrw = []*migrate.Revision{} } type mockLogger []migrate.LogEntry func (m *mockLogger) Log(e migrate.LogEntry) { *m = append(*m, e) } func requireEqualRevisions(t *testing.T, expected, actual []*migrate.Revision) { require.Equal(t, len(expected), len(actual)) for i := range expected { requireEqualRevision(t, expected[i], actual[i]) } } func requireEqualRevision(t *testing.T, expected, actual *migrate.Revision) { require.Equal(t, expected.Version, actual.Version) require.Equal(t, 
expected.Description, actual.Description) require.Equal(t, expected.Type, actual.Type) require.Equal(t, expected.Applied, actual.Applied) require.Equal(t, expected.Total, actual.Total) require.Equal(t, expected.Error, actual.Error) if expected.Hash != "" { require.Equal(t, expected.Hash, actual.Hash) } require.Equal(t, expected.OperatorVersion, actual.OperatorVersion) } func countFiles(t *testing.T, d migrate.Dir) int { files, err := fs.ReadDir(d, "") require.NoError(t, err) return len(files) } func requireFileEqual(t *testing.T, d migrate.Dir, name, contents string) { c, err := fs.ReadFile(d, name) require.NoError(t, err) require.Equal(t, contents, string(c)) } atlas-0.7.2/sql/migrate/testdata/000077500000000000000000000000001431455511600166715ustar00rootroot00000000000000atlas-0.7.2/sql/migrate/testdata/lex/000077500000000000000000000000001431455511600174615ustar00rootroot00000000000000atlas-0.7.2/sql/migrate/testdata/lex/1.sql000066400000000000000000000004461431455511600203460ustar00rootroot00000000000000CREATE TABLE t1(id int); CREATE TABLE t2(id int); CREATE TABLE t3(id int); CREATE TABLE t4( id int, name varchar(255) ); CREATE TABLE t4( id int, `name` varchar(255) DEFAULT ';' ) ENGINE=InnoDB; CREATE TABLE t5( id int /* comment */ -- comment ) ENGINE=InnoDB;atlas-0.7.2/sql/migrate/testdata/lex/1.sql.golden000066400000000000000000000005221431455511600216100ustar00rootroot00000000000000CREATE TABLE t1(id int); -- end -- CREATE TABLE t2(id int); -- end -- CREATE TABLE t3(id int); -- end -- CREATE TABLE t4( id int, name varchar(255) ); -- end -- CREATE TABLE t4( id int, `name` varchar(255) DEFAULT ';' ) ENGINE=InnoDB; -- end -- CREATE TABLE t5( id int /* comment */ -- comment ) ENGINE=InnoDB;atlas-0.7.2/sql/migrate/testdata/lex/10_delimiter_comment.sql000066400000000000000000000002521431455511600242010ustar00rootroot00000000000000-- atlas:delimiter -- end CREATE PROCEDURE dorepeat(p1 INT) BEGIN SET @x = 0; REPEAT SET @x = @x + 1; UNTIL @x > p1 END REPEAT; END; -- 
end CALL dorepeat(1000); atlas-0.7.2/sql/migrate/testdata/lex/10_delimiter_comment.sql.golden000066400000000000000000000002211431455511600254440ustar00rootroot00000000000000CREATE PROCEDURE dorepeat(p1 INT) BEGIN SET @x = 0; REPEAT SET @x = @x + 1; UNTIL @x > p1 END REPEAT; END; -- end -- CALL dorepeat(1000);atlas-0.7.2/sql/migrate/testdata/lex/11_delimiter_mysql_command.sql000066400000000000000000000004301431455511600254010ustar00rootroot00000000000000-- An example for supporting MySQL client delimiters. DELIMITER $$ CREATE OR REPLACE FUNCTION gen_uuid() RETURNS VARCHAR(22) BEGIN RETURN concat( date_format(NOW(6), '%Y%m%d%i%s%f'), ROUND(1 + RAND() * (100 - 2)) ); END;$$ DELIMITER ; CALL gen_uuid(); atlas-0.7.2/sql/migrate/testdata/lex/11_delimiter_mysql_command.sql.golden000066400000000000000000000003141431455511600266510ustar00rootroot00000000000000CREATE OR REPLACE FUNCTION gen_uuid() RETURNS VARCHAR(22) BEGIN RETURN concat( date_format(NOW(6), '%Y%m%d%i%s%f'), ROUND(1 + RAND() * (100 - 2)) ); END; -- end -- CALL gen_uuid();atlas-0.7.2/sql/migrate/testdata/lex/12_delimiter_mysql_command.sql000066400000000000000000000002131431455511600254010ustar00rootroot00000000000000delimiter // create table t2 (a int) // delimiter ; delimiter // create table t3 (a int) // delimiter ; show tables; drop table t2, t3; atlas-0.7.2/sql/migrate/testdata/lex/12_delimiter_mysql_command.sql.golden000066400000000000000000000001551431455511600266550ustar00rootroot00000000000000create table t2 (a int) -- end -- create table t3 (a int) -- end -- show tables; -- end -- drop table t2, t3;atlas-0.7.2/sql/migrate/testdata/lex/13_delimiter_mysql_command.sql000066400000000000000000000020041431455511600254020ustar00rootroot00000000000000# Test delimiter : select "Test delimiter :" as " "; delimiter : select * from t1: /* Delimiter commands can have comments */ delimiter ; select 'End test :'; /* Test delimiter :; */ select "Test delimiter :;" as " "; delimiter :; select * from t1 :; delimiter ; 
select 'End test :;'; -- Test delimiter // select "Test delimiter //" as " "; delimiter // select * from t1// delimiter ; select 'End test //'; # Test delimiter 'MySQL' select "Test delimiter MySQL" as " "; delimiter 'MySQL' select * from t1MySQL delimiter ; select 'End test MySQL'; # Test delimiter 'delimiter' select "Test delimiter delimiter" as " "; delimiter delimiter select * from t1delimiter delimiter ; select 'End test delimiter'; # Test delimiter @@ select "Test delimiter @@" as " "; delimiter @@ select * from t1 @@ select * from t2@@ alter table t add column c@@ delimiter ; select 'End test @@'; # Test delimiter \n\n select "Test delimiter \n\n" as " "; delimiter \n\n select * from t1 select * from t2 delimiter ; select 'End test \\n\\n';atlas-0.7.2/sql/migrate/testdata/lex/13_delimiter_mysql_command.sql.golden000066400000000000000000000015001431455511600266510ustar00rootroot00000000000000select "Test delimiter :" as " "; -- end -- select * from t1 -- end -- select 'End test :'; -- end -- select "Test delimiter :;" as " "; -- end -- select * from t1 -- end -- select 'End test :;'; -- end -- select "Test delimiter //" as " "; -- end -- select * from t1 -- end -- select 'End test //'; -- end -- select "Test delimiter MySQL" as " "; -- end -- select * from t1 -- end -- select 'End test MySQL'; -- end -- select "Test delimiter delimiter" as " "; -- end -- select * from t1 -- end -- select 'End test delimiter'; -- end -- select "Test delimiter @@" as " "; -- end -- select * from t1 -- end -- select * from t2 -- end -- alter table t add column c -- end -- select 'End test @@'; -- end -- select "Test delimiter \n\n" as " "; -- end -- select * from t1 -- end -- select * from t2 -- end -- select 'End test \\n\\n';atlas-0.7.2/sql/migrate/testdata/lex/14_delimiter_mysql_command.sql000066400000000000000000000002441431455511600254070ustar00rootroot00000000000000DELIMITER // CREATE PROCEDURE dorepeat(p1 INT) BEGIN SET @x = 0; REPEAT SET @x = @x + 1; UNTIL @x > p1 END 
REPEAT; END // DELIMITER ; CALL dorepeat(100)atlas-0.7.2/sql/migrate/testdata/lex/14_delimiter_mysql_command.sql.golden000066400000000000000000000002221431455511600266520ustar00rootroot00000000000000CREATE PROCEDURE dorepeat(p1 INT) BEGIN SET @x = 0; REPEAT SET @x = @x + 1; UNTIL @x > p1 END REPEAT; END -- end -- CALL dorepeat(100)atlas-0.7.2/sql/migrate/testdata/lex/2_mysql.sql000066400000000000000000000033121431455511600215670ustar00rootroot00000000000000create table t1 (b char(0)); create table t1 (b char(0) not null); create table if not exists t1 (b char(0) not null); create table t2 engine=heap select * from t1; create table t1 (ordid int(8) not null auto_increment, ord varchar(50) not null, primary key (ord,ordid)) engine=heap; create table mysqltest.$test1 (a$1 int, $b int, c$ int); create table t2 (b int) select a as b, a+1 as b from t1; create table t1 select if('2002'='2002','Y','N'); create table t1 ( k1 varchar(2), k2 int, primary key(k1,k2)); insert into t1 values ("a", 1), ("b", 2); create table t2 select a, ifnull(b,cast(-7 as signed)) as b, ifnull(c,cast(7 as unsigned)) as c, ifnull(d,cast('2000-01-01' as date)) as d, ifnull(e,cast('b' as char)) as e, ifnull(f,cast('2000-01-01' as datetime)) as f, ifnull(g,cast('5:4:3' as time)) as g, ifnull(h,cast('yet another binary data' as binary)) as h, addtime(cast('1:0:0' as time),cast('1:0:0' as time)) as dd from t1; CREATE TABLE t1(id varchar(10) NOT NULL PRIMARY KEY, dsc longtext); INSERT INTO t1 VALUES ('5000000001', NULL),('5000000003', 'Test'),('5000000004', NULL); create table t1 ( a varchar(112) charset utf8 collate utf8_bin not null, primary key (a) ) select 'test' as a ; create table טבלה_של_אריאל ( כמות int ); CREATE TABLE t1( c1 INT DEFAULT 12 COMMENT 'column1', c2 INT NULL COMMENT 'column2', c3 INT NOT NULL COMMENT 'column3', c4 VARCHAR(255) CHARACTER SET utf8 NOT NULL DEFAULT 'a', c5 VARCHAR(255) COLLATE utf8_unicode_ci NULL DEFAULT 'b', c6 VARCHAR(255)) COLLATE 
latin1_bin;atlas-0.7.2/sql/migrate/testdata/lex/2_mysql.sql.golden000066400000000000000000000035351431455511600230450ustar00rootroot00000000000000create table t1 (b char(0)); -- end -- create table t1 (b char(0) not null); -- end -- create table if not exists t1 (b char(0) not null); -- end -- create table t2 engine=heap select * from t1; -- end -- create table t1 (ordid int(8) not null auto_increment, ord varchar(50) not null, primary key (ord,ordid)) engine=heap; -- end -- create table mysqltest.$test1 (a$1 int, $b int, c$ int); -- end -- create table t2 (b int) select a as b, a+1 as b from t1; -- end -- create table t1 select if('2002'='2002','Y','N'); -- end -- create table t1 ( k1 varchar(2), k2 int, primary key(k1,k2)); -- end -- insert into t1 values ("a", 1), ("b", 2); -- end -- create table t2 select a, ifnull(b,cast(-7 as signed)) as b, ifnull(c,cast(7 as unsigned)) as c, ifnull(d,cast('2000-01-01' as date)) as d, ifnull(e,cast('b' as char)) as e, ifnull(f,cast('2000-01-01' as datetime)) as f, ifnull(g,cast('5:4:3' as time)) as g, ifnull(h,cast('yet another binary data' as binary)) as h, addtime(cast('1:0:0' as time),cast('1:0:0' as time)) as dd from t1; -- end -- CREATE TABLE t1(id varchar(10) NOT NULL PRIMARY KEY, dsc longtext); -- end -- INSERT INTO t1 VALUES ('5000000001', NULL),('5000000003', 'Test'),('5000000004', NULL); -- end -- create table t1 ( a varchar(112) charset utf8 collate utf8_bin not null, primary key (a) ) select 'test' as a ; -- end -- create table טבלה_של_אריאל ( כמות int ); -- end -- CREATE TABLE t1( c1 INT DEFAULT 12 COMMENT 'column1', c2 INT NULL COMMENT 'column2', c3 INT NOT NULL COMMENT 'column3', c4 VARCHAR(255) CHARACTER SET utf8 NOT NULL DEFAULT 'a', c5 VARCHAR(255) COLLATE utf8_unicode_ci NULL DEFAULT 'b', c6 VARCHAR(255)) COLLATE latin1_bin;atlas-0.7.2/sql/migrate/testdata/lex/3_delimiter.sql000066400000000000000000000001151431455511600223770ustar00rootroot00000000000000-- atlas:delimiter \n\n CREATE INDEX i1 ON t1(c1) 
CREATE INDEX i2 ON t2(c2)atlas-0.7.2/sql/migrate/testdata/lex/3_delimiter.sql.golden000066400000000000000000000000751431455511600236530ustar00rootroot00000000000000CREATE INDEX i1 ON t1(c1) -- end -- CREATE INDEX i2 ON t2(c2)atlas-0.7.2/sql/migrate/testdata/lex/4_delimiter.sql000066400000000000000000000001231431455511600223770ustar00rootroot00000000000000-- atlas:delimiter \n---\n CREATE INDEX i1 ON t1(c1) --- CREATE INDEX i2 ON t2(c2)atlas-0.7.2/sql/migrate/testdata/lex/4_delimiter.sql.golden000066400000000000000000000000751431455511600236540ustar00rootroot00000000000000CREATE INDEX i1 ON t1(c1) -- end -- CREATE INDEX i2 ON t2(c2)atlas-0.7.2/sql/migrate/testdata/lex/5_delimiter.sql000066400000000000000000000006431431455511600224070ustar00rootroot00000000000000-- atlas:delimiter \n-- end --\n CREATE DEFINER='boring' PROCEDURE proc () COMMENT 'ATLAS_DELIMITER' SQL SECURITY INVOKER NOT DETERMINISTIC MODIFIES SQL DATA BEGIN UPDATE performance_schema.threads SET instrumented = 'YES' WHERE type = 'BACKGROUND'; SELECT CONCAT('Enabled ', @rows := ROW_COUNT(), ' background thread', IF(@rows != 1, 's', '')) AS summary; END -- end -- CALL proc();atlas-0.7.2/sql/migrate/testdata/lex/5_delimiter.sql.golden000066400000000000000000000005771431455511600236640ustar00rootroot00000000000000CREATE DEFINER='boring' PROCEDURE proc () COMMENT 'ATLAS_DELIMITER' SQL SECURITY INVOKER NOT DETERMINISTIC MODIFIES SQL DATA BEGIN UPDATE performance_schema.threads SET instrumented = 'YES' WHERE type = 'BACKGROUND'; SELECT CONCAT('Enabled ', @rows := ROW_COUNT(), ' background thread', IF(@rows != 1, 's', '')) AS summary; END -- end -- CALL proc();atlas-0.7.2/sql/migrate/testdata/lex/6_skip_comment.sql000066400000000000000000000006031431455511600231160ustar00rootroot00000000000000-- comment 1 CREATE TABLE t1(id int); # CREATE TABLE t1(id int); -- comment 2 # CREATE TABLE t2(id int); CREATE TABLE t2(id int); -- comment 3 CREATE TABLE t3(id int); # CREATE TABLE t3(id int); /* comment 4 */ CREATE 
TABLE t4( id int /* comment */ -- comment ) ENGINE=InnoDB; /* comment 5 */ CREATE TABLE t5( id int /* comment */ -- comment ) ENGINE=InnoDB; atlas-0.7.2/sql/migrate/testdata/lex/6_skip_comment.sql.golden000066400000000000000000000004161431455511600243670ustar00rootroot00000000000000CREATE TABLE t1(id int); -- end -- CREATE TABLE t2(id int); -- end -- CREATE TABLE t3(id int); -- end -- CREATE TABLE t4( id int /* comment */ -- comment ) ENGINE=InnoDB; -- end -- CREATE TABLE t5( id int /* comment */ -- comment ) ENGINE=InnoDB;atlas-0.7.2/sql/migrate/testdata/lex/7_delimiter_2n.sql000066400000000000000000000004431431455511600230060ustar00rootroot00000000000000-- atlas:delimiter \n\n CREATE EXTENSION IF NOT EXISTS unaccent; CREATE OR REPLACE FUNCTION public.slugify( v TEXT ) RETURNS TEXT STRICT IMMUTABLE AS $$ BEGIN RETURN trim(BOTH '-' FROM regexp_replace(lower(unaccent(trim(v))), '[^a-z0-9\\-_]+', '-', 'gi')); END; LANGUAGE plpgsql; atlas-0.7.2/sql/migrate/testdata/lex/7_delimiter_2n.sql.golden000066400000000000000000000004211431455511600242510ustar00rootroot00000000000000CREATE EXTENSION IF NOT EXISTS unaccent; -- end -- CREATE OR REPLACE FUNCTION public.slugify( v TEXT ) RETURNS TEXT STRICT IMMUTABLE AS $$ BEGIN RETURN trim(BOTH '-' FROM regexp_replace(lower(unaccent(trim(v))), '[^a-z0-9\\-_]+', '-', 'gi')); END; LANGUAGE plpgsql;atlas-0.7.2/sql/migrate/testdata/lex/8_delimiter_3n.sql000066400000000000000000000004451431455511600230120ustar00rootroot00000000000000-- atlas:delimiter \n\n\n CREATE EXTENSION IF NOT EXISTS unaccent; CREATE OR REPLACE FUNCTION public.slugify( v TEXT ) RETURNS TEXT STRICT IMMUTABLE AS $$ BEGIN RETURN trim(BOTH '-' FROM regexp_replace(lower(unaccent(trim(v))), '[^a-z0-9\\-_]+', '-', 'gi')); END; LANGUAGE plpgsql; atlas-0.7.2/sql/migrate/testdata/lex/8_delimiter_3n.sql.golden000066400000000000000000000004211431455511600242530ustar00rootroot00000000000000CREATE EXTENSION IF NOT EXISTS unaccent; -- end -- CREATE OR REPLACE FUNCTION 
public.slugify( v TEXT ) RETURNS TEXT STRICT IMMUTABLE AS $$ BEGIN RETURN trim(BOTH '-' FROM regexp_replace(lower(unaccent(trim(v))), '[^a-z0-9\\-_]+', '-', 'gi')); END; LANGUAGE plpgsql;atlas-0.7.2/sql/migrate/testdata/lex/9_delimiter_3n.sql000066400000000000000000000002451431455511600230110ustar00rootroot00000000000000-- atlas:delimiter \n\n\n CREATE PROCEDURE dorepeat(p1 INT) BEGIN SET @x = 0; REPEAT SET @x = @x + 1; UNTIL @x > p1 END REPEAT; END; CALL dorepeat(1000); atlas-0.7.2/sql/migrate/testdata/lex/9_delimiter_3n.sql.golden000066400000000000000000000002211431455511600242520ustar00rootroot00000000000000CREATE PROCEDURE dorepeat(p1 INT) BEGIN SET @x = 0; REPEAT SET @x = @x + 1; UNTIL @x > p1 END REPEAT; END; -- end -- CALL dorepeat(1000);atlas-0.7.2/sql/migrate/testdata/migrate/000077500000000000000000000000001431455511600203215ustar00rootroot00000000000000atlas-0.7.2/sql/migrate/testdata/migrate/1_initial.down.sql000066400000000000000000000000271431455511600236600ustar00rootroot00000000000000DROP TABLE IF EXISTS t;atlas-0.7.2/sql/migrate/testdata/migrate/1_initial.up.sql000066400000000000000000000000261431455511600233340ustar00rootroot00000000000000CREATE TABLE t(c int);atlas-0.7.2/sql/migrate/testdata/migrate/atlas.sum000066400000000000000000000002641431455511600221550ustar00rootroot00000000000000h1:M74RrNK69S2pj6C541LR1ew5O32/i0WoyNgsJmyuiUk= 1_initial.down.sql h1:0zypK43rgPbgvVUgVJABGN25VgM1QSeU+LJDBb8cEQI= 1_initial.up.sql h1:hFhs5XhRml4KTWGF5td6h1s7xNqAFnaEBbC5Y/NF7i4= atlas-0.7.2/sql/migrate/testdata/migrate/sub/000077500000000000000000000000001431455511600211125ustar00rootroot00000000000000atlas-0.7.2/sql/migrate/testdata/migrate/sub/1.a_sub.up.sql000066400000000000000000000001411431455511600235020ustar00rootroot00000000000000-- create table "t_sub" CREATE TABLE t_sub(c int); -- add c1 column ALTER TABLE t_sub ADD c1 
int;atlas-0.7.2/sql/migrate/testdata/migrate/sub/2.10.x-20_description.sql000066400000000000000000000000561431455511600253040ustar00rootroot00000000000000-- add c2 column ALTER TABLE t_sub ADD c2 int;atlas-0.7.2/sql/migrate/testdata/migrate/sub/3_partly.sql000066400000000000000000000001351431455511600233670ustar00rootroot00000000000000-- add c3 column ALTER TABLE t_sub ADD c3 int; -- add c4 column ALTER TABLE t_sub ADD c4 int;atlas-0.7.2/sql/migrate/testdata/migrate/sub/atlas.sum000066400000000000000000000003661431455511600227510ustar00rootroot00000000000000h1:VpH77zWOBMwX5QhvnQo0XQvXCrOYZg4h1o0XlJTQnl0= 1.a_sub.up.sql h1:nXyZR020M/mH7LxkoTkJr7BcQkipVg90imQ9I4595dw= 2.10.x-20_description.sql h1:wQB3Vh3PHVXQg9OD3Gn7TBxbZN3r1Qb7TtAE1g3q9mQ= 3_partly.sql h1:lHlMz6mEvBfvjry5lFXjs2vi6Et9xb9CWicaOXD42Qc= atlas-0.7.2/sql/mysql/000077500000000000000000000000001431455511600145755ustar00rootroot00000000000000atlas-0.7.2/sql/mysql/convert.go000066400000000000000000000157731431455511600166210ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package mysql import ( "fmt" "strconv" "strings" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" ) // FormatType converts schema type to its column form in the database. // An error is returned if the type cannot be recognized. func FormatType(t schema.Type) (string, error) { var f string switch t := t.(type) { case *BitType: f = strings.ToLower(t.T) if t.Size > 1 { // The default size is 1. Thus, both // BIT and BIT(1) are formatted as bit. f += fmt.Sprintf("(%d)", t.Size) } case *schema.BoolType: // Map all flavors to a single form. 
switch f = strings.ToLower(t.T); f { case TypeBool, TypeBoolean, TypeTinyInt, "tinyint(1)": f = TypeBool } case *schema.BinaryType: f = strings.ToLower(t.T) // Accept 0 as a valid size, and avoid appending the default size of type BINARY. if f == TypeVarBinary && t.Size != nil || f == TypeBinary && t.Size != nil && *t.Size != 1 { f = fmt.Sprintf("%s(%d)", f, *t.Size) } case *schema.DecimalType: if f = strings.ToLower(t.T); f != TypeDecimal && f != TypeNumeric { return "", fmt.Errorf("mysql: unexpected decimal type: %q", t.T) } switch p, s := t.Precision, t.Scale; { case p < 0 || s < 0: return "", fmt.Errorf("mysql: decimal type must have precision > 0 and scale >= 0: %d, %d", p, s) case p < s: return "", fmt.Errorf("mysql: decimal type must have precision >= scale: %d < %d", p, s) case p == 0 && s == 0: // The default value for precision is 10 (i.e. decimal(0,0) = decimal(10)). p = 10 fallthrough case s == 0: // In standard SQL, the syntax DECIMAL(M) is equivalent to DECIMAL(M,0), f = fmt.Sprintf("decimal(%d)", p) default: f = fmt.Sprintf("decimal(%d,%d)", p, s) } if t.Unsigned { f += " unsigned" } case *schema.EnumType: f = fmt.Sprintf("enum(%s)", formatValues(t.Values)) case *schema.FloatType: f = strings.ToLower(t.T) // FLOAT with precision > 24, become DOUBLE. // Also, REAL is a synonym for DOUBLE (if REAL_AS_FLOAT was not set). if f == TypeFloat && t.Precision > 24 || f == TypeReal { f = TypeDouble } if t.Unsigned { f += " unsigned" } case *schema.IntegerType: f = strings.ToLower(t.T) if t.Unsigned { f += " unsigned" } case *schema.JSONType: f = strings.ToLower(t.T) case *SetType: f = fmt.Sprintf("set(%s)", formatValues(t.Values)) case *schema.StringType: f = strings.ToLower(t.T) switch f { case TypeChar: // Not a single char. if t.Size > 0 { f += fmt.Sprintf("(%d)", t.Size) } case TypeVarchar: // Zero is also a valid length. 
f = fmt.Sprintf("varchar(%d)", t.Size) } case *schema.SpatialType: f = strings.ToLower(t.T) case *schema.TimeType: f = strings.ToLower(t.T) if p := t.Precision; p != nil && *p > 0 { f = fmt.Sprintf("%s(%d)", f, *p) } case *schema.UnsupportedType: // Do not accept unsupported types as we should cover all cases. return "", fmt.Errorf("unsupported type %q", t.T) default: return "", fmt.Errorf("invalid schema type %T", t) } return f, nil } // ParseType returns the schema.Type value represented by the given raw type. // The raw value is expected to follow the format in MySQL information schema. func ParseType(raw string) (schema.Type, error) { parts, size, unsigned, err := parseColumn(raw) if err != nil { return nil, err } switch t := parts[0]; t { case TypeBit: return &BitType{ T: t, Size: int(size), }, nil // bool and booleans are synonyms for // tinyint with display-width set to 1. case TypeBool, TypeBoolean: return &schema.BoolType{ T: TypeBool, }, nil case TypeTinyInt, TypeSmallInt, TypeMediumInt, TypeInt, TypeBigInt: if size == 1 { return &schema.BoolType{ T: TypeBool, }, nil } // For integer types, the size represents the display width and does not // constrain the range of values that can be stored in the column. // The storage byte-size is inferred from the type name (i.e TINYINT takes // a single byte). 
ft := &schema.IntegerType{ T: t, Unsigned: unsigned, } if attr := parts[len(parts)-1]; attr == "zerofill" && size != 0 { ft.Attrs = []schema.Attr{ &DisplayWidth{ N: size, }, &ZeroFill{ A: attr, }, } } return ft, nil case TypeNumeric, TypeDecimal: dt := &schema.DecimalType{ T: t, Unsigned: unsigned, } if len(parts) > 1 && parts[1] != "unsigned" { if dt.Precision, err = strconv.Atoi(parts[1]); err != nil { return nil, fmt.Errorf("parse precision %q", parts[1]) } } if len(parts) > 2 && parts[2] != "unsigned" { if dt.Scale, err = strconv.Atoi(parts[2]); err != nil { return nil, fmt.Errorf("parse scale %q", parts[1]) } } return dt, nil case TypeFloat, TypeDouble, TypeReal: ft := &schema.FloatType{ T: t, Unsigned: unsigned, } if len(parts) > 1 && parts[1] != "unsigned" { if ft.Precision, err = strconv.Atoi(parts[1]); err != nil { return nil, fmt.Errorf("parse precision %q", parts[1]) } } return ft, nil case TypeBinary, TypeVarBinary: bt := &schema.BinaryType{T: t} if len(parts) > 1 { bt.Size = &size } return bt, nil case TypeTinyBlob, TypeMediumBlob, TypeBlob, TypeLongBlob: return &schema.BinaryType{ T: t, }, nil case TypeChar, TypeVarchar: return &schema.StringType{ T: t, Size: size, }, nil case TypeTinyText, TypeMediumText, TypeText, TypeLongText: return &schema.StringType{ T: t, }, nil case TypeEnum, TypeSet: // Parse the enum values according to the MySQL format. 
// github.com/mysql/mysql-server/blob/8.0/sql/field.cc#Field_enum::sql_type rv := strings.TrimSuffix(strings.TrimPrefix(raw, t+"("), ")") if rv == "" { return nil, fmt.Errorf("mysql: unexpected enum type: %q", raw) } values := strings.Split(rv, "','") for i := range values { values[i] = strings.Trim(values[i], "'") } if t == TypeEnum { return &schema.EnumType{ T: TypeEnum, Values: values, }, nil } return &SetType{ Values: values, }, nil case TypeDate, TypeDateTime, TypeTime, TypeTimestamp, TypeYear: tt := &schema.TimeType{ T: t, } if len(parts) > 1 { p, err := strconv.Atoi(parts[1]) if err != nil { return nil, fmt.Errorf("parse precision %q", parts[1]) } tt.Precision = &p } return tt, nil case TypeJSON: return &schema.JSONType{ T: t, }, nil case TypePoint, TypeMultiPoint, TypeLineString, TypeMultiLineString, TypePolygon, TypeMultiPolygon, TypeGeometry, TypeGeoCollection, TypeGeometryCollection: return &schema.SpatialType{ T: t, }, nil default: return &schema.UnsupportedType{ T: t, }, nil } } // formatValues formats ENUM and SET values. func formatValues(vs []string) string { values := make([]string, len(vs)) for i := range vs { values[i] = vs[i] if !sqlx.IsQuoted(values[i], '"', '\'') { values[i] = "'" + values[i] + "'" } } return strings.Join(values, ",") } atlas-0.7.2/sql/mysql/diff.go000066400000000000000000000446511431455511600160460ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package mysql import ( "encoding/hex" "fmt" "reflect" "strconv" "strings" "sync" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" ) // A diff provides a MySQL implementation for sqlx.DiffDriver. type diff struct { conn // charset to collation mapping. // See, internal directory. 
	// Lazily-resolved mappings between character sets and their default
	// collations (ch2co) and the reverse direction (co2ch). Each mapping
	// is loaded at most once; the result (or error) is cached by sync.Once.
	ch2co, co2ch struct {
		sync.Once
		v   map[string]string
		err error
	}
}

// SchemaAttrDiff returns a changeset for migrating schema attributes from one state to the other.
func (d *diff) SchemaAttrDiff(from, to *schema.Schema) []schema.Change {
	var (
		topAttr []schema.Attr
		changes []schema.Change
	)
	// The realm (top-level) attributes act as the inherited defaults.
	if from.Realm != nil {
		topAttr = from.Realm.Attrs
	}
	// Charset change.
	if change := d.charsetChange(from.Attrs, topAttr, to.Attrs); change != noChange {
		changes = append(changes, change)
	}
	// Collation change.
	if change := d.collationChange(from.Attrs, topAttr, to.Attrs); change != noChange {
		changes = append(changes, change)
	}
	return changes
}

// TableAttrDiff returns a changeset for migrating table attributes from one state to the other.
func (d *diff) TableAttrDiff(from, to *schema.Table) ([]schema.Change, error) {
	var changes []schema.Change
	if change := d.autoIncChange(from.Attrs, to.Attrs); change != noChange {
		changes = append(changes, change)
	}
	if change := sqlx.CommentDiff(from.Attrs, to.Attrs); change != nil {
		changes = append(changes, change)
	}
	// Table-level charset/collation are compared against the schema's
	// attributes, which serve as the inherited defaults.
	if change := d.charsetChange(from.Attrs, from.Schema.Attrs, to.Attrs); change != noChange {
		changes = append(changes, change)
	}
	if change := d.collationChange(from.Attrs, from.Schema.Attrs, to.Attrs); change != noChange {
		changes = append(changes, change)
	}
	if !d.SupportsCheck() && sqlx.Has(to.Attrs, &schema.Check{}) {
		return nil, fmt.Errorf("version %q does not support CHECK constraints", d.V)
	}
	// For MariaDB, we skip JSON CHECK constraints that were created by the databases,
	// or by Atlas for older versions. These CHECK constraints (inlined on the columns)
	// also cannot be dropped using "DROP CONSTRAINTS", but can be modified and dropped
	// using "MODIFY COLUMN".
	var checks []schema.Change
	for _, c := range sqlx.CheckDiff(from, to, func(c1, c2 *schema.Check) bool {
		return enforced(c1.Attrs) == enforced(c2.Attrs)
	}) {
		drop, ok := c.(*schema.DropCheck)
		if !ok || !strings.HasPrefix(drop.C.Expr, "json_valid") {
			checks = append(checks, c)
			continue
		}
		// Generated CHECK have the form of "json_valid(``)"
		// and named as the column.
		if _, ok := to.Column(drop.C.Name); !ok {
			checks = append(checks, c)
		}
	}
	return append(changes, checks...), nil
}

// ColumnChange returns the schema changes (if any) for migrating one column to the other.
func (d *diff) ColumnChange(fromT *schema.Table, from, to *schema.Column) (schema.ChangeKind, error) {
	// Accumulate change bits; any detection error aborts with NoChange.
	change := sqlx.CommentChange(from.Attrs, to.Attrs)
	if from.Type.Null != to.Type.Null {
		change |= schema.ChangeNull
	}
	changed, err := d.typeChanged(from, to)
	if err != nil {
		return schema.NoChange, err
	}
	if changed {
		change |= schema.ChangeType
	}
	if changed, err = d.defaultChanged(from, to); err != nil {
		return schema.NoChange, err
	}
	if changed {
		change |= schema.ChangeDefault
	}
	if changed, err = d.generatedChanged(from, to); err != nil {
		return schema.NoChange, err
	}
	if changed {
		change |= schema.ChangeGenerated
	}
	if changed, err = d.columnCharsetChanged(fromT, from, to); err != nil {
		return schema.NoChange, err
	}
	if changed {
		change |= schema.ChangeCharset
	}
	if changed, err = d.columnCollateChanged(fromT, from, to); err != nil {
		return schema.NoChange, err
	}
	if changed {
		change |= schema.ChangeCollate
	}
	return change, nil
}

// IsGeneratedIndexName reports if the index name was generated by the database.
func (d *diff) IsGeneratedIndexName(_ *schema.Table, idx *schema.Index) bool {
	// Auto-generated index names for functional/expression indexes. See.
// mysql-server/sql/sql_table.cc#add_functional_index_to_create_list const f = "functional_index" switch { case d.SupportsIndexExpr() && idx.Name == f: return true case d.SupportsIndexExpr() && strings.HasPrefix(idx.Name+"_", f): i, err := strconv.ParseInt(strings.TrimLeft(idx.Name, idx.Name+"_"), 10, 64) return err == nil && i > 1 case len(idx.Parts) == 0 || idx.Parts[0].C == nil: return false } // Unnamed INDEX or UNIQUE constraints are named by // the first index-part (as column or part of it). // For example, "c", "c_2", "c_3", etc. switch name := idx.Parts[0].C.Name; { case idx.Name == name: return true case strings.HasPrefix(idx.Name, name+"_"): i, err := strconv.ParseInt(strings.TrimPrefix(idx.Name, name+"_"), 10, 64) return err == nil && i > 1 default: return false } } // IndexAttrChanged reports if the index attributes were changed. func (*diff) IndexAttrChanged(from, to []schema.Attr) bool { return indexType(from).T != indexType(to).T } // IndexPartAttrChanged reports if the index-part attributes (collation or prefix) were changed. func (*diff) IndexPartAttrChanged(from, to *schema.IndexPart) bool { var s1, s2 SubPart return sqlx.Has(from.Attrs, &s1) != sqlx.Has(to.Attrs, &s2) || s1.Len != s2.Len } // ReferenceChanged reports if the foreign key referential action was changed. func (*diff) ReferenceChanged(from, to schema.ReferenceOption) bool { // According to MySQL docs, foreign key constraints are checked // immediately, so NO ACTION is the same as RESTRICT. Specifying // RESTRICT (or NO ACTION) is the same as omitting the ON DELETE // or ON UPDATE clause. if from == "" || from == schema.Restrict { from = schema.NoAction } if to == "" || to == schema.Restrict { to = schema.NoAction } return from != to } // Normalize implements the sqlx.Normalizer interface. 
func (d *diff) Normalize(from, to *schema.Table) error {
	indexes := make([]*schema.Index, 0, len(from.Indexes))
	for _, idx := range from.Indexes {
		// MySQL requires that foreign key columns be indexed; Therefore, if the child
		// table is defined on non-indexed columns, an index is automatically created
		// to satisfy the constraint.
		// Therefore, if no such key was defined on the desired state, the diff will
		// recommend dropping it on migration. Therefore, we fix it by dropping it from
		// the current state manually.
		if _, ok := to.Index(idx.Name); ok || !keySupportsFK(from, idx) {
			indexes = append(indexes, idx)
		}
	}
	from.Indexes = indexes
	// Avoid proposing changes to the table COLLATE or CHARSET
	// in case only one of these properties is defined.
	if err := d.defaultCollate(&to.Attrs); err != nil {
		return err
	}
	return d.defaultCharset(&to.Attrs)
}

// collationChange returns the schema change for migrating the collation if
// it was changed, and it is not the default attribute inherited from its parent.
func (*diff) collationChange(from, top, to []schema.Attr) schema.Change {
	var fromC, topC, toC schema.Collation
	// "top" holds the parent's (schema/realm) attributes, used as the
	// inherited default when an explicit COLLATE is absent.
	switch fromHas, topHas, toHas := sqlx.Has(from, &fromC), sqlx.Has(top, &topC), sqlx.Has(to, &toC); {
	case !fromHas && !toHas:
		// Neither state defines a collation: nothing to do.
	case !fromHas:
		return &schema.AddAttr{
			A: &toC,
		}
	case !toHas:
		// There is no way to DROP a COLLATE that was configured on the table,
		// and it is not the default. Therefore, we use ModifyAttr and give it
		// the inherited (and default) collation from schema or server.
		if topHas && fromC.V != topC.V {
			return &schema.ModifyAttr{
				From: &fromC,
				To:   &topC,
			}
		}
	case fromC.V != toC.V:
		return &schema.ModifyAttr{
			From: &fromC,
			To:   &toC,
		}
	}
	return noChange
}

// charsetChange returns the schema change for migrating the charset if
// it was changed, and it is not the default attribute inherited from its parent.
func (*diff) charsetChange(from, top, to []schema.Attr) schema.Change {
	var fromC, topC, toC schema.Charset
	// "top" holds the parent's (schema/realm) attributes, used as the
	// inherited default when an explicit CHARSET is absent.
	switch fromHas, topHas, toHas := sqlx.Has(from, &fromC), sqlx.Has(top, &topC), sqlx.Has(to, &toC); {
	case !fromHas && !toHas:
		// Neither state defines a charset: nothing to do.
	case !fromHas:
		return &schema.AddAttr{
			A: &toC,
		}
	case !toHas:
		// There is no way to DROP a CHARSET that was configured on the table,
		// and it is not the default. Therefore, we use ModifyAttr and give it
		// the inherited (and default) charset from schema or server.
		if topHas && fromC.V != topC.V {
			return &schema.ModifyAttr{
				From: &fromC,
				To:   &topC,
			}
		}
	case fromC.V != toC.V:
		return &schema.ModifyAttr{
			From: &fromC,
			To:   &toC,
		}
	}
	return noChange
}

// columnCharsetChanged reports if there is a change to the column charset.
func (d *diff) columnCharsetChanged(fromT *schema.Table, from, to *schema.Column) (bool, error) {
	// Fill in the charset implied by an explicit collation (if any),
	// so the comparison below sees the effective value.
	if err := d.defaultCharset(&to.Attrs); err != nil {
		return false, err
	}
	var (
		fromC, topC, toC       schema.Charset
		fromHas, topHas, toHas = sqlx.Has(from.Attrs, &fromC), sqlx.Has(fromT.Attrs, &topC), sqlx.Has(to.Attrs, &toC)
	)
	// Column was updated with custom CHARSET that was dropped.
	// Hence, we should revert to the one defined on the table.
	return fromHas && !toHas && topHas && fromC.V != topC.V ||
		// Custom CHARSET was added to the column. Hence,
		// Does not match the one defined in the table.
		!fromHas && toHas && topHas && toC.V != topC.V ||
		// CHARSET was explicitly changed.
		fromHas && toHas && fromC.V != toC.V, nil
}

// columnCollateChanged reports if there is a change to the column collation.
func (d *diff) columnCollateChanged(fromT *schema.Table, from, to *schema.Column) (bool, error) {
	// Fill in the collation implied by an explicit charset (if any),
	// so the comparison below sees the effective value.
	if err := d.defaultCollate(&to.Attrs); err != nil {
		return false, err
	}
	var (
		fromC, topC, toC       schema.Collation
		fromHas, topHas, toHas = sqlx.Has(from.Attrs, &fromC), sqlx.Has(fromT.Attrs, &topC), sqlx.Has(to.Attrs, &toC)
	)
	// Column was updated with custom COLLATE that was dropped.
	// Hence, we should revert to the one defined on the table.
	return fromHas && !toHas && topHas && fromC.V != topC.V ||
		// Custom COLLATE was added to the column. Hence,
		// Does not match the one defined in the table.
		!fromHas && toHas && topHas && toC.V != topC.V ||
		// COLLATE was explicitly changed.
		fromHas && toHas && fromC.V != toC.V, nil
}

// autoIncChange returns the schema change for changing the AUTO_INCREMENT
// attribute in case it is not the default.
func (*diff) autoIncChange(from, to []schema.Attr) schema.Change {
	var fromA, toA AutoIncrement
	switch fromHas, toHas := sqlx.Has(from, &fromA), sqlx.Has(to, &toA); {
	// Ignore if the AUTO_INCREMENT attribute was dropped from the desired schema.
	case fromHas && !toHas:
	// The AUTO_INCREMENT exists in the desired schema, and may not exist in the inspected one.
	// This can happen because older versions of MySQL (< 8.0) stored the AUTO_INCREMENT counter
	// in main memory (not persistent), and the value is reset on process restart for empty tables.
	case toA.V > 1 && toA.V > fromA.V:
		// Suggest a diff only if the desired value is greater than the inspected one,
		// because this attribute cannot be maintained in users schema and used to set
		// up only the initial value.
		return &schema.ModifyAttr{
			From: &fromA,
			To:   &toA,
		}
	}
	return noChange
}

// indexType returns the index type from its attribute.
// The default type is BTREE if no type was specified.
func indexType(attr []schema.Attr) *IndexType {
	t := &IndexType{T: IndexTypeBTree}
	if sqlx.Has(attr, t) {
		// Normalize to upper-case for case-insensitive comparison.
		t.T = strings.ToUpper(t.T)
	}
	return t
}

// enforced returns the ENFORCED attribute for the CHECK
// constraint. A CHECK is ENFORCED if not stated otherwise.
func enforced(attr []schema.Attr) bool {
	if e := (Enforced{}); sqlx.Has(attr, &e) {
		return e.V
	}
	return true
}

// noChange describes a zero change.
var noChange struct{ schema.Change }

// typeChanged reports if the column type differs between the two states.
func (d *diff) typeChanged(from, to *schema.Column) (bool, error) {
	fromT, toT := from.Type.Type, to.Type.Type
	if fromT == nil || toT == nil {
		return false, fmt.Errorf("mysql: missing type information for column %q", from.Name)
	}
	// Different concrete types always imply a change.
	if reflect.TypeOf(fromT) != reflect.TypeOf(toT) {
		return true, nil
	}
	var changed bool
	switch fromT := fromT.(type) {
	// Types that are fully described by their formatted DDL
	// string are compared by their printed representation.
	case *BitType, *schema.BinaryType, *schema.BoolType, *schema.DecimalType, *schema.FloatType,
		*schema.JSONType, *schema.StringType, *schema.SpatialType, *schema.TimeType:
		ft, err := FormatType(fromT)
		if err != nil {
			return false, err
		}
		tt, err := FormatType(toT)
		if err != nil {
			return false, err
		}
		changed = ft != tt
	case *schema.EnumType:
		toT := toT.(*schema.EnumType)
		changed = !sqlx.ValuesEqual(fromT.Values, toT.Values)
	case *schema.IntegerType:
		toT := toT.(*schema.IntegerType)
		// MySQL v8.0.19 dropped both display-width
		// and zerofill from the information schema.
		if d.SupportsDisplayWidth() {
			// Strip the display-width suffix (e.g. "int(11)" -> "int")
			// before comparing the base type names.
			ft, _, _, err := parseColumn(fromT.T)
			if err != nil {
				return false, err
			}
			tt, _, _, err := parseColumn(toT.T)
			if err != nil {
				return false, err
			}
			fromT.T, toT.T = ft[0], tt[0]
		}
		changed = fromT.T != toT.T || fromT.Unsigned != toT.Unsigned
	case *SetType:
		toT := toT.(*SetType)
		changed = !sqlx.ValuesEqual(fromT.Values, toT.Values)
	default:
		return false, &sqlx.UnsupportedTypeError{Type: fromT}
	}
	return changed, nil
}

// defaultChanged reports if the default value of a column was changed.
func (d *diff) defaultChanged(from, to *schema.Column) (bool, error) {
	d1, ok1 := sqlx.DefaultValue(from)
	d2, ok2 := sqlx.DefaultValue(to)
	// Default exists on one side only.
	if ok1 != ok2 {
		return true, nil
	}
	// Exact textual match: no need for type-aware comparison.
	if d1 == d2 {
		return false, nil
	}
	// Otherwise, compare by the column type so equivalent spellings
	// (quoting, case, numeric formats) are not reported as changes.
	switch from.Type.Type.(type) {
	case *schema.BinaryType:
		a, err1 := binValue(d1)
		b, err2 := binValue(d2)
		if err1 != nil || err2 != nil {
			return true, nil
		}
		return !equalsStringValues(a, b), nil
	case *schema.BoolType:
		a, err1 := boolValue(d1)
		b, err2 := boolValue(d2)
		if err1 == nil && err2 == nil {
			return a != b, nil
		}
		// Unrecognized literal(s): treated as unchanged (best-effort).
		return false, nil
	case *schema.IntegerType:
		return !d.equalIntValues(d1, d2), nil
	case *schema.FloatType, *schema.DecimalType:
		return !d.equalFloatValues(d1, d2), nil
	case *schema.EnumType, *SetType, *schema.StringType:
		return !equalsStringValues(d1, d2), nil
	case *schema.TimeType:
		// Ignore case, quotes, parens and precision wrappers
		// (e.g. "CURRENT_TIMESTAMP" vs "current_timestamp()").
		x1 := strings.ToLower(strings.Trim(d1, "' ()"))
		x2 := strings.ToLower(strings.Trim(d2, "' ()"))
		return x1 != x2, nil
	default:
		x1 := strings.Trim(d1, "'")
		x2 := strings.Trim(d2, "'")
		return x1 != x2, nil
	}
}

// generatedChanged reports if the generated expression of a column was changed.
func (*diff) generatedChanged(from, to *schema.Column) (bool, error) {
	var (
		fromX, toX     schema.GeneratedExpr
		fromHas, toHas = sqlx.Has(from.Attrs, &fromX), sqlx.Has(to.Attrs, &toX)
	)
	// Equal when both are absent, or both exist with the same
	// (possibly wrapped) expression and the same storage kind.
	if !fromHas && !toHas || fromHas && toHas && sqlx.MayWrap(fromX.Expr) == sqlx.MayWrap(toX.Expr) && storedOrVirtual(fromX.Type) == storedOrVirtual(toX.Type) {
		return false, nil
	}
	return true, checkChangeGenerated(from, to)
}

// equalIntValues report if the 2 int default values are ~equal.
// Note that default expression are not supported atm.
func (d *diff) equalIntValues(x1, x2 string) bool {
	x1 = strings.ToLower(strings.Trim(x1, "' "))
	x2 = strings.ToLower(strings.Trim(x2, "' "))
	if x1 == x2 {
		return true
	}
	d1, err := strconv.ParseInt(x1, 10, 64)
	if err != nil {
		// Fall back to float parsing; the int64 conversion truncates
		// the fractional part (toward zero).
		f, err := strconv.ParseFloat(x1, 64)
		if err != nil {
			return false
		}
		d1 = int64(f)
	}
	d2, err := strconv.ParseInt(x2, 10, 64)
	if err != nil {
		// Fall back to float parsing; the int64 conversion truncates
		// the fractional part (toward zero).
		f, err := strconv.ParseFloat(x2, 64)
		if err != nil {
			return false
		}
		d2 = int64(f)
	}
	return d1 == d2
}

// equalFloatValues report if the 2 float default values are ~equal.
// Note that default expression are not supported atm.
func (d *diff) equalFloatValues(x1, x2 string) bool {
	x1 = strings.ToLower(strings.Trim(x1, "' "))
	x2 = strings.ToLower(strings.Trim(x2, "' "))
	if x1 == x2 {
		return true
	}
	d1, err := strconv.ParseFloat(x1, 64)
	if err != nil {
		return false
	}
	d2, err := strconv.ParseFloat(x2, 64)
	if err != nil {
		return false
	}
	return d1 == d2
}

// equalsStringValues report if the 2 string default values are
// equal after dropping their quotes.
func equalsStringValues(x1, x2 string) bool {
	a, err1 := sqlx.Unquote(x1)
	b, err2 := sqlx.Unquote(x2)
	return a == b && err1 == nil && err2 == nil
}

// boolValue returns the MySQL boolean value from the given string (if it is known).
func boolValue(x string) (bool, error) {
	switch x {
	case "1", "'1'", "TRUE", "true":
		return true, nil
	case "0", "'0'", "FALSE", "false":
		return false, nil
	default:
		return false, fmt.Errorf("mysql: unknown value: %q", x)
	}
}

// binValue returns the MySQL binary value from the given string (if it is known).
func binValue(x string) (string, error) {
	// Non-hex literals are returned as-is; "0x..." literals are decoded.
	if !isHex(x) {
		return x, nil
	}
	d, err := hex.DecodeString(x[2:])
	if err != nil {
		return x, err
	}
	return string(d), nil
}

// keySupportsFK reports if the index key was created automatically by MySQL
// to support the constraint. See sql/sql_table.cc#find_fk_supporting_key.
func keySupportsFK(t *schema.Table, idx *schema.Index) bool {
	// An index named exactly after a foreign key supports it.
	if _, ok := t.ForeignKey(idx.Name); ok {
		return true
	}
search:
	// Otherwise, look for a foreign key whose column list matches
	// the index parts exactly (same length, same names, same order).
	for _, fk := range t.ForeignKeys {
		if len(fk.Columns) != len(idx.Parts) {
			continue
		}
		for i, c := range fk.Columns {
			if idx.Parts[i].C == nil || idx.Parts[i].C.Name != c.Name {
				continue search
			}
		}
		return true
	}
	return false
}

// defaultCollate appends the default COLLATE to the attributes in case a
// custom character-set was defined for the element and the COLLATE was not.
func (d *diff) defaultCollate(attrs *[]schema.Attr) error {
	var charset schema.Charset
	// Nothing to do unless a charset is set without a collation.
	if !sqlx.Has(*attrs, &charset) || sqlx.Has(*attrs, &schema.Collation{}) {
		return nil
	}
	// Load the charset->collation mapping once and cache it.
	d.ch2co.Do(func() {
		d.ch2co.v, d.ch2co.err = d.CharsetToCollate()
	})
	if d.ch2co.err != nil {
		return d.ch2co.err
	}
	v, ok := d.ch2co.v[charset.V]
	if !ok {
		return fmt.Errorf("mysql: unknown character set: %q", charset.V)
	}
	schema.ReplaceOrAppend(attrs, &schema.Collation{V: v})
	return nil
}

// defaultCharset appends the default CHARSET to the attributes in case a
// custom collation was defined for the element and the CHARSET was not.
func (d *diff) defaultCharset(attrs *[]schema.Attr) error {
	var collate schema.Collation
	// Nothing to do unless a collation is set without a charset.
	if !sqlx.Has(*attrs, &collate) || sqlx.Has(*attrs, &schema.Charset{}) {
		return nil
	}
	// Load the collation->charset mapping once and cache it.
	d.co2ch.Do(func() {
		d.co2ch.v, d.co2ch.err = d.CollateToCharset()
	})
	if d.co2ch.err != nil {
		return d.co2ch.err
	}
	v, ok := d.co2ch.v[collate.V]
	if !ok {
		return fmt.Errorf("mysql: unknown collation: %q", collate.V)
	}
	schema.ReplaceOrAppend(attrs, &schema.Charset{V: v})
	return nil
}
atlas-0.7.2/sql/mysql/diff_test.go000066400000000000000000000507011431455511600170760ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.
package mysql import ( "testing" "ariga.io/atlas/sql/schema" "github.com/DATA-DOG/go-sqlmock" "github.com/stretchr/testify/require" ) func TestDiff_TableDiff(t *testing.T) { type testcase struct { name string from, to *schema.Table wantChanges []schema.Change wantErr bool } tests := []testcase{ { name: "mismatched names", from: &schema.Table{Name: "users"}, to: &schema.Table{Name: "pets"}, wantErr: true, }, { name: "no changes", from: &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}}, to: &schema.Table{Name: "users"}, }, { name: "no changes", from: &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}, Columns: []*schema.Column{{Name: "enum", Default: &schema.RawExpr{X: "'A'"}, Type: &schema.ColumnType{Type: &schema.EnumType{Values: []string{"A"}}}}}}, to: &schema.Table{Name: "users", Columns: []*schema.Column{{Name: "enum", Default: &schema.RawExpr{X: `"A"`}, Type: &schema.ColumnType{Type: &schema.EnumType{Values: []string{"A"}}}}}}, }, { name: "change primary key", from: func() *schema.Table { t := &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}, Columns: []*schema.Column{{Name: "id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}}} t.PrimaryKey = &schema.Index{ Parts: []*schema.IndexPart{{C: t.Columns[0]}}, } return t }(), to: &schema.Table{Name: "users"}, wantErr: true, }, { name: "modify counter", from: &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}, Attrs: []schema.Attr{&AutoIncrement{V: 1}}}, to: &schema.Table{Name: "users", Attrs: []schema.Attr{&AutoIncrement{V: 100}}}, wantChanges: []schema.Change{ &schema.ModifyAttr{ From: &AutoIncrement{V: 1}, To: &AutoIncrement{V: 100}, }, }, }, { name: "add collation", from: &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}, Attrs: []schema.Attr{&schema.Charset{V: "latin1"}}}, to: &schema.Table{Name: "users", Attrs: []schema.Attr{&schema.Collation{V: "latin1_bin"}}}, wantChanges: []schema.Change{ 
&schema.AddAttr{ A: &schema.Collation{V: "latin1_bin"}, }, }, }, { name: "drop collation means modify", from: &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public", Attrs: []schema.Attr{&schema.Collation{V: "utf8mb4_0900_ai_ci"}}}, Attrs: []schema.Attr{&schema.Collation{V: "utf8mb4_bin"}}}, to: &schema.Table{Name: "users"}, wantChanges: []schema.Change{ &schema.ModifyAttr{ From: &schema.Collation{V: "utf8mb4_bin"}, To: &schema.Collation{V: "utf8mb4_0900_ai_ci"}, }, }, }, { name: "modify collation", from: &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}, Attrs: []schema.Attr{&schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}}}, to: &schema.Table{Name: "users", Attrs: []schema.Attr{&schema.Collation{V: "latin1_bin"}}}, wantChanges: []schema.Change{ &schema.ModifyAttr{ From: &schema.Collation{V: "latin1_swedish_ci"}, To: &schema.Collation{V: "latin1_bin"}, }, }, }, { name: "drop charset means modify", from: &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public", Attrs: []schema.Attr{&schema.Charset{V: "hebrew"}}}, Attrs: []schema.Attr{&schema.Charset{V: "hebrew_bin"}}}, to: &schema.Table{Name: "users"}, wantChanges: []schema.Change{ &schema.ModifyAttr{ From: &schema.Charset{V: "hebrew_bin"}, To: &schema.Charset{V: "hebrew"}, }, }, }, { name: "modify charset", from: &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}, Attrs: []schema.Attr{&schema.Charset{V: "utf8"}, &schema.Collation{V: "utf8_general_ci"}}}, to: &schema.Table{Name: "users", Attrs: []schema.Attr{&schema.Charset{V: "utf8mb4"}}}, wantChanges: []schema.Change{ &schema.ModifyAttr{ From: &schema.Charset{V: "utf8"}, To: &schema.Charset{V: "utf8mb4"}, }, &schema.ModifyAttr{ From: &schema.Collation{V: "utf8_general_ci"}, To: &schema.Collation{V: "utf8mb4_0900_ai_ci"}, }, }, }, { name: "add check", from: &schema.Table{Name: "t1", Schema: &schema.Schema{Name: "public"}}, to: &schema.Table{Name: "t1", Attrs: 
[]schema.Attr{&schema.Check{Name: "users_chk1_c1", Expr: "(`c1` <>_latin1\\'foo\\')"}}}, wantChanges: []schema.Change{ &schema.AddCheck{ C: &schema.Check{Name: "users_chk1_c1", Expr: "(`c1` <>_latin1\\'foo\\')"}, }, }, }, { name: "drop check", from: &schema.Table{Name: "t1", Schema: &schema.Schema{Name: "public"}, Attrs: []schema.Attr{&schema.Check{Name: "users_chk1_c1", Expr: "(`c1` <>_latin1\\'foo\\')"}}}, to: &schema.Table{Name: "t1"}, wantChanges: []schema.Change{ &schema.DropCheck{ C: &schema.Check{Name: "users_chk1_c1", Expr: "(`c1` <>_latin1\\'foo\\')"}, }, }, }, { name: "modify check", from: &schema.Table{Name: "t1", Schema: &schema.Schema{Name: "public"}, Attrs: []schema.Attr{&schema.Check{Name: "users_chk1_c1", Expr: "(`c1` <>_latin1\\'foo\\')", Attrs: []schema.Attr{&Enforced{V: false}}}}}, to: &schema.Table{Name: "t1", Attrs: []schema.Attr{&schema.Check{Name: "users_chk1_c1", Expr: "(`c1` <>_latin1\\'foo\\')", Attrs: []schema.Attr{&Enforced{V: true}}}}}, wantChanges: []schema.Change{ &schema.ModifyCheck{ From: &schema.Check{Name: "users_chk1_c1", Expr: "(`c1` <>_latin1\\'foo\\')", Attrs: []schema.Attr{&Enforced{V: false}}}, To: &schema.Check{Name: "users_chk1_c1", Expr: "(`c1` <>_latin1\\'foo\\')", Attrs: []schema.Attr{&Enforced{V: true}}}, }, }, }, { name: "add comment", from: &schema.Table{Name: "t1", Schema: &schema.Schema{Name: "public"}}, to: &schema.Table{Name: "t1", Attrs: []schema.Attr{&schema.Comment{Text: "t1"}}}, wantChanges: []schema.Change{ &schema.AddAttr{ A: &schema.Comment{Text: "t1"}, }, }, }, { name: "drop comment", from: &schema.Table{Name: "t1", Schema: &schema.Schema{Name: "public"}, Attrs: []schema.Attr{&schema.Comment{Text: "t1"}}}, to: &schema.Table{Name: "t1"}, wantChanges: []schema.Change{ &schema.ModifyAttr{ From: &schema.Comment{Text: "t1"}, To: &schema.Comment{Text: ""}, }, }, }, { name: "modify comment", from: &schema.Table{Name: "t1", Schema: &schema.Schema{Name: "public"}, Attrs: []schema.Attr{&schema.Comment{Text: 
"t1"}}}, to: &schema.Table{Name: "t1", Attrs: []schema.Attr{&schema.Comment{Text: "t1!"}}}, wantChanges: []schema.Change{ &schema.ModifyAttr{ From: &schema.Comment{Text: "t1"}, To: &schema.Comment{Text: "t1!"}, }, }, }, func() testcase { var ( from = &schema.Table{ Name: "t1", Schema: &schema.Schema{ Name: "public", }, Columns: []*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}}}, {Name: "c2", Type: &schema.ColumnType{Raw: "tinyint", Type: &schema.IntegerType{T: "tinyint"}}}, {Name: "c4", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float"}}, Default: &schema.Literal{V: "0.00"}}, }, } to = &schema.Table{ Name: "t1", Columns: []*schema.Column{ { Name: "c1", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}, Null: true}, Default: &schema.RawExpr{X: "{}"}, Attrs: []schema.Attr{&schema.Comment{Text: "json comment"}}, }, {Name: "c3", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, {Name: "c4", Type: &schema.ColumnType{Type: &schema.FloatType{T: "float"}}, Default: &schema.Literal{V: "0.00"}}, }, } ) return testcase{ name: "columns", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyColumn{ From: from.Columns[0], To: to.Columns[0], Change: schema.ChangeNull | schema.ChangeComment | schema.ChangeDefault, }, &schema.DropColumn{C: from.Columns[1]}, &schema.AddColumn{C: to.Columns[1]}, }, } }(), // Custom CHARSET was dropped. func() testcase { var ( from = schema.NewTable("t1"). SetSchema(schema.New("public")). SetCharset("utf8"). SetCollation("utf8_general_ci"). AddColumns(schema.NewStringColumn("c1", "text").SetCharset("latin1")) to = schema.NewTable("t1"). SetSchema(schema.New("public")). SetCharset("utf8"). 
AddColumns(schema.NewStringColumn("c1", "text")) ) return testcase{ name: "columns", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyColumn{ From: from.Columns[0], To: to.Columns[0], Change: schema.ChangeCharset, }, }, } }(), // Custom CHARSET was added. func() testcase { var ( from = schema.NewTable("t1"). SetSchema(schema.New("public")). SetCharset("utf8"). SetCollation("utf8_general_ci"). AddColumns(schema.NewStringColumn("c1", "text")) to = schema.NewTable("t1"). SetSchema(schema.New("public")). SetCharset("utf8"). AddColumns(schema.NewStringColumn("c1", "text").SetCharset("latin1")) ) return testcase{ name: "columns", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyColumn{ From: from.Columns[0], To: to.Columns[0], Change: schema.ChangeCharset | schema.ChangeCollate, }, }, } }(), // Custom CHARSET was changed. func() testcase { var ( from = schema.NewTable("t1"). SetSchema(schema.New("public")). SetCharset("utf8"). SetCollation("utf8_general_ci"). AddColumns(schema.NewStringColumn("c1", "text").SetCharset("hebrew")) to = schema.NewTable("t1"). SetSchema(schema.New("public")). SetCharset("utf8"). SetCollation("utf8_general_ci"). AddColumns(schema.NewStringColumn("c1", "text").SetCharset("latin1")) ) return testcase{ name: "columns", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyColumn{ From: from.Columns[0], To: to.Columns[0], Change: schema.ChangeCharset | schema.ChangeCollate, }, }, } }(), // Nop CHARSET change. func() testcase { var ( from = schema.NewTable("t1"). SetSchema(schema.New("public")). SetCharset("utf8"). SetCollation("utf8_general_ci"). AddColumns( schema.NewStringColumn("c1", "text").SetCharset("utf8"), schema.NewStringColumn("c2", "text"), ) to = schema.NewTable("t1"). SetSchema(schema.New("public")). SetCollation("utf8_general_ci"). 
AddColumns( schema.NewStringColumn("c1", "text"), schema.NewStringColumn("c2", "text").SetCharset("utf8"), ) ) return testcase{ name: "columns", from: from, to: to, } }(), func() testcase { var ( s = schema.New("public") from = schema.NewTable("t1"). SetSchema(s). AddColumns( schema.NewIntColumn("c1", "int"), schema.NewIntColumn("c2", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"}), schema.NewIntColumn("c3", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1"}), schema.NewIntColumn("c4", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "VIRTUAL"}), schema.NewIntColumn("c5", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "PERSISTENT"}), schema.NewIntColumn("c6", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "PERSISTENT"}), ) to = schema.NewTable("t1"). SetSchema(s). AddColumns( // Add generated expression. schema.NewIntColumn("c1", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"}), // Drop generated expression. schema.NewIntColumn("c2", "int"), // Modify generated expression. schema.NewIntColumn("c3", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "2"}), // No change. schema.NewIntColumn("c4", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1"}), schema.NewIntColumn("c5", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"}), schema.NewIntColumn("c6", "int"). 
SetGeneratedExpr(&schema.GeneratedExpr{Expr: "(1)", Type: "PERSISTENT"}), ) ) return testcase{ name: "modify column generated", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyColumn{From: from.Columns[0], To: to.Columns[0], Change: schema.ChangeGenerated}, &schema.ModifyColumn{From: from.Columns[1], To: to.Columns[1], Change: schema.ChangeGenerated}, &schema.ModifyColumn{From: from.Columns[2], To: to.Columns[2], Change: schema.ChangeGenerated}, }, } }(), func() testcase { var ( from = &schema.Table{ Name: "t1", Schema: &schema.Schema{ Name: "public", }, Columns: []*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}}}, {Name: "c2", Type: &schema.ColumnType{Raw: "tinyint", Type: &schema.IntegerType{T: "tinyint"}}}, {Name: "c3", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, } to = &schema.Table{ Name: "t1", Schema: &schema.Schema{ Name: "public", }, Columns: []*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}}}, {Name: "c2", Type: &schema.ColumnType{Raw: "tinyint", Type: &schema.IntegerType{T: "tinyint"}}}, {Name: "c3", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, } ) from.Indexes = []*schema.Index{ {Name: "c1_index", Unique: true, Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[0]}}}, {Name: "c2_unique", Unique: true, Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}}, {Name: "c1_prefix", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1], Attrs: []schema.Attr{&SubPart{Len: 50}}}}}, {Name: "c1_desc", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}}, } to.Indexes = []*schema.Index{ {Name: "c1_index", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[0]}}}, {Name: "c3_unique", Unique: true, Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: to.Columns[1]}}}, {Name: "c1_prefix", Table: 
from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[0], Attrs: []schema.Attr{&SubPart{Len: 100}}}}}, {Name: "c1_desc", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1], Desc: true}}}, } return testcase{ name: "indexes", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyIndex{From: from.Indexes[0], To: to.Indexes[0], Change: schema.ChangeUnique}, &schema.DropIndex{I: from.Indexes[1]}, &schema.ModifyIndex{From: from.Indexes[2], To: to.Indexes[2], Change: schema.ChangeParts}, &schema.ModifyIndex{From: from.Indexes[3], To: to.Indexes[3], Change: schema.ChangeParts}, &schema.AddIndex{I: to.Indexes[1]}, }, } }(), func() testcase { var ( ref = &schema.Table{ Name: "t2", Schema: &schema.Schema{ Name: "public", }, Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, {Name: "ref_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, } from = &schema.Table{ Name: "t1", Schema: &schema.Schema{ Name: "public", }, Columns: []*schema.Column{ {Name: "t2_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, } to = &schema.Table{ Name: "t1", Schema: &schema.Schema{ Name: "public", }, Columns: []*schema.Column{ {Name: "t2_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, } ) from.ForeignKeys = []*schema.ForeignKey{ {Table: from, Columns: from.Columns, RefTable: ref, RefColumns: ref.Columns[:1]}, } to.ForeignKeys = []*schema.ForeignKey{ {Table: to, Columns: to.Columns, RefTable: ref, RefColumns: ref.Columns[1:]}, } return testcase{ name: "foreign-keys", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyForeignKey{ From: from.ForeignKeys[0], To: to.ForeignKeys[0], Change: schema.ChangeRefColumn, }, }, } }(), } for _, tt := range tests { db, m, err := sqlmock.New() require.NoError(t, err) mock{m}.version("8.0.19") drv, err := Open(db) require.NoError(t, err) t.Run(tt.name, 
func(t *testing.T) { changes, err := drv.TableDiff(tt.from, tt.to) require.Equalf(t, tt.wantErr, err != nil, "error: %q", err) require.EqualValues(t, tt.wantChanges, changes) }) } } func TestDiff_UnsupportedChecks(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) mock{m}.version("5.6.35") drv, err := Open(db) require.NoError(t, err) s := schema.New("public") changes, err := drv.TableDiff( schema.NewTable("t").SetSchema(s), schema.NewTable("t").SetSchema(s).AddChecks(schema.NewCheck()), ) require.Nil(t, changes) require.EqualError(t, err, `version "5.6.35" does not support CHECK constraints`) } func TestDiff_SchemaDiff(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) mock{m}.version("8.0.19") drv, err := Open(db) require.NoError(t, err) changes, err := drv.SchemaDiff(&schema.Schema{Name: "public"}, &schema.Schema{Name: "test"}) require.Error(t, err) require.Nil(t, changes) from := &schema.Schema{ Realm: &schema.Realm{ Attrs: []schema.Attr{ &schema.Collation{V: "latin1"}, }, }, Tables: []*schema.Table{ {Name: "users"}, {Name: "pets"}, }, Attrs: []schema.Attr{ &schema.Collation{V: "latin1"}, }, } to := &schema.Schema{ Tables: []*schema.Table{ { Name: "users", Columns: []*schema.Column{ {Name: "t2_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, }, {Name: "groups"}, }, Attrs: []schema.Attr{ &schema.Collation{V: "utf8"}, }, } from.Tables[0].Schema = from from.Tables[1].Schema = from changes, err = drv.SchemaDiff(from, to) require.NoError(t, err) require.EqualValues(t, []schema.Change{ &schema.ModifySchema{S: to, Changes: []schema.Change{&schema.ModifyAttr{From: from.Attrs[0], To: to.Attrs[0]}}}, &schema.ModifyTable{T: to.Tables[0], Changes: []schema.Change{&schema.AddColumn{C: to.Tables[0].Columns[0]}}}, &schema.DropTable{T: from.Tables[1]}, &schema.AddTable{T: to.Tables[1]}, }, changes) } func TestDiff_RealmDiff(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) 
mock{m}.version("8.0.19") drv, err := Open(db) require.NoError(t, err) from := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "public", Tables: []*schema.Table{ {Name: "users"}, {Name: "pets"}, }, Attrs: []schema.Attr{ &schema.Collation{V: "latin1"}, }, }, { Name: "internal", Tables: []*schema.Table{ {Name: "pets"}, }, }, }, } to := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "public", Tables: []*schema.Table{ { Name: "users", Columns: []*schema.Column{ {Name: "t2_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, }, {Name: "pets"}, }, Attrs: []schema.Attr{ &schema.Collation{V: "utf8"}, }, }, { Name: "test", Tables: []*schema.Table{ {Name: "pets"}, }, }, }, } from.Schemas[0].Realm = from from.Schemas[0].Tables[0].Schema = from.Schemas[0] from.Schemas[0].Tables[1].Schema = from.Schemas[0] to.Schemas[0].Realm = to to.Schemas[0].Tables[0].Schema = to.Schemas[0] changes, err := drv.RealmDiff(from, to) require.NoError(t, err) require.EqualValues(t, []schema.Change{ &schema.ModifySchema{S: to.Schemas[0], Changes: []schema.Change{&schema.ModifyAttr{From: from.Schemas[0].Attrs[0], To: to.Schemas[0].Attrs[0]}}}, &schema.ModifyTable{T: to.Schemas[0].Tables[0], Changes: []schema.Change{&schema.AddColumn{C: to.Schemas[0].Tables[0].Columns[0]}}}, &schema.DropSchema{S: from.Schemas[1]}, &schema.AddSchema{S: to.Schemas[1]}, &schema.AddTable{T: to.Schemas[1].Tables[0]}, }, changes) } atlas-0.7.2/sql/mysql/driver.go000066400000000000000000000255401431455511600164250ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package mysql import ( "context" "fmt" "net/url" "strings" "time" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/mysql/internal/mysqlversion" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlclient" ) type ( // Driver represents a MySQL driver for introspecting database schemas, // generating diff between schema elements and apply migrations changes. Driver struct { conn schema.Differ schema.Inspector migrate.PlanApplier } // database connection and its information. conn struct { schema.ExecQuerier // System variables that are set on `Open`. mysqlversion.V collate string charset string } ) // DriverName holds the name used for registration. const DriverName = "mysql" func init() { sqlclient.Register( DriverName, sqlclient.DriverOpener(Open), sqlclient.RegisterCodec(MarshalHCL, EvalHCL), sqlclient.RegisterFlavours("maria", "mariadb"), sqlclient.RegisterURLParser(parser{}), ) } // Open opens a new MySQL driver. func Open(db schema.ExecQuerier) (migrate.Driver, error) { c := conn{ExecQuerier: db} rows, err := db.QueryContext(context.Background(), variablesQuery) if err != nil { return nil, fmt.Errorf("mysql: query system variables: %w", err) } if err := sqlx.ScanOne(rows, &c.V, &c.collate, &c.charset); err != nil { return nil, fmt.Errorf("mysql: scan system variables: %w", err) } if c.TiDB() { return &Driver{ conn: c, Differ: &sqlx.Diff{DiffDriver: &tdiff{diff{conn: c}}}, Inspector: &tinspect{inspect{c}}, PlanApplier: &tplanApply{planApply{c}}, }, nil } return &Driver{ conn: c, Differ: &sqlx.Diff{DiffDriver: &diff{conn: c}}, Inspector: &inspect{c}, PlanApplier: &planApply{c}, }, nil } func (d *Driver) dev() *sqlx.DevDriver { return &sqlx.DevDriver{Driver: d, MaxNameLen: 64} } // NormalizeRealm returns the normal representation of the given database. 
func (d *Driver) NormalizeRealm(ctx context.Context, r *schema.Realm) (*schema.Realm, error) { return d.dev().NormalizeRealm(ctx, r) } // NormalizeSchema returns the normal representation of the given database. func (d *Driver) NormalizeSchema(ctx context.Context, s *schema.Schema) (*schema.Schema, error) { return d.dev().NormalizeSchema(ctx, s) } // Lock implements the schema.Locker interface. func (d *Driver) Lock(ctx context.Context, name string, timeout time.Duration) (schema.UnlockFunc, error) { conn, err := sqlx.SingleConn(ctx, d.ExecQuerier) if err != nil { return nil, err } if err := acquire(ctx, conn, name, timeout); err != nil { conn.Close() return nil, err } return func() error { defer conn.Close() rows, err := conn.QueryContext(ctx, "SELECT RELEASE_LOCK(?)", name) if err != nil { return err } switch released, err := sqlx.ScanNullBool(rows); { case err != nil: return err case !released.Valid || !released.Bool: return fmt.Errorf("sql/mysql: failed releasing a named lock %q", name) } return nil }, nil } // Snapshot implements migrate.Snapshoter. func (d *Driver) Snapshot(ctx context.Context) (migrate.RestoreFunc, error) { // If the connection is bound to a schema, we can restore the state if the schema has no tables. s, err := d.InspectSchema(ctx, "", nil) if err != nil && !schema.IsNotExistError(err) { return nil, err } // If a schema was found, it has to have no tables attached to be considered clean. if s != nil { if len(s.Tables) > 0 { return nil, migrate.NotCleanError{Reason: fmt.Sprintf("found table %q in schema %q", s.Tables[0].Name, s.Name)} } return func(ctx context.Context) error { current, err := d.InspectSchema(ctx, s.Name, nil) if err != nil { return err } changes, err := d.SchemaDiff(current, s) if err != nil { return err } return d.ApplyChanges(ctx, changes) }, nil } // Otherwise, the database can not have any schema. 
realm, err := d.InspectRealm(ctx, nil) if err != nil { return nil, err } if len(realm.Schemas) > 0 { return nil, migrate.NotCleanError{Reason: fmt.Sprintf("found schema %q", realm.Schemas[0].Name)} } return func(ctx context.Context) error { current, err := d.InspectRealm(ctx, nil) if err != nil { return err } changes, err := d.RealmDiff(current, realm) if err != nil { return err } return d.ApplyChanges(ctx, changes) }, nil } // CheckClean implements migrate.CleanChecker. func (d *Driver) CheckClean(ctx context.Context, revT *migrate.TableIdent) error { s, err := d.InspectSchema(ctx, "", nil) if err != nil && !schema.IsNotExistError(err) { return err } if s != nil { if len(s.Tables) == 0 || (revT != nil && (revT.Schema == "" || s.Name == revT.Schema) && len(s.Tables) == 1 && s.Tables[0].Name == revT.Name) { return nil } return &migrate.NotCleanError{Reason: fmt.Sprintf("found table %q in schema %q", s.Tables[0].Name, s.Name)} } r, err := d.InspectRealm(ctx, nil) if err != nil { return err } switch n := len(r.Schemas); { case n > 1: return migrate.NotCleanError{Reason: fmt.Sprintf("found multiple schemas: %d", len(r.Schemas))} case n == 1 && r.Schemas[0].Name != revT.Schema: return migrate.NotCleanError{Reason: fmt.Sprintf("found schema %q", r.Schemas[0].Name)} case n == 1 && len(r.Schemas[0].Tables) > 1: return migrate.NotCleanError{Reason: fmt.Sprintf("found multiple tables: %d", len(r.Schemas[0].Tables))} case n == 1 && len(r.Schemas[0].Tables) == 1 && r.Schemas[0].Tables[0].Name != revT.Name: return migrate.NotCleanError{Reason: fmt.Sprintf("found table %q", r.Schemas[0].Tables[0].Name)} } return nil } func acquire(ctx context.Context, conn schema.ExecQuerier, name string, timeout time.Duration) error { rows, err := conn.QueryContext(ctx, "SELECT GET_LOCK(?, ?)", name, int(timeout.Seconds())) if err != nil { return err } switch acquired, err := sqlx.ScanNullBool(rows); { case err != nil: return err case !acquired.Valid: // NULL is returned in case of an 
unexpected internal error. return fmt.Errorf("sql/mysql: unexpected internal error on Lock(%q, %s)", name, timeout) case !acquired.Bool: return schema.ErrLocked } return nil } // unescape strings with backslashes returned // for SQL expressions from information schema. func unescape(s string) string { var b strings.Builder for i := 0; i < len(s); i++ { switch c := s[i]; { case c != '\\' || i == len(s)-1: b.WriteByte(c) case s[i+1] == '\'', s[i+1] == '\\': b.WriteByte(s[i+1]) i++ } } return b.String() } type parser struct{} // ParseURL implements the sqlclient.URLParser interface. func (parser) ParseURL(u *url.URL) *sqlclient.URL { v := u.Query() v.Set("parseTime", "true") u.RawQuery = v.Encode() return &sqlclient.URL{URL: u, DSN: dsn(u), Schema: strings.TrimPrefix(u.Path, "/")} } // ChangeSchema implements the sqlclient.SchemaChanger interface. func (parser) ChangeSchema(u *url.URL, s string) *url.URL { nu := *u nu.Path = "/" + s return &nu } // dsn returns the MySQL standard DSN for opening // the sql.DB from the user provided URL. func dsn(u *url.URL) string { var b strings.Builder b.WriteString(u.User.Username()) if p, ok := u.User.Password(); ok { b.WriteByte(':') b.WriteString(p) } if b.Len() > 0 { b.WriteByte('@') } if u.Host != "" { b.WriteString("tcp(") b.WriteString(u.Host) b.WriteByte(')') } if u.Path != "" { b.WriteString(u.Path) } else { b.WriteByte('/') } if u.RawQuery != "" { b.WriteByte('?') b.WriteString(u.RawQuery) } return b.String() } // MySQL standard column types as defined in its codebase. Name and order // is organized differently than MySQL. 
// // https://github.com/mysql/mysql-server/blob/8.0/include/field_types.h // https://github.com/mysql/mysql-server/blob/8.0/sql/dd/types/column.h // https://github.com/mysql/mysql-server/blob/8.0/sql/sql_show.cc // https://github.com/mysql/mysql-server/blob/8.0/sql/gis/geometries.cc // https://dev.mysql.com/doc/refman/8.0/en/other-vendor-data-types.html const ( TypeBool = "bool" TypeBoolean = "boolean" TypeBit = "bit" // MYSQL_TYPE_BIT TypeInt = "int" // MYSQL_TYPE_LONG TypeTinyInt = "tinyint" // MYSQL_TYPE_TINY TypeSmallInt = "smallint" // MYSQL_TYPE_SHORT TypeMediumInt = "mediumint" // MYSQL_TYPE_INT24 TypeBigInt = "bigint" // MYSQL_TYPE_LONGLONG TypeDecimal = "decimal" // MYSQL_TYPE_DECIMAL TypeNumeric = "numeric" // MYSQL_TYPE_DECIMAL (numeric_type rule in sql_yacc.yy) TypeFloat = "float" // MYSQL_TYPE_FLOAT TypeDouble = "double" // MYSQL_TYPE_DOUBLE TypeReal = "real" // MYSQL_TYPE_FLOAT or MYSQL_TYPE_DOUBLE (real_type in sql_yacc.yy) TypeTimestamp = "timestamp" // MYSQL_TYPE_TIMESTAMP TypeDate = "date" // MYSQL_TYPE_DATE TypeTime = "time" // MYSQL_TYPE_TIME TypeDateTime = "datetime" // MYSQL_TYPE_DATETIME TypeYear = "year" // MYSQL_TYPE_YEAR TypeVarchar = "varchar" // MYSQL_TYPE_VAR_STRING, MYSQL_TYPE_VARCHAR TypeChar = "char" // MYSQL_TYPE_STRING TypeVarBinary = "varbinary" // MYSQL_TYPE_VAR_STRING + NULL CHARACTER_SET. TypeBinary = "binary" // MYSQL_TYPE_STRING + NULL CHARACTER_SET. 
TypeBlob = "blob" // MYSQL_TYPE_BLOB TypeTinyBlob = "tinyblob" // MYSQL_TYPE_TINYBLOB TypeMediumBlob = "mediumblob" // MYSQL_TYPE_MEDIUM_BLOB TypeLongBlob = "longblob" // MYSQL_TYPE_LONG_BLOB TypeText = "text" // MYSQL_TYPE_BLOB + CHARACTER_SET utf8mb4 TypeTinyText = "tinytext" // MYSQL_TYPE_TINYBLOB + CHARACTER_SET utf8mb4 TypeMediumText = "mediumtext" // MYSQL_TYPE_MEDIUM_BLOB + CHARACTER_SET utf8mb4 TypeLongText = "longtext" // MYSQL_TYPE_LONG_BLOB with + CHARACTER_SET utf8mb4 TypeEnum = "enum" // MYSQL_TYPE_ENUM TypeSet = "set" // MYSQL_TYPE_SET TypeJSON = "json" // MYSQL_TYPE_JSON TypeGeometry = "geometry" // MYSQL_TYPE_GEOMETRY TypePoint = "point" // Geometry_type::kPoint TypeMultiPoint = "multipoint" // Geometry_type::kMultipoint TypeLineString = "linestring" // Geometry_type::kLinestring TypeMultiLineString = "multilinestring" // Geometry_type::kMultilinestring TypePolygon = "polygon" // Geometry_type::kPolygon TypeMultiPolygon = "multipolygon" // Geometry_type::kMultipolygon TypeGeoCollection = "geomcollection" // Geometry_type::kGeometrycollection TypeGeometryCollection = "geometrycollection" // Geometry_type::kGeometrycollection ) // Additional common constants in MySQL. const ( IndexTypeBTree = "BTREE" IndexTypeHash = "HASH" IndexTypeFullText = "FULLTEXT" IndexTypeSpatial = "SPATIAL" currentTS = "current_timestamp" defaultGen = "default_generated" autoIncrement = "auto_increment" virtual = "VIRTUAL" stored = "STORED" persistent = "PERSISTENT" ) atlas-0.7.2/sql/mysql/driver_test.go000066400000000000000000000140541431455511600174620ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package mysql import ( "context" "database/sql" "net/url" "testing" "time" "ariga.io/atlas/sql/internal/sqltest" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/DATA-DOG/go-sqlmock" "github.com/stretchr/testify/require" ) func TestParser_ParseURLParseTime(t *testing.T) { u, err := url.Parse("mysql://user:pass@localhost:3306/my_db?foo=bar") require.NoError(t, err) ac := parser{}.ParseURL(u) require.Equal(t, "true", ac.Query().Get("parseTime")) } func TestDriver_LockAcquired(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) lock := func(l schema.Locker) { name, sec := "name", 1 m.ExpectQuery(sqltest.Escape("SELECT GET_LOCK(?, ?)")). WithArgs(name, sec). WillReturnRows(sqlmock.NewRows([]string{"acquired"}).AddRow(1)). RowsWillBeClosed() m.ExpectQuery(sqltest.Escape("SELECT RELEASE_LOCK(?)")). WithArgs(name). WillReturnRows(sqlmock.NewRows([]string{"released"}).AddRow(1)). RowsWillBeClosed() unlock, err := l.Lock(context.Background(), name, time.Second) require.NoError(t, err) require.NoError(t, unlock()) } t.Run("OnPool", func(t *testing.T) { d := &Driver{} d.ExecQuerier = &mockOpener{DB: db} lock(d) require.EqualValues(t, 1, d.ExecQuerier.(*mockOpener).opened) }) t.Run("OnConn", func(t *testing.T) { conn, err := db.Conn(context.Background()) require.NoError(t, err) d := &Driver{} d.ExecQuerier = conn lock(d) }) t.Run("OnTx", func(t *testing.T) { m.ExpectBegin() tx, err := db.Begin() require.NoError(t, err) d := &Driver{} d.ExecQuerier = tx lock(d) }) require.NoError(t, m.ExpectationsWereMet()) } func TestDriver_LockError(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) d := &Driver{} d.ExecQuerier = db t.Run("Timeout", func(t *testing.T) { name, sec := "name", 60 m.ExpectQuery(sqltest.Escape("SELECT GET_LOCK(?, ?)")). WithArgs(name, sec). WillReturnRows(sqlmock.NewRows([]string{"acquired"}).AddRow(0)). 
RowsWillBeClosed() unlock, err := d.Lock(context.Background(), name, time.Minute) require.Equal(t, schema.ErrLocked, err) require.Nil(t, unlock) }) t.Run("Internal", func(t *testing.T) { name, sec := "migrate", -1 m.ExpectQuery(sqltest.Escape("SELECT GET_LOCK(?, ?)")). WithArgs(name, sec). WillReturnRows(sqlmock.NewRows([]string{"acquired"}).AddRow(nil)). RowsWillBeClosed() unlock, err := d.Lock(context.Background(), name, -time.Second) require.EqualError(t, err, `sql/mysql: unexpected internal error on Lock("migrate", -1s)`) require.Nil(t, unlock) }) } func TestDriver_UnlockError(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) d := &Driver{} d.ExecQuerier = db acquired := func(name string, sec int) { m.ExpectQuery(sqltest.Escape("SELECT GET_LOCK(?, ?)")). WithArgs(name, sec). WillReturnRows(sqlmock.NewRows([]string{"acquired"}).AddRow(1)). RowsWillBeClosed() } t.Run("NotHeld", func(t *testing.T) { name, sec := "unknown_lock", 0 acquired(name, sec) unlock, err := d.Lock(context.Background(), name, time.Millisecond) require.NoError(t, err) m.ExpectQuery(sqltest.Escape("SELECT RELEASE_LOCK(?)")). WithArgs(name). WillReturnRows(sqlmock.NewRows([]string{"released"}).AddRow(0)). RowsWillBeClosed() require.Error(t, unlock()) }) t.Run("Internal", func(t *testing.T) { name, sec := "unknown_error", 1 acquired(name, sec) unlock, err := d.Lock(context.Background(), name, time.Second+time.Millisecond) require.NoError(t, err) m.ExpectQuery(sqltest.Escape("SELECT RELEASE_LOCK(?)")). WithArgs(name). WillReturnRows(sqlmock.NewRows([]string{"released"}).AddRow(nil)). RowsWillBeClosed() require.Error(t, unlock()) }) } func TestDriver_CheckClean(t *testing.T) { s := schema.New("test") drv := &Driver{Inspector: &mockInspector{schema: s}} // Empty schema. err := drv.CheckClean(context.Background(), nil) require.NoError(t, err) // Revisions table found. 
s.AddTables(schema.NewTable("revisions")) err = drv.CheckClean(context.Background(), &migrate.TableIdent{Name: "revisions", Schema: "test"}) require.NoError(t, err) // Multiple tables. s.Tables = []*schema.Table{schema.NewTable("a"), schema.NewTable("revisions")} err = drv.CheckClean(context.Background(), &migrate.TableIdent{Name: "revisions", Schema: "test"}) require.EqualError(t, err, `sql/migrate: connected database is not clean: found table "a" in schema "test"`) r := schema.NewRealm() drv.Inspector = &mockInspector{realm: r} // Empty realm. err = drv.CheckClean(context.Background(), nil) require.NoError(t, err) // Revisions table found. s.Tables = []*schema.Table{schema.NewTable("revisions")} r.AddSchemas(s) err = drv.CheckClean(context.Background(), &migrate.TableIdent{Name: "revisions", Schema: "test"}) require.NoError(t, err) // Unknown table. s.Tables[0].Name = "unknown" err = drv.CheckClean(context.Background(), &migrate.TableIdent{Schema: "test", Name: "revisions"}) require.EqualError(t, err, `sql/migrate: connected database is not clean: found table "unknown"`) // Multiple tables. 
s.Tables = []*schema.Table{schema.NewTable("a"), schema.NewTable("revisions")} err = drv.CheckClean(context.Background(), &migrate.TableIdent{Schema: "test", Name: "revisions"}) require.EqualError(t, err, `sql/migrate: connected database is not clean: found multiple tables: 2`) } type mockInspector struct { schema.Inspector realm *schema.Realm schema *schema.Schema } func (m *mockInspector) InspectSchema(context.Context, string, *schema.InspectOptions) (*schema.Schema, error) { if m.schema == nil { return nil, &schema.NotExistError{} } return m.schema, nil } func (m *mockInspector) InspectRealm(context.Context, *schema.InspectRealmOption) (*schema.Realm, error) { return m.realm, nil } type mockOpener struct { *sql.DB opened uint } func (m *mockOpener) Conn(ctx context.Context) (*sql.Conn, error) { m.opened++ return m.DB.Conn(ctx) } atlas-0.7.2/sql/mysql/inspect.go000066400000000000000000000647071431455511600166070ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package mysql import ( "context" "database/sql" "fmt" "regexp" "strconv" "strings" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" ) // A diff provides a MySQL implementation for schema.Inspector. type inspect struct{ conn } var _ schema.Inspector = (*inspect)(nil) // InspectRealm returns schema descriptions of all resources in the given realm. 
func (i *inspect) InspectRealm(ctx context.Context, opts *schema.InspectRealmOption) (*schema.Realm, error) { schemas, err := i.schemas(ctx, opts) if err != nil { return nil, err } if opts == nil { opts = &schema.InspectRealmOption{} } r := schema.NewRealm(schemas...).SetCharset(i.charset).SetCollation(i.collate) if len(schemas) == 0 || !sqlx.ModeInspectRealm(opts).Is(schema.InspectTables) { return r, nil } if err := i.inspectTables(ctx, r, nil); err != nil { return nil, err } sqlx.LinkSchemaTables(schemas) return sqlx.ExcludeRealm(r, opts.Exclude) } // InspectSchema returns schema descriptions of the tables in the given schema. // If the schema name is empty, the result will be the attached schema. func (i *inspect) InspectSchema(ctx context.Context, name string, opts *schema.InspectOptions) (*schema.Schema, error) { schemas, err := i.schemas(ctx, &schema.InspectRealmOption{Schemas: []string{name}}) if err != nil { return nil, err } switch n := len(schemas); { case n == 0: return nil, &schema.NotExistError{Err: fmt.Errorf("mysql: schema %q was not found", name)} case n > 1: return nil, fmt.Errorf("mysql: %d schemas were found for %q", n, name) } if opts == nil { opts = &schema.InspectOptions{} } r := schema.NewRealm(schemas...).SetCharset(i.charset).SetCollation(i.collate) if sqlx.ModeInspectSchema(opts).Is(schema.InspectTables) { if err := i.inspectTables(ctx, r, opts); err != nil { return nil, err } sqlx.LinkSchemaTables(schemas) } return sqlx.ExcludeSchema(r.Schemas[0], opts.Exclude) } func (i *inspect) inspectTables(ctx context.Context, r *schema.Realm, opts *schema.InspectOptions) error { if err := i.tables(ctx, r, opts); err != nil { return err } for _, s := range r.Schemas { if len(s.Tables) == 0 { continue } if err := i.columns(ctx, s); err != nil { return err } if err := i.indexes(ctx, s); err != nil { return err } if err := i.fks(ctx, s); err != nil { return err } if err := i.checks(ctx, s); err != nil { return err } if err := i.showCreate(ctx, s); err 
!= nil { return err } } return nil } // schemas returns the list of the schemas in the database. func (i *inspect) schemas(ctx context.Context, opts *schema.InspectRealmOption) ([]*schema.Schema, error) { var ( args []any query = schemasQuery ) if opts != nil { switch n := len(opts.Schemas); { case n == 1 && opts.Schemas[0] == "": query = fmt.Sprintf(schemasQueryArgs, "= SCHEMA()") case n == 1 && opts.Schemas[0] != "": query = fmt.Sprintf(schemasQueryArgs, "= ?") args = append(args, opts.Schemas[0]) case n > 0: query = fmt.Sprintf(schemasQueryArgs, "IN ("+nArgs(len(opts.Schemas))+")") for _, s := range opts.Schemas { args = append(args, s) } } } rows, err := i.QueryContext(ctx, query, args...) if err != nil { return nil, fmt.Errorf("mysql: querying schemas: %w", err) } defer rows.Close() var schemas []*schema.Schema for rows.Next() { var name, charset, collation string if err := rows.Scan(&name, &charset, &collation); err != nil { return nil, err } schemas = append(schemas, &schema.Schema{ Name: name, Attrs: []schema.Attr{ &schema.Charset{ V: charset, }, &schema.Collation{ V: collation, }, }, }) } return schemas, nil } func (i *inspect) tables(ctx context.Context, realm *schema.Realm, opts *schema.InspectOptions) error { var ( args []any query = fmt.Sprintf(tablesQuery, nArgs(len(realm.Schemas))) ) for _, s := range realm.Schemas { args = append(args, s.Name) } if opts != nil && len(opts.Tables) > 0 { for _, t := range opts.Tables { args = append(args, t) } query = fmt.Sprintf(tablesQueryArgs, nArgs(len(realm.Schemas)), nArgs(len(opts.Tables))) } rows, err := i.QueryContext(ctx, query, args...) 
if err != nil { return err } defer rows.Close() for rows.Next() { var ( autoinc sql.NullInt64 tSchema, name, charset, collation, comment, options sql.NullString ) if err := rows.Scan(&tSchema, &name, &charset, &collation, &autoinc, &comment, &options); err != nil { return fmt.Errorf("scan table information: %w", err) } if !sqlx.ValidString(tSchema) || !sqlx.ValidString(name) { return fmt.Errorf("invalid schema or table name: %q.%q", tSchema.String, name.String) } s, ok := realm.Schema(tSchema.String) if !ok { return fmt.Errorf("schema %q was not found in realm", tSchema.String) } t := &schema.Table{Name: name.String} s.AddTables(t) if sqlx.ValidString(charset) { t.Attrs = append(t.Attrs, &schema.Charset{ V: charset.String, }) } if sqlx.ValidString(collation) { t.Attrs = append(t.Attrs, &schema.Collation{ V: collation.String, }) } if sqlx.ValidString(comment) { t.Attrs = append(t.Attrs, &schema.Comment{ Text: comment.String, }) } if sqlx.ValidString(options) { t.Attrs = append(t.Attrs, &CreateOptions{ V: options.String, }) } if autoinc.Valid { t.Attrs = append(t.Attrs, &AutoIncrement{ V: autoinc.Int64, }) } } return rows.Close() } // columns queries and appends the columns of the given table. func (i *inspect) columns(ctx context.Context, s *schema.Schema) error { query := columnsQuery if i.SupportsGeneratedColumns() { query = columnsExprQuery } rows, err := i.querySchema(ctx, query, s) if err != nil { return fmt.Errorf("mysql: query schema %q columns: %w", s.Name, err) } defer rows.Close() for rows.Next() { if err := i.addColumn(s, rows); err != nil { return fmt.Errorf("mysql: %w", err) } } return rows.Err() } // addColumn scans the current row and adds a new column from it to the table. 
func (i *inspect) addColumn(s *schema.Schema, rows *sql.Rows) error { var table, name, typ, comment, nullable, key, defaults, extra, charset, collation, expr sql.NullString if err := rows.Scan(&table, &name, &typ, &comment, &nullable, &key, &defaults, &extra, &charset, &collation, &expr); err != nil { return err } t, ok := s.Table(table.String) if !ok { return fmt.Errorf("table %q was not found in schema", table.String) } c := &schema.Column{ Name: name.String, Type: &schema.ColumnType{ Raw: typ.String, Null: nullable.String == "YES", }, } ct, err := ParseType(c.Type.Raw) if err != nil { return err } c.Type.Type = ct attr, err := parseExtra(extra.String) if err != nil { return err } if attr.autoinc { a := &AutoIncrement{} if !sqlx.Has(t.Attrs, a) { // A table can have only one AUTO_INCREMENT column. If it was returned as NULL // from INFORMATION_SCHEMA, it is due to information_schema_stats_expiry, and // we need to extract it from the 'CREATE TABLE' command. putShow(t).auto = a } c.Attrs = append(c.Attrs, a) } if attr.onUpdate != "" { c.Attrs = append(c.Attrs, &OnUpdate{A: attr.onUpdate}) } if x := expr.String; x != "" { if !i.Maria() { x = unescape(x) } c.SetGeneratedExpr(&schema.GeneratedExpr{Expr: x, Type: attr.generatedType}) } if defaults.Valid { if i.Maria() { c.Default = i.marDefaultExpr(c, defaults.String) } else { c.Default = i.myDefaultExpr(c, defaults.String, attr) } } if sqlx.ValidString(comment) { c.SetComment(comment.String) } if sqlx.ValidString(charset) { c.SetCharset(charset.String) } if sqlx.ValidString(collation) { c.SetCollation(collation.String) } t.AddColumns(c) // From MySQL doc: A UNIQUE index may be displayed as "PRI" if it is NOT NULL // and there is no PRIMARY KEY in the table. We detect this in `addIndexes`. 
if key.String == "PRI" { if t.PrimaryKey == nil { t.PrimaryKey = &schema.Index{Table: t, Name: key.String} } t.PrimaryKey.Parts = append(t.PrimaryKey.Parts, &schema.IndexPart{ C: c, SeqNo: len(t.PrimaryKey.Parts), }) } return nil } // indexes queries and appends the indexes of the given table. func (i *inspect) indexes(ctx context.Context, s *schema.Schema) error { query := i.indexQuery() rows, err := i.querySchema(ctx, query, s) if err != nil { return fmt.Errorf("mysql: query schema %q indexes: %w", s.Name, err) } defer rows.Close() if err := i.addIndexes(s, rows); err != nil { return err } return rows.Err() } // addIndexes scans the rows and adds the indexes to the table. func (i *inspect) addIndexes(s *schema.Schema, rows *sql.Rows) error { hasPK := make(map[*schema.Table]bool) for rows.Next() { var ( seqno int table, name, indexType string nonuniq, desc sql.NullBool column, subPart, expr, comment sql.NullString ) if err := rows.Scan(&table, &name, &column, &nonuniq, &seqno, &indexType, &desc, &comment, &subPart, &expr); err != nil { return fmt.Errorf("mysql: scanning indexes for schema %q: %w", s.Name, err) } t, ok := s.Table(table) if !ok { return fmt.Errorf("table %q was not found in schema", table) } // Ignore primary keys. if name == "PRIMARY" { hasPK[t] = true continue } idx, ok := t.Index(name) if !ok { idx = &schema.Index{ Name: name, Unique: !nonuniq.Bool, Table: t, Attrs: []schema.Attr{ &IndexType{T: indexType}, }, } if sqlx.ValidString(comment) { idx.Attrs = append(t.Attrs, &schema.Comment{ Text: comment.String, }) } t.Indexes = append(t.Indexes, idx) } // Rows are ordered by SEQ_IN_INDEX that specifies the // position of the column in the index definition. 
part := &schema.IndexPart{SeqNo: seqno, Desc: desc.Bool} switch { case sqlx.ValidString(expr): part.X = &schema.RawExpr{X: unescape(expr.String)} case sqlx.ValidString(column): part.C, ok = t.Column(column.String) if !ok { return fmt.Errorf("mysql: column %q was not found for index %q", column.String, idx.Name) } if sqlx.ValidString(subPart) { n, err := strconv.Atoi(subPart.String) if err != nil { return fmt.Errorf("mysql: parse index prefix size %q: %w", subPart.String, err) } part.Attrs = append(part.Attrs, &SubPart{ Len: n, }) } part.C.Indexes = append(part.C.Indexes, idx) default: return fmt.Errorf("mysql: invalid part for index %q", idx.Name) } idx.Parts = append(idx.Parts, part) } for _, t := range s.Tables { if !hasPK[t] && t.PrimaryKey != nil { t.PrimaryKey = nil } } return nil } // fks queries and appends the foreign keys of the given table. func (i *inspect) fks(ctx context.Context, s *schema.Schema) error { rows, err := i.querySchema(ctx, fksQuery, s) if err != nil { return fmt.Errorf("mysql: querying %q foreign keys: %w", s.Name, err) } defer rows.Close() if err := sqlx.SchemaFKs(s, rows); err != nil { return fmt.Errorf("mysql: %w", err) } return rows.Err() } // checks queries and appends the check constraints of the given table. func (i *inspect) checks(ctx context.Context, s *schema.Schema) error { query, ok := i.supportsCheck() if !ok { return nil } rows, err := i.querySchema(ctx, query, s) if err != nil { return fmt.Errorf("mysql: querying %q check constraints: %w", s.Name, err) } defer rows.Close() for rows.Next() { var table, name, clause, enforced sql.NullString if err := rows.Scan(&table, &name, &clause, &enforced); err != nil { return fmt.Errorf("mysql: %w", err) } t, ok := s.Table(table.String) if !ok { return fmt.Errorf("table %q was not found in schema", table.String) } check := &schema.Check{ Name: name.String, Expr: unescape(clause.String), } if i.Maria() { check.Expr = clause.String // In MariaDB, JSON is an alias to LONGTEXT. 
For versions >= 10.4.3, the CHARSET and COLLATE set to utf8mb4 // and a CHECK constraint is automatically created for the column as well (i.e. JSON_VALID(``)). However, // we expect tools like Atlas and Ent to manually add this CHECK for older versions of MariaDB. c, ok := t.Column(check.Name) if ok && c.Type.Raw == TypeLongText && check.Expr == fmt.Sprintf("json_valid(`%s`)", c.Name) { c.Type.Raw = TypeJSON c.Type.Type = &schema.JSONType{T: TypeJSON} // Unset the inspected CHARSET/COLLATE attributes // as they are valid only for character types. c.UnsetCharset().UnsetCollation() } } else if enforced.String == "NO" { // The ENFORCED attribute is not supported by MariaDB. // Also, skip adding it in case the CHECK is ENFORCED, // as the default is ENFORCED if not state otherwise. check.Attrs = append(check.Attrs, &Enforced{V: false}) } t.Attrs = append(t.Attrs, check) } return rows.Err() } // supportsCheck reports if the connected database supports // the CHECK clause, and return the querying for getting them. func (i *inspect) supportsCheck() (string, bool) { q := myChecksQuery if i.Maria() { q = marChecksQuery } return q, i.SupportsCheck() } // indexQuery returns the query to retrieve the indexes of the given table. func (i *inspect) indexQuery() string { query := indexesNoCommentQuery if i.SupportsIndexComment() { query = indexesQuery } if i.SupportsIndexExpr() { query = indexesExprQuery } return query } // extraAttr is a parsed version of the information_schema EXTRA column. type extraAttr struct { autoinc bool onUpdate string generatedType string defaultGenerated bool } var ( reGenerateType = regexp.MustCompile(`(?i)^(stored|persistent|virtual) generated$`) reTimeOnUpdate = regexp.MustCompile(`(?i)^(?:default_generated )?on update (current_timestamp(?:\(\d?\))?)$`) ) // parseExtra returns a parsed version of the EXTRA column // from the INFORMATION_SCHEMA.COLUMNS table. 
func parseExtra(extra string) (*extraAttr, error) { attr := &extraAttr{} switch el := strings.ToLower(extra); { case el == "", el == "null": case el == defaultGen: attr.defaultGenerated = true // The column has an expression default value, // and it is handled in Driver.addColumn. case el == autoIncrement: attr.autoinc = true case reTimeOnUpdate.MatchString(extra): attr.onUpdate = reTimeOnUpdate.FindStringSubmatch(extra)[1] case reGenerateType.MatchString(extra): attr.generatedType = reGenerateType.FindStringSubmatch(extra)[1] default: return nil, fmt.Errorf("unknown extra column attribute %q", extra) } return attr, nil } // showCreate sets and fixes schema elements that require information from // the 'SHOW CREATE' command. func (i *inspect) showCreate(ctx context.Context, s *schema.Schema) error { for _, t := range s.Tables { st, ok := popShow(t) if !ok { continue } if err := i.createStmt(ctx, t); err != nil { return err } if err := i.setAutoInc(st, t); err != nil { return err } } return nil } var reAutoinc = regexp.MustCompile(`(?i)\s*AUTO_INCREMENT\s*=\s*(\d+)\s*`) // setAutoInc extracts the updated AUTO_INCREMENT from CREATE TABLE. func (i *inspect) setAutoInc(s *showTable, t *schema.Table) error { if s.auto == nil { return nil } var c CreateStmt if !sqlx.Has(t.Attrs, &c) { return fmt.Errorf("missing CREATE TABLE statement in attributes for %q", t.Name) } if sqlx.Has(t.Attrs, &AutoIncrement{}) { return fmt.Errorf("unexpected AUTO_INCREMENT attributes for table: %q", t.Name) } matches := reAutoinc.FindStringSubmatch(c.S) if len(matches) != 2 { return nil } v, err := strconv.ParseInt(matches[1], 10, 64) if err != nil { return err } s.auto.V = v t.Attrs = append(t.Attrs, s.auto) return nil } // createStmt loads the CREATE TABLE statement for the table. 
func (i *inspect) createStmt(ctx context.Context, t *schema.Table) error { c := &CreateStmt{} b := &sqlx.Builder{QuoteChar: '`'} rows, err := i.QueryContext(ctx, b.P("SHOW CREATE TABLE").Table(t).String()) if err != nil { return fmt.Errorf("query CREATE TABLE %q: %w", t.Name, err) } if err := sqlx.ScanOne(rows, &sql.NullString{}, &c.S); err != nil { return fmt.Errorf("scan CREATE TABLE %q: %w", t.Name, err) } t.Attrs = append(t.Attrs, c) return nil } var reCurrTimestamp = regexp.MustCompile(`(?i)^current_timestamp(?:\(\d?\))?$`) // myDefaultExpr returns the correct schema.Expr based on the column attributes for MySQL. func (i *inspect) myDefaultExpr(c *schema.Column, x string, attr *extraAttr) schema.Expr { // In MySQL, the DEFAULT_GENERATED indicates the column has an expression default value. if i.SupportsExprDefault() && attr.defaultGenerated { // Skip CURRENT_TIMESTAMP, because wrapping it with parens will translate it to now(). if _, ok := c.Type.Type.(*schema.TimeType); ok && reCurrTimestamp.MatchString(x) { return &schema.RawExpr{X: x} } return &schema.RawExpr{X: sqlx.MayWrap(unescape(x))} } switch c.Type.Type.(type) { case *schema.BinaryType: // MySQL v8 uses Hexadecimal representation. if isHex(x) { return &schema.Literal{V: x} } case *BitType, *schema.BoolType, *schema.IntegerType, *schema.DecimalType, *schema.FloatType: return &schema.Literal{V: x} case *schema.TimeType: // "current_timestamp" is exceptional in old versions // of MySQL for timestamp and datetime data types. if reCurrTimestamp.MatchString(x) { return &schema.RawExpr{X: x} } } return &schema.Literal{V: quote(x)} } // parseColumn returns column parts, size and signed-info from a MySQL type. 
func parseColumn(typ string) (parts []string, size int, unsigned bool, err error) { switch parts = strings.FieldsFunc(typ, func(r rune) bool { return r == '(' || r == ')' || r == ' ' || r == ',' }); parts[0] { case TypeTinyInt, TypeSmallInt, TypeMediumInt, TypeInt, TypeBigInt, TypeDecimal, TypeNumeric, TypeFloat, TypeDouble, TypeReal: if attr := parts[len(parts)-1]; attr == "unsigned" || attr == "zerofill" { unsigned = true } if len(parts) > 2 || len(parts) == 2 && !unsigned { size, err = strconv.Atoi(parts[1]) } case TypeBit, TypeBinary, TypeVarBinary, TypeChar, TypeVarchar: if len(parts) > 1 { size, err = strconv.Atoi(parts[1]) } } if err != nil { return nil, 0, false, fmt.Errorf("parse %q to int: %w", parts[1], err) } return parts, size, unsigned, nil } // hasNumericDefault reports if the given type has a numeric default value. func hasNumericDefault(t schema.Type) bool { switch t.(type) { case *BitType, *schema.BoolType, *schema.IntegerType, *schema.DecimalType, *schema.FloatType: return true } return false } func isHex(x string) bool { return len(x) > 2 && strings.ToLower(x[:2]) == "0x" } // marDefaultExpr returns the correct schema.Expr based on the column attributes for MariaDB. func (i *inspect) marDefaultExpr(c *schema.Column, x string) schema.Expr { // Unlike MySQL, NULL means default to NULL or no default. if x == "NULL" { return nil } // From MariaDB 10.2.7, string-based literals are quoted to distinguish them from expressions. if i.GTE("10.2.7") && sqlx.IsQuoted(x, '\'') { return &schema.Literal{V: x} } // In this case, we need to manually check if the expression is literal, or fallback to raw expression. switch c.Type.Type.(type) { case *BitType: // Bit literal values. See https://mariadb.com/kb/en/binary-literals. 
if strings.HasPrefix(x, "b'") && strings.HasSuffix(x, "'") { return &schema.Literal{V: x} } case *schema.BoolType, *schema.IntegerType, *schema.DecimalType, *schema.FloatType: if _, err := strconv.ParseFloat(x, 64); err == nil { return &schema.Literal{V: x} } case *schema.TimeType: // "current_timestamp" is exceptional in old versions // of MySQL (i.e. MariaDB in this case). if strings.ToLower(x) == currentTS { return &schema.RawExpr{X: x} } } if !i.SupportsExprDefault() { return &schema.Literal{V: quote(x)} } return &schema.RawExpr{X: sqlx.MayWrap(x)} } func (i *inspect) querySchema(ctx context.Context, query string, s *schema.Schema) (*sql.Rows, error) { args := []any{s.Name} for _, t := range s.Tables { args = append(args, t.Name) } return i.QueryContext(ctx, fmt.Sprintf(query, nArgs(len(s.Tables))), args...) } func nArgs(n int) string { return strings.Repeat("?, ", n-1) + "?" } const ( // Query to list system variables. variablesQuery = "SELECT @@version, @@collation_server, @@character_set_server" // Query to list database schemas. schemasQuery = "SELECT `SCHEMA_NAME`, `DEFAULT_CHARACTER_SET_NAME`, `DEFAULT_COLLATION_NAME` from `INFORMATION_SCHEMA`.`SCHEMATA` WHERE `SCHEMA_NAME` NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys') ORDER BY `SCHEMA_NAME`" // Query to list specific database schemas. schemasQueryArgs = "SELECT `SCHEMA_NAME`, `DEFAULT_CHARACTER_SET_NAME`, `DEFAULT_COLLATION_NAME` from `INFORMATION_SCHEMA`.`SCHEMATA` WHERE `SCHEMA_NAME` %s ORDER BY `SCHEMA_NAME`" // Query to list table columns. columnsQuery = "SELECT `TABLE_NAME`, `COLUMN_NAME`, `COLUMN_TYPE`, `COLUMN_COMMENT`, `IS_NULLABLE`, `COLUMN_KEY`, `COLUMN_DEFAULT`, `EXTRA`, `CHARACTER_SET_NAME`, `COLLATION_NAME`, NULL AS `GENERATION_EXPRESSION` FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE `TABLE_SCHEMA` = ? 
AND `TABLE_NAME` IN (%s) ORDER BY `ORDINAL_POSITION`" columnsExprQuery = "SELECT `TABLE_NAME`, `COLUMN_NAME`, `COLUMN_TYPE`, `COLUMN_COMMENT`, `IS_NULLABLE`, `COLUMN_KEY`, `COLUMN_DEFAULT`, `EXTRA`, `CHARACTER_SET_NAME`, `COLLATION_NAME`, `GENERATION_EXPRESSION` FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE `TABLE_SCHEMA` = ? AND `TABLE_NAME` IN (%s) ORDER BY `ORDINAL_POSITION`" // Query to list table indexes. indexesQuery = "SELECT `TABLE_NAME`, `INDEX_NAME`, `COLUMN_NAME`, `NON_UNIQUE`, `SEQ_IN_INDEX`, `INDEX_TYPE`, UPPER(`COLLATION`) = 'D' AS `DESC`, `INDEX_COMMENT`, `SUB_PART`, NULL AS `EXPRESSION` FROM `INFORMATION_SCHEMA`.`STATISTICS` WHERE `TABLE_SCHEMA` = ? AND `TABLE_NAME` IN (%s) ORDER BY `index_name`, `seq_in_index`" indexesExprQuery = "SELECT `TABLE_NAME`, `INDEX_NAME`, `COLUMN_NAME`, `NON_UNIQUE`, `SEQ_IN_INDEX`, `INDEX_TYPE`, UPPER(`COLLATION`) = 'D' AS `DESC`, `INDEX_COMMENT`, `SUB_PART`, `EXPRESSION` FROM `INFORMATION_SCHEMA`.`STATISTICS` WHERE `TABLE_SCHEMA` = ? AND `TABLE_NAME` IN (%s) ORDER BY `index_name`, `seq_in_index`" indexesNoCommentQuery = "SELECT `TABLE_NAME`, `INDEX_NAME`, `COLUMN_NAME`, `NON_UNIQUE`, `SEQ_IN_INDEX`, `INDEX_TYPE`, UPPER(`COLLATION`) = 'D' AS `DESC`, NULL AS `INDEX_COMMENT`, `SUB_PART`, NULL AS `EXPRESSION` FROM `INFORMATION_SCHEMA`.`STATISTICS` WHERE `TABLE_SCHEMA` = ? 
AND `TABLE_NAME` IN (%s) ORDER BY `index_name`, `seq_in_index`" tablesQuery = ` SELECT t1.TABLE_SCHEMA, t1.TABLE_NAME, t2.CHARACTER_SET_NAME, t1.TABLE_COLLATION, t1.AUTO_INCREMENT, t1.TABLE_COMMENT, t1.CREATE_OPTIONS FROM INFORMATION_SCHEMA.TABLES AS t1 LEFT JOIN INFORMATION_SCHEMA.COLLATIONS AS t2 ON t1.TABLE_COLLATION = t2.COLLATION_NAME WHERE TABLE_SCHEMA IN (%s) ORDER BY TABLE_SCHEMA, TABLE_NAME ` tablesQueryArgs = ` SELECT t1.TABLE_SCHEMA, t1.TABLE_NAME, t2.CHARACTER_SET_NAME, t1.TABLE_COLLATION, t1.AUTO_INCREMENT, t1.TABLE_COMMENT, t1.CREATE_OPTIONS FROM INFORMATION_SCHEMA.TABLES AS t1 JOIN INFORMATION_SCHEMA.COLLATIONS AS t2 ON t1.TABLE_COLLATION = t2.COLLATION_NAME WHERE TABLE_SCHEMA IN (%s) AND TABLE_NAME IN (%s) ORDER BY TABLE_SCHEMA, TABLE_NAME ` // Query to list table check constraints. myChecksQuery = `SELECT t1.TABLE_NAME, t1.CONSTRAINT_NAME, t2.CHECK_CLAUSE, t1.ENFORCED` + checksQuery marChecksQuery = `SELECT t1.TABLE_NAME, t1.CONSTRAINT_NAME, t2.CHECK_CLAUSE, "YES" AS ENFORCED` + checksQuery checksQuery = ` FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS t1 JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS t2 ON t1.CONSTRAINT_NAME = t2.CONSTRAINT_NAME WHERE t1.CONSTRAINT_TYPE = 'CHECK' AND t1.TABLE_SCHEMA = ? AND t1.TABLE_NAME IN (%s) ORDER BY t1.CONSTRAINT_NAME ` // Query to list table foreign keys. fksQuery = ` SELECT t1.CONSTRAINT_NAME, t1.TABLE_NAME, t1.COLUMN_NAME, t1.TABLE_SCHEMA, t1.REFERENCED_TABLE_NAME, t1.REFERENCED_COLUMN_NAME, t1.REFERENCED_TABLE_SCHEMA, t3.UPDATE_RULE, t3.DELETE_RULE FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS t1 JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS t2 JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS t3 ON t1.CONSTRAINT_NAME = t2.CONSTRAINT_NAME AND t1.CONSTRAINT_NAME = t3.CONSTRAINT_NAME AND t1.TABLE_SCHEMA = t2.TABLE_SCHEMA AND t1.TABLE_SCHEMA = t3.CONSTRAINT_SCHEMA WHERE t2.CONSTRAINT_TYPE = 'FOREIGN KEY' AND t1.TABLE_SCHEMA = ? 
AND t1.TABLE_NAME IN (%s) ORDER BY t1.CONSTRAINT_NAME, t1.ORDINAL_POSITION` ) type ( // AutoIncrement attribute for columns with "AUTO_INCREMENT" as a default. // V represent an optional start value for the counter. AutoIncrement struct { schema.Attr V int64 } // CreateOptions attribute for describing extra options used with CREATE TABLE. CreateOptions struct { schema.Attr V string } // CreateStmt describes the SQL statement used to create a table. CreateStmt struct { schema.Attr S string } // OnUpdate attribute for columns with "ON UPDATE CURRENT_TIMESTAMP" as a default. OnUpdate struct { schema.Attr A string } // SubPart attribute defines an option index prefix length for columns. SubPart struct { schema.Attr Len int } // Enforced attribute defines the ENFORCED flag for CHECK constraint. Enforced struct { schema.Attr V bool // V indicates if the CHECK is enforced or not. } // The DisplayWidth represents a display width of an integer type. DisplayWidth struct { schema.Attr N int } // The ZeroFill represents the ZEROFILL attribute which is // deprecated for MySQL version >= 8.0.17. ZeroFill struct { schema.Attr A string } // IndexType represents an index type. IndexType struct { schema.Attr T string // BTREE, HASH, FULLTEXT, SPATIAL, RTREE } // BitType represents a bit type. BitType struct { schema.Type T string Size int } // SetType represents a set type. SetType struct { schema.Type Values []string } // putShow is an intermediate table attribute used // on inspection to indicate if the 'SHOW TABLE' is // required and for what. showTable struct { schema.Attr // AUTO_INCREMENT value to due missing value in information_schema. 
auto *AutoIncrement } ) func putShow(t *schema.Table) *showTable { for i := range t.Attrs { if s, ok := t.Attrs[i].(*showTable); ok { return s } } s := &showTable{} t.Attrs = append(t.Attrs, s) return s } func popShow(t *schema.Table) (*showTable, bool) { for i := range t.Attrs { if s, ok := t.Attrs[i].(*showTable); ok { t.Attrs = append(t.Attrs[:i], t.Attrs[i+1:]...) return s, true } } return nil, false } atlas-0.7.2/sql/mysql/inspect_test.go000066400000000000000000002205451431455511600176400ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package mysql import ( "context" "fmt" "testing" "ariga.io/atlas/sql/internal/sqltest" "ariga.io/atlas/sql/schema" "github.com/DATA-DOG/go-sqlmock" "github.com/stretchr/testify/require" ) // Single table queries used by the different tests. var ( queryFKs = sqltest.Escape(fmt.Sprintf(fksQuery, "?")) queryTable = sqltest.Escape(fmt.Sprintf(tablesQuery, "?")) queryColumns = sqltest.Escape(fmt.Sprintf(columnsExprQuery, "?")) queryColumnsNoExpr = sqltest.Escape(fmt.Sprintf(columnsQuery, "?")) queryIndexes = sqltest.Escape(fmt.Sprintf(indexesQuery, "?")) queryIndexesNoComment = sqltest.Escape(fmt.Sprintf(indexesNoCommentQuery, "?")) queryIndexesExpr = sqltest.Escape(fmt.Sprintf(indexesExprQuery, "?")) queryMyChecks = sqltest.Escape(fmt.Sprintf(myChecksQuery, "?")) queryMarChecks = sqltest.Escape(fmt.Sprintf(marChecksQuery, "?")) ) func TestDriver_InspectTable(t *testing.T) { tests := []struct { name string version string before func(mock) expect func(*require.Assertions, *schema.Table, error) }{ { name: "table collation", before: func(m mock) { m.ExpectQuery(queryTable). WithArgs("public"). 
WillReturnRows(sqltest.Rows(` +--------------+--------------+--------------------+--------------------+----------------+---------------+--------------------+ | TABLE_SCHEMA | TABLE_NAME | CHARACTER_SET_NAME | TABLE_COLLATION | AUTO_INCREMENT | TABLE_COMMENT | CREATE_OPTIONS | +--------------+--------------+--------------------+--------------------+----------------+---------------+--------------------+ | public | users | utf8mb4 | utf8mb4_0900_ai_ci | nil | Comment | COMPRESSION="ZLIB" | +--------------+--------------+--------------------+--------------------+----------------+---------------+--------------------+ `)) m.ExpectQuery(queryColumns). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +--------------------+--------------------+----------------------+----------------------+-------------+------------+----------------+----------------+--------------------+----------------+---------------------------+ | table_name | column_name | column_type | column_comment | is_nullable | column_key | column_default | extra | character_set_name | collation_name | generation_expression | +--------------------+--------------------+----------------------+----------------------+-------------+------------+----------------+----------------+--------------------+----------------+---------------------------+ | users | id | bigint(20) | | NO | PRI | NULL | auto_increment | NULL | NULL | NULL | +--------------------+--------------------+----------------------+----------------------+-------------+------------+----------------+----------------+--------------------+----------------+---------------------------+ `)) m.ExpectQuery(queryIndexesExpr). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` +--------------------+--------------+-------------+------------+--------------+--------------+----------+--------------+------------+------------------+ | TABLE_NAME | INDEX_NAME | COLUMN_NAME | NON_UNIQUE | SEQ_IN_INDEX | INDEX_TYPE | DESC | COMMENT | SUB_PART | EXPRESSION | +--------------------+--------------+-------------+------------+--------------+--------------+----------+--------------+------------+------------------+ | users | PRIMARY | id | 0 | 1 | BTREE | 0 | | NULL | NULL | +--------------------+--------------+-------------+------------+--------------+--------------+----------+--------------+------------+------------------+ `)) m.noFKs() m.ExpectQuery(sqltest.Escape("SHOW CREATE TABLE `public`.`users`")). WillReturnRows(sqltest.Rows(` +-------+---------------------------------------------------------------------------------------------------------------------------------------------+ | Table | Create Table | +-------+---------------------------------------------------------------------------------------------------------------------------------------------+ | users | CREATE TABLE users (id bigint NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT=55834574848 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin | +-------+---------------------------------------------------------------------------------------------------------------------------------------------+ `)) }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]schema.Attr{ &schema.Charset{V: "utf8mb4"}, &schema.Collation{V: "utf8mb4_0900_ai_ci"}, &schema.Comment{Text: "Comment"}, &CreateOptions{V: `COMPRESSION="ZLIB"`}, &CreateStmt{S: "CREATE TABLE users (id bigint NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT=55834574848 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin"}, &AutoIncrement{V: 55834574848}, }, t.Attrs) require.Len(t.PrimaryKey.Parts, 1) 
require.True(t.PrimaryKey.Parts[0].C == t.Columns[0]) require.EqualValues([]*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "bigint(20)", Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&AutoIncrement{V: 55834574848}}}, }, t.Columns) }, }, { name: "int types", before: func(m mock) { m.ExpectQuery(queryTable). WithArgs("public"). WillReturnRows(sqltest.Rows(` +--------------+--------------+--------------------+--------------------+----------------+---------------+--------------------+ | TABLE_SCHEMA | TABLE_NAME | CHARACTER_SET_NAME | TABLE_COLLATION | AUTO_INCREMENT | TABLE_COMMENT | CREATE_OPTIONS | +--------------+--------------+--------------------+--------------------+----------------+---------------+--------------------+ | public | users | utf8mb4 | utf8mb4_0900_ai_ci | nil | Comment | COMPRESSION="ZLIB" | +--------------+--------------+--------------------+--------------------+----------------+---------------+--------------------+ `)) m.ExpectQuery(queryColumns). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` +------------+--------------------+------------------------------+----------------------+-------------+------------+----------------+----------------+--------------------+----------------+---------------------------+ | table_name | column_name | column_type | column_comment | is_nullable | column_key | column_default | extra | character_set_name | collation_name | generation_expression | +----------- +--------------------+------------------------------+----------------------+-------------+------------+----------------+----------------+--------------------+----------------+---------------------------+ | users | id | bigint(20) | | NO | PRI | NULL | | NULL | NULL | NULL | | users | v57_tiny | tinyint(1) | | NO | | NULL | | NULL | NULL | NULL | | users | v57_tiny_unsigned | tinyint(4) unsigned | | NO | | NULL | | NULL | NULL | NULL | | users | v57_small | smallint(6) | | NO | | NULL | | NULL | NULL | NULL | | users | v57_small_unsigned | smallint(6) unsigned | | NO | | NULL | | NULL | NULL | NULL | | users | v57_int | bigint(11) | | NO | | NULL | | NULL | NULL | NULL | | users | v57_int_unsigned | bigint(11) unsigned | | NO | | NULL | | NULL | NULL | NULL | | users | v8_tiny | tinyint | | NO | | NULL | | NULL | NULL | NULL | | users | v8_tiny_unsigned | tinyint unsigned | | NO | | NULL | | NULL | NULL | NULL | | users | v8_small | smallint | | NO | | NULL | | NULL | NULL | NULL | | users | v8_small_unsigned | smallint unsigned | | NO | | NULL | | NULL | NULL | NULL | | users | v8_big | bigint | | NO | | NULL | | NULL | NULL | NULL | | users | v8_big_unsigned | bigint unsigned | comment | NO | | NULL | | NULL | NULL | NULL | | users | v8_big_zerofill | bigint(20) unsigned zerofill | comment | NO | | NULL | | NULL | NULL | NULL | 
+------------+--------------------+------------------------------+----------------------+-------------+------------+----------------+----------------+--------------------+----------------+---------------------------+ `)) m.noIndexes() m.noFKs() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "bigint(20)", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "v57_tiny", Type: &schema.ColumnType{Raw: "tinyint(1)", Type: &schema.BoolType{T: "bool"}}}, {Name: "v57_tiny_unsigned", Type: &schema.ColumnType{Raw: "tinyint(4) unsigned", Type: &schema.IntegerType{T: "tinyint", Unsigned: true}}}, {Name: "v57_small", Type: &schema.ColumnType{Raw: "smallint(6)", Type: &schema.IntegerType{T: "smallint"}}}, {Name: "v57_small_unsigned", Type: &schema.ColumnType{Raw: "smallint(6) unsigned", Type: &schema.IntegerType{T: "smallint", Unsigned: true}}}, {Name: "v57_int", Type: &schema.ColumnType{Raw: "bigint(11)", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "v57_int_unsigned", Type: &schema.ColumnType{Raw: "bigint(11) unsigned", Type: &schema.IntegerType{T: "bigint", Unsigned: true}}}, {Name: "v8_tiny", Type: &schema.ColumnType{Raw: "tinyint", Type: &schema.IntegerType{T: "tinyint"}}}, {Name: "v8_tiny_unsigned", Type: &schema.ColumnType{Raw: "tinyint unsigned", Type: &schema.IntegerType{T: "tinyint", Unsigned: true}}}, {Name: "v8_small", Type: &schema.ColumnType{Raw: "smallint", Type: &schema.IntegerType{T: "smallint"}}}, {Name: "v8_small_unsigned", Type: &schema.ColumnType{Raw: "smallint unsigned", Type: &schema.IntegerType{T: "smallint", Unsigned: true}}}, {Name: "v8_big", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "v8_big_unsigned", Type: &schema.ColumnType{Raw: "bigint unsigned", Type: &schema.IntegerType{T: "bigint", Unsigned: true}}, Attrs: []schema.Attr{&schema.Comment{Text: 
"comment"}}}, {Name: "v8_big_zerofill", Type: &schema.ColumnType{Raw: "bigint(20) unsigned zerofill", Type: &schema.IntegerType{T: "bigint", Unsigned: true, Attrs: []schema.Attr{&DisplayWidth{N: 20}, &ZeroFill{A: "zerofill"}}}}, Attrs: []schema.Attr{&schema.Comment{Text: "comment"}}}, }, t.Columns) }, }, { name: "maria/types", version: "10.7.1-MariaDB", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +------------+----------------+------------------------------+----------------------+-------------+------------+----------------+----------------+--------------------+----------------+---------------------------+ | table_name | column_name | column_type | column_comment | is_nullable | column_key | column_default | extra | character_set_name | collation_name | generation_expression | +------------+----------------+------------------------------+----------------------+-------------+------------+----------------+----------------+--------------------+----------------+---------------------------+ | users | id | bigint(20) | | NO | PRI | NULL | | NULL | NULL | NULL | | users | tiny_int | tinyint(1) | | NO | | NULL | | NULL | NULL | NULL | | users | longtext | longtext | | NO | | NULL | | NULL | NULL | NULL | | users | jsonc | longtext | | NO | | NULL | | NULL | NULL | NULL | +------------+----------------+------------------------------+----------------------+-------------+------------+----------------+----------------+--------------------+----------------+---------------------------+ `)) m.ExpectQuery(queryIndexes). WillReturnRows(sqlmock.NewRows([]string{"table_name", "index_name", "column_name", "non_unique", "key_part", "expression"})) m.noFKs() m.ExpectQuery(queryMarChecks). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` +--------+------------------+-------------------------------------------+------------+ | table | CONSTRAINT_NAME | CHECK_CLAUSE | ENFORCED | +--------+------------------+-------------------------------------------+------------+ | users | jsonc | json_valid(` + "`jsonc`" + `) | YES | | users | users_chk_1 | longtext <> '\'\'""' | YES | +--------+------------------+-------------------------------------------+------------+ `)) }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "bigint(20)", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "tiny_int", Type: &schema.ColumnType{Raw: "tinyint(1)", Type: &schema.BoolType{T: "bool"}}}, {Name: "longtext", Type: &schema.ColumnType{Raw: "longtext", Type: &schema.StringType{T: "longtext"}}}, {Name: "jsonc", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}}}, }, t.Columns) require.EqualValues([]schema.Attr{ &schema.Check{Name: "jsonc", Expr: "json_valid(`jsonc`)"}, &schema.Check{Name: "users_chk_1", Expr: `longtext <> '\'\'""'`}, }, t.Attrs) }, }, { name: "decimal types", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` +------------+--------------+------------------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ | table_name | column_name | column_type | column_comment | is_nullable | column_key | column_default | extra | character_set_name | collation_name | generation_expression | +------------+--------------+------------------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ | users | d1 | decimal(10,2) | | NO | | 10.20 | | NULL | NULL | NULL | | users | d2 | decimal(10,0) | | NO | | 10 | | NULL | NULL | NULL | | users | d3 | decimal(10,2) unsigned | | NO | | 10.20 | | NULL | NULL | NULL | | users | d4 | decimal(10,0) unsigned | | NO | | 10 | | NULL | NULL | NULL | +------------+-------------+-------------------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ `)) m.noIndexes() m.noFKs() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]*schema.Column{ {Name: "d1", Type: &schema.ColumnType{Raw: "decimal(10,2)", Type: &schema.DecimalType{T: "decimal", Precision: 10, Scale: 2}}, Default: &schema.Literal{V: "10.20"}}, {Name: "d2", Type: &schema.ColumnType{Raw: "decimal(10,0)", Type: &schema.DecimalType{T: "decimal", Precision: 10, Scale: 0}}, Default: &schema.Literal{V: "10"}}, {Name: "d3", Type: &schema.ColumnType{Raw: "decimal(10,2) unsigned", Type: &schema.DecimalType{T: "decimal", Precision: 10, Scale: 2, Unsigned: true}}, Default: &schema.Literal{V: "10.20"}}, {Name: "d4", Type: &schema.ColumnType{Raw: "decimal(10,0) unsigned", Type: &schema.DecimalType{T: "decimal", Precision: 10, Scale: 0, Unsigned: true}}, Default: &schema.Literal{V: "10"}}, }, t.Columns) }, }, { name: 
"float types", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +------------+-------------------+--------------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ | table_name | column_name | column_type | column_comment | is_nullable | column_key | column_default | extra | character_set_name | collation_name | generation_expression | +------------+-------------------+--------------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ | users | float | float | | NO | | | | NULL | NULL | NULL | | users | double | double | | NO | | | | NULL | NULL | NULL | | users | float_unsigned | float unsigned | | NO | | | | NULL | NULL | NULL | | users | double_unsigned | double unsigned | | NO | | | | NULL | NULL | NULL | | users | float_unsigned_p | float(10) unsigned | | NO | | | | NULL | NULL | NULL | +------------+-------------------+--------------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ `)) m.noIndexes() m.noFKs() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]*schema.Column{ {Name: "float", Type: &schema.ColumnType{Raw: "float", Type: &schema.FloatType{T: "float"}}}, {Name: "double", Type: &schema.ColumnType{Raw: "double", Type: &schema.FloatType{T: "double"}}}, {Name: "float_unsigned", Type: &schema.ColumnType{Raw: "float unsigned", Type: &schema.FloatType{T: "float", Unsigned: true}}}, {Name: "double_unsigned", Type: &schema.ColumnType{Raw: "double unsigned", Type: &schema.FloatType{T: "double", Unsigned: true}}}, {Name: "float_unsigned_p", Type: &schema.ColumnType{Raw: "float(10) unsigned", 
Type: &schema.FloatType{T: "float", Precision: 10, Unsigned: true}}}, }, t.Columns) }, }, { name: "binary types", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +------------+--------------+---------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ | table_name | column_name | column_type | column_comment | is_nullable | column_key | column_default | extra | character_set_name | collation_name | generation_expression | +------------+--------------+---------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ | users | c1 | binary(20) | | NO | | NULL | | NULL | NULL | NULL | | users | c2 | varbinary(30) | | NO | | NULL | | NULL | NULL | NULL | | users | c3 | tinyblob | | NO | | NULL | | NULL | NULL | NULL | | users | c4 | mediumblob | | NO | | NULL | | NULL | NULL | NULL | | users | c5 | blob | | NO | | NULL | | NULL | NULL | NULL | | users | c6 | longblob | | NO | | NULL | | NULL | NULL | NULL | +------------+--------------+---------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ `)) m.noIndexes() m.noFKs() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) p := func(i int) *int { return &i } require.EqualValues([]*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "binary(20)", Type: &schema.BinaryType{T: "binary", Size: p(20)}}}, {Name: "c2", Type: &schema.ColumnType{Raw: "varbinary(30)", Type: &schema.BinaryType{T: "varbinary", Size: p(30)}}}, {Name: "c3", Type: &schema.ColumnType{Raw: "tinyblob", Type: &schema.BinaryType{T: "tinyblob"}}}, {Name: "c4", Type: &schema.ColumnType{Raw: "mediumblob", Type: 
&schema.BinaryType{T: "mediumblob"}}}, {Name: "c5", Type: &schema.ColumnType{Raw: "blob", Type: &schema.BinaryType{T: "blob"}}}, {Name: "c6", Type: &schema.ColumnType{Raw: "longblob", Type: &schema.BinaryType{T: "longblob"}}}, }, t.Columns) }, }, { name: "bit type", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +------------+------------+-------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ | TABLE_NAME |COLUMN_NAME | COLUMN_TYPE | COLUMN_COMMENT | IS_NULLABLE | COLUMN_KEY | COLUMN_DEFAULT | EXTRA | CHARACTER_SET_NAME | COLLATION_NAME | GENERATION_EXPRESSION | +------------+------------+-------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ | users |c1 | bit | | NO | | NULL | | NULL | NULL | NULL | | users |c2 | bit(1) | | NO | | NULL | | NULL | NULL | NULL | | users |c3 | bit(2) | | NO | | NULL | | NULL | NULL | NULL | +------------+------------+-------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ `)) m.noIndexes() m.noFKs() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "bit", Type: &BitType{T: "bit"}}}, {Name: "c2", Type: &schema.ColumnType{Raw: "bit(1)", Type: &BitType{T: "bit", Size: 1}}}, {Name: "c3", Type: &schema.ColumnType{Raw: "bit(2)", Type: &BitType{T: "bit", Size: 2}}}, }, t.Columns) }, }, { name: "string types", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` +------------+-------------+---------------+----------------+-------------+------------+--------------------------------------------+-------------------+--------------------+----------------+---------------------------+ | table_name | column_name | column_type | column_comment | is_nullable | column_key | column_default | extra | character_set_name | collation_name | generation_expression | +------------+-------------+---------------+----------------+-------------+------------+--------------------------------------------+-------------------+--------------------+----------------+---------------------------+ | users | c1 | char(20) | | NO | | char | | NULL | NULL | NULL | | users | c2 | varchar(30) | | NO | | NULL | | NULL | NULL | NULL | | users | c3 | tinytext | | NO | | NULL | | NULL | NULL | NULL | | users | c4 | mediumtext | | NO | | NULL | | NULL | NULL | NULL | | users | c5 | text | | NO | | NULL | | NULL | NULL | NULL | | users | c6 | longtext | | NO | | NULL | | NULL | NULL | NULL | | users | c7 | varchar(20) | | NO | | concat(_latin1\'Hello \',` + "`name`" + `) | DEFAULT_GENERATED | NULL | NULL | NULL | +------------+-------------+---------------+----------------+-------------+------------+--------------------------------------------+-------------------+--------------------+----------------+---------------------------+ `)) m.noIndexes() m.noFKs() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "char(20)", Type: &schema.StringType{T: "char", Size: 20}}, Default: &schema.Literal{V: `"char"`}}, {Name: "c2", Type: &schema.ColumnType{Raw: "varchar(30)", Type: &schema.StringType{T: "varchar", Size: 30}}}, {Name: "c3", Type: &schema.ColumnType{Raw: "tinytext", Type: &schema.StringType{T: "tinytext"}}}, {Name: "c4", Type: &schema.ColumnType{Raw: "mediumtext", Type: 
&schema.StringType{T: "mediumtext"}}}, {Name: "c5", Type: &schema.ColumnType{Raw: "text", Type: &schema.StringType{T: "text"}}}, {Name: "c6", Type: &schema.ColumnType{Raw: "longtext", Type: &schema.StringType{T: "longtext"}}}, {Name: "c7", Type: &schema.ColumnType{Raw: "varchar(20)", Type: &schema.StringType{T: "varchar", Size: 20}}, Default: &schema.RawExpr{X: "(concat(_latin1'Hello ',`name`))"}}, }, t.Columns) }, }, { name: "enum type", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +------------+-------------+---------------+----------------+-------------+------------+----------------+-------+--------------------+-------------------+---------------------------+ | table_name | column_name | column_type | column_comment | is_nullable | column_key | column_default | extra | character_set_name | collation_name | generation_expression | +------------+-------------+---------------+----------------+-------------+------------+----------------+-------+--------------------+-------------------+---------------------------+ | users | c1 | enum('a','b') | | NO | | NULL | | latin1 | latin1_swedish_ci | NULL | | users | c2 | enum('c','d') | | NO | | d | | latin1 | latin1_swedish_ci | NULL | +------------+-------------+---------------+----------------+-------------+------------+----------------+-------+--------------------+-------------------+---------------------------+ `)) m.noIndexes() m.noFKs() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "enum('a','b')", Type: &schema.EnumType{T: "enum", Values: []string{"a", "b"}}}, Attrs: []schema.Attr{&schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}}}, {Name: "c2", Type: &schema.ColumnType{Raw: "enum('c','d')", Type: &schema.EnumType{T: "enum", Values: 
[]string{"c", "d"}}}, Default: &schema.Literal{V: `"d"`}, Attrs: []schema.Attr{&schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}}}, }, t.Columns) }, }, { name: "time type", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +------------+-------------+--------------+-------------------+-------------+------------+----------------------+--------------------------------+--------------------+----------------+---------------------------+ | table_name | column_name | column_type | column_comment | is_nullable | column_key | column_default | extra | character_set_name | collation_name | generation_expression | +------------+-------------+--------------+-------------------+-------------+------------+----------------------+--------------------------------+--------------------+----------------+---------------------------+ | users | c1 | date | | NO | | NULL | | NULL | NULL | NULL | | users | c2 | datetime | | NO | | NULL | | NULL | NULL | NULL | | users | c3 | time | | NO | | NULL | | NULL | NULL | NULL | | users | c4 | timestamp | | NO | | CURRENT_TIMESTAMP | on update CURRENT_TIMESTAMP | NULL | NULL | NULL | | users | c5 | year(4) | | NO | | NULL | | NULL | NULL | NULL | | users | c6 | year | | NO | | NULL | | NULL | NULL | NULL | | users | c7 | timestamp(6) | | NO | | CURRENT_TIMESTAMP(6) | on update CURRENT_TIMESTAMP(6) | NULL | NULL | NULL | +------------+--------------+-------------------+-------------+------------+----------------------+--------------------------------+--------------------+-------------+----------------+---------------------------+ `)) m.noIndexes() m.noFKs() }, expect: func(require *require.Assertions, t *schema.Table, err error) { p := func(i int) *int { return &i } require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "date", Type: &schema.TimeType{T: 
"date"}}}, {Name: "c2", Type: &schema.ColumnType{Raw: "datetime", Type: &schema.TimeType{T: "datetime"}}}, {Name: "c3", Type: &schema.ColumnType{Raw: "time", Type: &schema.TimeType{T: "time"}}}, {Name: "c4", Type: &schema.ColumnType{Raw: "timestamp", Type: &schema.TimeType{T: "timestamp"}}, Default: &schema.RawExpr{X: "CURRENT_TIMESTAMP"}, Attrs: []schema.Attr{&OnUpdate{A: "CURRENT_TIMESTAMP"}}}, {Name: "c5", Type: &schema.ColumnType{Raw: "year(4)", Type: &schema.TimeType{T: "year", Precision: p(4)}}}, {Name: "c6", Type: &schema.ColumnType{Raw: "year", Type: &schema.TimeType{T: "year"}}}, {Name: "c7", Type: &schema.ColumnType{Raw: "timestamp(6)", Type: &schema.TimeType{T: "timestamp", Precision: p(6)}}, Default: &schema.RawExpr{X: "CURRENT_TIMESTAMP(6)"}, Attrs: []schema.Attr{&OnUpdate{A: "CURRENT_TIMESTAMP(6)"}}}, }, t.Columns) }, }, { name: "json type", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +------------+------------+-------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ | TABLE_NAME |COLUMN_NAME | COLUMN_TYPE | COLUMN_COMMENT | IS_NULLABLE | COLUMN_KEY | COLUMN_DEFAULT | EXTRA | CHARACTER_SET_NAME | COLLATION_NAME | GENERATION_EXPRESSION | +------------+------------+-------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ | users |c1 | json | | NO | | NULL | | NULL | NULL | NULL | +------------+------------+-------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ `)) m.noIndexes() m.noFKs() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]*schema.Column{ {Name: "c1", Type: 
&schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}}}, }, t.Columns) }, }, { name: "spatial type", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +------------+-------------+--------------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ | table_name | column_name | column_type | column_comment | is_nullable | column_key | column_default | extra | character_set_name | collation_name | GENERATION_EXPRESSION | +------------+-------------+--------------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ | users | c1 | point | | NO | | NULL | | NULL | NULL | NULL | | users | c2 | multipoint | | NO | | NULL | | NULL | NULL | NULL | | users | c3 | linestring | | NO | | NULL | | NULL | NULL | NULL | | users | c4 | multilinestring | | NO | | NULL | | NULL | NULL | NULL | | users | c5 | polygon | | NO | | NULL | | NULL | NULL | NULL | | users | c6 | multipolygon | | NO | | NULL | | NULL | NULL | NULL | | users | c7 | geometry | | NO | | NULL | | NULL | NULL | NULL | | users | c8 | geometrycollection | | NO | | NULL | | NULL | NULL | NULL | | users | c9 | geomcollection | | NO | | NULL | | NULL | NULL | NULL | +------------+-------------+--------------------+----------------+-------------+------------+----------------+-------+--------------------+----------------+---------------------------+ `)) m.noIndexes() m.noFKs() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "point", Type: &schema.SpatialType{T: "point"}}}, {Name: "c2", Type: &schema.ColumnType{Raw: "multipoint", Type: &schema.SpatialType{T: "multipoint"}}}, {Name: 
"c3", Type: &schema.ColumnType{Raw: "linestring", Type: &schema.SpatialType{T: "linestring"}}}, {Name: "c4", Type: &schema.ColumnType{Raw: "multilinestring", Type: &schema.SpatialType{T: "multilinestring"}}}, {Name: "c5", Type: &schema.ColumnType{Raw: "polygon", Type: &schema.SpatialType{T: "polygon"}}}, {Name: "c6", Type: &schema.ColumnType{Raw: "multipolygon", Type: &schema.SpatialType{T: "multipolygon"}}}, {Name: "c7", Type: &schema.ColumnType{Raw: "geometry", Type: &schema.SpatialType{T: "geometry"}}}, {Name: "c8", Type: &schema.ColumnType{Raw: "geometrycollection", Type: &schema.SpatialType{T: "geometrycollection"}}}, {Name: "c9", Type: &schema.ColumnType{Raw: "geomcollection", Type: &schema.SpatialType{T: "geomcollection"}}}, }, t.Columns) }, }, { name: "generated columns", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +------------+-------------+-------------+----------------+-------------+------------+----------------+-------------------+--------------------+----------------+--------------------------------------+ | TABLE_NAME | COLUMN_NAME | COLUMN_TYPE | COLUMN_COMMENT | IS_NULLABLE | COLUMN_KEY | COLUMN_DEFAULT | EXTRA | CHARACTER_SET_NAME | COLLATION_NAME | GENERATION_EXPRESSION | +------------+-------------+-------------+----------------+-------------+------------+----------------+-------------------+--------------------+----------------+--------------------------------------+ | users | c1 | int | | NO | | NULL | | NULL | NULL | | | users | c2 | int | | NO | | NULL | VIRTUAL GENERATED | NULL | NULL | ` + "(`c1` * `c1`)" + ` | | users | c3 | int | | NO | | NULL | STORED GENERATED | NULL | NULL | ` + "(`c1` + `c2`)" + ` | | users | c4 | varchar(20) | | NO | | NULL | STORED GENERATED | NULL | NULL | concat(_latin1\'\\\'\',_latin1\'"\') | 
+------------+-------------+-------------+----------------+-------------+------------+----------------+-------------------+--------------------+----------------+--------------------------------------+ `)) m.noIndexes() m.noFKs() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, {Name: "c2", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}, Attrs: []schema.Attr{&schema.GeneratedExpr{Expr: "(`c1` * `c1`)", Type: "VIRTUAL"}}}, {Name: "c3", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}, Attrs: []schema.Attr{&schema.GeneratedExpr{Expr: "(`c1` + `c2`)", Type: "STORED"}}}, {Name: "c4", Type: &schema.ColumnType{Raw: "varchar(20)", Type: &schema.StringType{T: "varchar", Size: 20}}, Attrs: []schema.Attr{&schema.GeneratedExpr{Expr: "concat(_latin1'\\'',_latin1'\"')", Type: "STORED"}}}, }, t.Columns) }, }, { name: "indexes", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` +------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ | TABLE_NAME | COLUMN_NAME | COLUMN_TYPE | COLUMN_COMMENT | IS_NULLABLE | COLUMN_KEY | COLUMN_DEFAULT | EXTRA | CHARACTER_SET_NAME | COLLATION_NAME | GENERATION_EXPRESSION | +------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ | users | id | int | | NO | PRI | NULL | | NULL | NULL | NULL | | users | nickname | varchar(255) | | NO | UNI | NULL | | utf8mb4 | utf8mb4_0900_ai_ci | NULL | | users | oid | int | | NO | MUL | NULL | | NULL | NULL | NULL | | users | uid | int | | NO | MUL | NULL | | NULL | NULL | NULL | +------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ `)) m.ExpectQuery(queryIndexesExpr). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` +--------------+--------------+-------------+------------+--------------+--------------+---------+--------------+------------+------------------+ | TABLE_NAME | INDEX_NAME | COLUMN_NAME | NON_UNIQUE | SEQ_IN_INDEX | INDEX_TYPE | DESC | COMMENT | SUB_PART | EXPRESSION | +--------------+--------------+-------------+------------+--------------+--------------+---------+--------------+------------+------------------+ | users | nickname | nickname | 0 | 1 | BTREE | nil | | 255 | NULL | | users | lower_nick | NULL | 1 | 1 | HASH | 0 | | NULL | lower(nickname) | | users | non_unique | oid | 1 | 1 | BTREE | 0 | | NULL | NULL | | users | non_unique | uid | 1 | 2 | BTREE | 0 | | NULL | NULL | | users | PRIMARY | id | 0 | 1 | BTREE | 0 | | NULL | NULL | | users | unique_index | uid | 0 | 1 | BTREE | 1 | | NULL | NULL | | users | unique_index | oid | 0 | 2 | BTREE | 1 | | NULL | NULL | +--------------+--------------+-------------+------------+--------------+--------------+---------+--------------+------------+------------------+ `)) m.noFKs() m.ExpectQuery(sqltest.Escape("SHOW CREATE TABLE `public`.`users`")). 
WillReturnRows(sqltest.Rows(` +-------+---------------------------------------------------------------------------------------------------------------------------------------------+ | Table | Create Table | +-------+---------------------------------------------------------------------------------------------------------------------------------------------+ +-------+---------------------------------------------------------------------------------------------------------------------------------------------+ | users | CREATE TABLE users (id bigint NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT=55834574848 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin | +-------+---------------------------------------------------------------------------------------------------------------------------------------------+ `)) }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) indexes := []*schema.Index{ {Name: "nickname", Unique: true, Table: t, Attrs: []schema.Attr{&IndexType{T: "BTREE"}}}, // Implicitly created by the UNIQUE clause. 
{Name: "lower_nick", Table: t, Attrs: []schema.Attr{&IndexType{T: "HASH"}}}, {Name: "non_unique", Table: t, Attrs: []schema.Attr{&IndexType{T: "BTREE"}}}, {Name: "unique_index", Unique: true, Table: t, Attrs: []schema.Attr{&IndexType{T: "BTREE"}}}, } columns := []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, {Name: "nickname", Type: &schema.ColumnType{Raw: "varchar(255)", Type: &schema.StringType{T: "varchar", Size: 255}}, Indexes: indexes[0:1], Attrs: []schema.Attr{&schema.Charset{V: "utf8mb4"}, &schema.Collation{V: "utf8mb4_0900_ai_ci"}}}, {Name: "oid", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}, Indexes: indexes[2:]}, {Name: "uid", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}, Indexes: indexes[2:]}, } // nickname indexes[0].Parts = []*schema.IndexPart{ {SeqNo: 1, C: columns[1], Attrs: []schema.Attr{&SubPart{Len: 255}}}, } // lower(nickname) indexes[1].Parts = []*schema.IndexPart{ {SeqNo: 1, X: &schema.RawExpr{X: "lower(nickname)"}}, } // oid, uid indexes[2].Parts = []*schema.IndexPart{ {SeqNo: 1, C: columns[2]}, {SeqNo: 2, C: columns[3]}, } // uid, oid indexes[3].Parts = []*schema.IndexPart{ {SeqNo: 1, C: columns[3], Desc: true}, {SeqNo: 2, C: columns[2], Desc: true}, } require.EqualValues(columns, t.Columns) require.EqualValues(indexes, t.Indexes) }, }, { name: "indexes/not_support_comment", version: "5.1.60", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumnsNoExpr). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` +------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ | TABLE_NAME | COLUMN_NAME | COLUMN_TYPE | COLUMN_COMMENT | IS_NULLABLE | COLUMN_KEY | COLUMN_DEFAULT | EXTRA | CHARACTER_SET_NAME | COLLATION_NAME | GENERATION_EXPRESSION | +------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ | users | id | int | | NO | PRI | NULL | | NULL | NULL | NULL | +------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ `)) m.ExpectQuery(queryIndexesNoComment). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +--------------+--------------+-------------+------------+--------------+--------------+---------+--------------+------------+------------------+ | TABLE_NAME | INDEX_NAME | COLUMN_NAME | NON_UNIQUE | SEQ_IN_INDEX | INDEX_TYPE | DESC | COMMENT | SUB_PART | EXPRESSION | +--------------+--------------+-------------+------------+--------------+--------------+---------+--------------+------------+------------------+ | users | PRIMARY | id | 0 | 1 | BTREE | 0 | NULL | NULL | NULL | +--------------+--------------+-------------+------------+--------------+--------------+---------+--------------+------------+------------------+ `)) m.noFKs() }, expect: func(require *require.Assertions, t *schema.Table, err error) { // nothing to expect, ExpectQuery is enough for this test require.NoError(err) }, }, { name: "fks", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` +------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ | TABLE_NAME | COLUMN_NAME | COLUMN_TYPE | COLUMN_COMMENT | IS_NULLABLE | COLUMN_KEY | COLUMN_DEFAULT | EXTRA | CHARACTER_SET_NAME | COLLATION_NAME | GENERATION_EXPRESSION | +------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ | users | id | int | | NO | PRI | NULL | | NULL | NULL | NULL | | users | oid | int | | NO | MUL | NULL | | NULL | NULL | NULL | | users | uid | int | | NO | MUL | NULL | | NULL | NULL | NULL | +------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ `)) m.noIndexes() m.ExpectQuery(queryFKs). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` +------------------+------------+-------------+--------------+-----------------------+------------------------+------------------------+-------------+-------------+ | CONSTRAINT_NAME | TABLE_NAME | COLUMN_NAME | TABLE_SCHEMA | REFERENCED_TABLE_NAME | REFERENCED_COLUMN_NAME | REFERENCED_SCHEMA_NAME | UPDATE_RULE | DELETE_RULE | +------------------+------------+-------------+--------------+-----------------------+------------------------+------------------------+-------------+-------------+ | multi_column | users | id | public | t1 | gid | public | NO ACTION | CASCADE | | multi_column | users | oid | public | t1 | xid | public | NO ACTION | CASCADE | | self_reference | users | uid | public | users | id | public | NO ACTION | CASCADE | +------------------+------------+-------------+--------------+-----------------------+------------------------+------------------------+ ------------+-------------+ `)) }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.Equal("public", t.Schema.Name) fks := []*schema.ForeignKey{ {Symbol: "multi_column", Table: t, OnUpdate: schema.NoAction, OnDelete: schema.Cascade, RefTable: &schema.Table{Name: "t1", Schema: t.Schema}, RefColumns: []*schema.Column{{Name: "gid"}, {Name: "xid"}}}, {Symbol: "self_reference", Table: t, OnUpdate: schema.NoAction, OnDelete: schema.Cascade, RefTable: t}, } columns := []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}, ForeignKeys: fks[0:1]}, {Name: "oid", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}, ForeignKeys: fks[0:1]}, {Name: "uid", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}, ForeignKeys: fks[1:2]}, } fks[0].Columns = columns[:2] fks[1].Columns = columns[2:] fks[1].RefColumns = columns[:1] require.EqualValues(columns, t.Columns) require.EqualValues(fks, t.ForeignKeys) }, }, { name: 
"checks", version: "8.0.16", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +-------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ | TABLE_NAME | COLUMN_NAME | COLUMN_TYPE | COLUMN_COMMENT | IS_NULLABLE | COLUMN_KEY | COLUMN_DEFAULT | EXTRA | CHARACTER_SET_NAME | COLLATION_NAME | GENERATION_EXPRESSION | +-------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ | users | id | int | | NO | PRI | NULL | | NULL | NULL | NULL | | users | c1 | int | | NO | MUL | NULL | | NULL | NULL | NULL | +-------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ `)) m.noIndexes() m.noFKs() m.ExpectQuery(queryMyChecks). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` +-------------------+-------------------+-------------------------------------------+------------+ | TABLE_NAME | CONSTRAINT_NAME | CHECK_CLAUSE | ENFORCED | +-------------------+-------------------+-------------------------------------------+------------+ | users | users_chk_1 | (` + "`c6`" + ` <> _latin1\'foo\\\'s\') | YES | | users | users_chk_2 | (c1 <> _latin1\'dev/atlas\') | YES | | users | users_chk_3 | (c1 <> _latin1\'a\\\'b""\') | YES | | users | users_chk_4 | (c1 <> in (_latin1\'usa\',_latin1\'uk\')) | YES | | users | users_chk_5 | (c1 <> _latin1\'\\\\\\\\\\\'\\\'\') | YES | +-------------------+-------------------+-------------------------------------------+------------+ `)) m.ExpectQuery(sqltest.Escape("SHOW CREATE TABLE `public`.`users`")). 
WillReturnRows(sqltest.Rows(` +-------+------------------------+ | Table | Create Table | +-------+------------------------+ | users | CREATE TABLE users() | +-------+------------------------+ `)) }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.Equal("public", t.Schema.Name) columns := []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, {Name: "c1", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, } require.EqualValues(columns, t.Columns) require.EqualValues([]schema.Attr{ &schema.Check{Name: "users_chk_1", Expr: "(`c6` <> _latin1'foo\\'s')"}, &schema.Check{Name: "users_chk_2", Expr: "(c1 <> _latin1'dev/atlas')"}, &schema.Check{Name: "users_chk_3", Expr: `(c1 <> _latin1'a\'b""')`}, &schema.Check{Name: "users_chk_4", Expr: `(c1 <> in (_latin1'usa',_latin1'uk'))`}, &schema.Check{Name: "users_chk_5", Expr: `(c1 <> _latin1'\\\\\'\'')`}, }, t.Attrs) }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) mk := mock{m} if tt.version == "" { tt.version = "8.0.13" } mk.version(tt.version) mk.ExpectQuery(sqltest.Escape(fmt.Sprintf(schemasQueryArgs, "= ?"))). WithArgs("public"). 
WillReturnRows(sqltest.Rows(` +-------------+----------------------------+------------------------+ | SCHEMA_NAME | DEFAULT_CHARACTER_SET_NAME | DEFAULT_COLLATION_NAME | +-------------+----------------------------+------------------------+ | public | utf8mb4 | utf8mb4_unicode_ci | +-------------+----------------------------+------------------------+ `)) tt.before(mk) drv, err := Open(db) require.NoError(t, err) s, err := drv.InspectSchema(context.Background(), "public", nil) require.NoError(t, err) require.NotNil(t, s) tt.expect(require.New(t), s.Tables[0], err) }) } } func TestDriver_InspectSchema(t *testing.T) { tests := []struct { name string schema string opts *schema.InspectOptions before func(mock) expect func(*require.Assertions, *schema.Schema, error) }{ { name: "attached schema", before: func(m mock) { m.version("5.7.23") m.ExpectQuery(sqltest.Escape(fmt.Sprintf(schemasQueryArgs, "= SCHEMA()"))). WillReturnRows(sqltest.Rows(` +-------------+----------------------------+------------------------+ | SCHEMA_NAME | DEFAULT_CHARACTER_SET_NAME | DEFAULT_COLLATION_NAME | +-------------+----------------------------+------------------------+ | public | utf8mb4 | utf8mb4_unicode_ci | +-------------+----------------------------+------------------------+ `)) m.tables("public") }, expect: func(require *require.Assertions, s *schema.Schema, err error) { require.NoError(err) require.EqualValues(func() *schema.Schema { realm := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "public", Attrs: []schema.Attr{ &schema.Charset{V: "utf8mb4"}, &schema.Collation{V: "utf8mb4_unicode_ci"}, }, }, }, Attrs: []schema.Attr{ &schema.Charset{ V: "utf8", }, &schema.Collation{ V: "utf8_general_ci", }, }, } realm.Schemas[0].Realm = realm return realm.Schemas[0] }(), s) }, }, { name: "multi table", schema: "public", before: func(m mock) { m.version("8.0.13") m.ExpectQuery(sqltest.Escape(fmt.Sprintf(schemasQueryArgs, "= ?"))). WithArgs("public"). 
WillReturnRows(sqltest.Rows(` +-------------+----------------------------+------------------------+ | SCHEMA_NAME | DEFAULT_CHARACTER_SET_NAME | DEFAULT_COLLATION_NAME | +-------------+----------------------------+------------------------+ | public | utf8mb4 | utf8mb4_unicode_ci | +-------------+----------------------------+------------------------+ `)) m.tables("public", "users", "pets") m.ExpectQuery(sqltest.Escape(fmt.Sprintf(columnsExprQuery, "?, ?"))). WithArgs("public", "users", "pets"). WillReturnRows(sqltest.Rows(` +-------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ | TABLE_NAME | COLUMN_NAME | COLUMN_TYPE | COLUMN_COMMENT | IS_NULLABLE | COLUMN_KEY | COLUMN_DEFAULT | EXTRA | CHARACTER_SET_NAME | COLLATION_NAME | GENERATION_EXPRESSION | +-------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ | users | id | int | | NO | PRI | NULL | | NULL | NULL | NULL | | users | spouse_id | int | | YES | NULL | NULL | | NULL | NULL | NULL | | pets | id | int | | NO | PRI | NULL | | NULL | NULL | NULL | | pets | owner_id | int | | YES | NULL | NULL | | NULL | NULL | NULL | +-------------+-------------+--------------+----------------+-------------+------------+----------------+----------------+--------------------+--------------------+---------------------------+ `)) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(indexesExprQuery, "?, ?"))). WillReturnRows(sqlmock.NewRows([]string{"table_name", "index_name", "column_name", "non_unique", "key_part", "expression"})) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(fksQuery, "?, ?"))). WithArgs("public", "users", "pets"). 
WillReturnRows(sqltest.Rows(` +------------------+------------+-------------+--------------+-----------------------+------------------------+------------------------+-------------+-------------+ | CONSTRAINT_NAME | TABLE_NAME | COLUMN_NAME | TABLE_SCHEMA | REFERENCED_TABLE_NAME | REFERENCED_COLUMN_NAME | REFERENCED_SCHEMA_NAME | UPDATE_RULE | DELETE_RULE | +------------------+------------+-------------+--------------+-----------------------+------------------------+------------------------+-------------+-------------+ | spouse_id | users | spouse_id | public | users | id | public | NO ACTION | CASCADE | | owner_id | pets | owner_id | public | users | id | public | NO ACTION | CASCADE | +------------------+------------+-------------+--------------+-----------------------+------------------------+------------------------+-------------+-------------+ `)) }, expect: func(require *require.Assertions, s *schema.Schema, err error) { require.NoError(err) ts := s.Tables require.Len(ts, 2) users, pets := ts[0], ts[1] require.Equal("users", users.Name) userFKs := []*schema.ForeignKey{ {Symbol: "spouse_id", Table: users, OnUpdate: schema.NoAction, OnDelete: schema.Cascade, RefTable: users}, } userColumns := []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, {Name: "spouse_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}, Null: true}, ForeignKeys: userFKs}, } userFKs[0].Columns = userColumns[1:] userFKs[0].RefColumns = userColumns[:1] require.EqualValues(userColumns, users.Columns) require.EqualValues(userFKs, users.ForeignKeys) require.Equal("pets", pets.Name) petsFKs := []*schema.ForeignKey{ {Symbol: "owner_id", Table: pets, OnUpdate: schema.NoAction, OnDelete: schema.Cascade, RefTable: users, RefColumns: userColumns[:1]}, } petsColumns := []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, {Name: "owner_id", Type: &schema.ColumnType{Raw: 
"int", Type: &schema.IntegerType{T: "int"}, Null: true}, ForeignKeys: petsFKs}, } petsFKs[0].Columns = petsColumns[1:] require.EqualValues(petsColumns, pets.Columns) require.EqualValues(petsFKs, pets.ForeignKeys) }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) tt.before(mock{m}) drv, err := Open(db) require.NoError(t, err) tables, err := drv.InspectSchema(context.Background(), tt.schema, tt.opts) tt.expect(require.New(t), tables, err) }) } } func TestDriver_Realm(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) mk := mock{m} mk.version("8.0.13") mk.ExpectQuery(sqltest.Escape(schemasQuery)). WillReturnRows(sqltest.Rows(` +-------------+----------------------------+------------------------+ | SCHEMA_NAME | DEFAULT_CHARACTER_SET_NAME | DEFAULT_COLLATION_NAME | +-------------+----------------------------+------------------------+ | test | utf8mb4 | utf8mb4_unicode_ci | +-------------+----------------------------+------------------------+ `)) mk.tables("test") drv, err := Open(db) require.NoError(t, err) realm, err := drv.InspectRealm(context.Background(), &schema.InspectRealmOption{}) require.NoError(t, err) require.EqualValues(t, func() *schema.Realm { r := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "test", Attrs: []schema.Attr{ &schema.Charset{V: "utf8mb4"}, &schema.Collation{V: "utf8mb4_unicode_ci"}, }, }, }, // Server default configuration. Attrs: []schema.Attr{ &schema.Charset{ V: "utf8", }, &schema.Collation{ V: "utf8_general_ci", }, }, } r.Schemas[0].Realm = r return r }(), realm) mk.ExpectQuery(sqltest.Escape(fmt.Sprintf(schemasQueryArgs, "IN (?, ?)"))). WithArgs("test", "public"). 
WillReturnRows(sqltest.Rows(` +-------------+----------------------------+------------------------+ | SCHEMA_NAME | DEFAULT_CHARACTER_SET_NAME | DEFAULT_COLLATION_NAME | +-------------+----------------------------+------------------------+ | test | utf8mb4 | utf8mb4_unicode_ci | | public | utf8 | utf8_general_ci | +-------------+----------------------------+------------------------+ `)) mk.ExpectQuery(sqltest.Escape(fmt.Sprintf(tablesQuery, "?, ?"))). WithArgs("test", "public"). WillReturnRows(sqlmock.NewRows([]string{"schema", "table", "charset", "collate", "inc", "comment", "options"})) realm, err = drv.InspectRealm(context.Background(), &schema.InspectRealmOption{Schemas: []string{"test", "public"}}) require.NoError(t, err) require.EqualValues(t, func() *schema.Realm { r := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "test", Attrs: []schema.Attr{ &schema.Charset{V: "utf8mb4"}, &schema.Collation{V: "utf8mb4_unicode_ci"}, }, }, { Name: "public", Attrs: []schema.Attr{ &schema.Charset{V: "utf8"}, &schema.Collation{V: "utf8_general_ci"}, }, }, }, // Server default configuration. Attrs: []schema.Attr{ &schema.Charset{ V: "utf8", }, &schema.Collation{ V: "utf8_general_ci", }, }, } r.Schemas[0].Realm = r r.Schemas[1].Realm = r return r }(), realm) } func TestInspectMode_InspectRealm(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) mk := mock{m} mk.version("8.0.13") mk.ExpectQuery(sqltest.Escape(schemasQuery)). 
WillReturnRows(sqltest.Rows(` +-------------+----------------------------+------------------------+ | SCHEMA_NAME | DEFAULT_CHARACTER_SET_NAME | DEFAULT_COLLATION_NAME | +-------------+----------------------------+------------------------+ | test | latin1 | lain1_ci | +-------------+----------------------------+------------------------+ `)) drv, err := Open(db) require.NoError(t, err) realm, err := drv.InspectRealm(context.Background(), &schema.InspectRealmOption{Mode: schema.InspectSchemas}) require.NoError(t, err) require.EqualValues(t, func() *schema.Realm { r := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "test", Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "lain1_ci"}, }, }, }, // Server default configuration. Attrs: []schema.Attr{ &schema.Charset{ V: "utf8", }, &schema.Collation{ V: "utf8_general_ci", }, }, } r.Schemas[0].Realm = r return r }(), realm) } type mock struct { sqlmock.Sqlmock } func (m mock) version(version string) { m.ExpectQuery(sqltest.Escape(variablesQuery)). WillReturnRows(sqltest.Rows(` +-----------------+--------------------+------------------------+ | @@version | @@collation_server | @@character_set_server | +-----------------+--------------------+------------------------+ | ` + version + ` | utf8_general_ci | utf8 | +-----------------+--------------------+------------------------+ `)) } func (m mock) noIndexes() { m.ExpectQuery(queryIndexesExpr). WillReturnRows(sqlmock.NewRows([]string{"table_name", "index_name", "column_name", "non_unique", "key_part", "expression"})) } func (m mock) noFKs() { m.ExpectQuery(queryFKs). 
WillReturnRows(sqlmock.NewRows([]string{"TABLE_NAME", "CONSTRAINT_NAME", "TABLE_NAME", "COLUMN_NAME", "REFERENCED_TABLE_NAME", "REFERENCED_COLUMN_NAME", "REFERENCED_TABLE_SCHEMA", "UPDATE_RULE", "DELETE_RULE"})) } func (m mock) tableExists(schema, table string, exists bool) { rows := sqlmock.NewRows([]string{"table_schema", "table_name", "table_collation", "character_set", "auto_increment", "table_comment", "create_options"}) if exists { rows.AddRow(schema, table, nil, nil, nil, nil, nil) } m.ExpectQuery(queryTable). WithArgs(schema). WillReturnRows(rows) } func (m mock) tables(schema string, tables ...string) { rows := sqlmock.NewRows([]string{"schema", "table", "charset", "collate", "inc", "comment", "options"}) for _, t := range tables { rows.AddRow(schema, t, nil, nil, nil, nil, nil) } m.ExpectQuery(queryTable). WithArgs(schema). WillReturnRows(rows) } atlas-0.7.2/sql/mysql/internal/000077500000000000000000000000001431455511600164115ustar00rootroot00000000000000atlas-0.7.2/sql/mysql/internal/mysqlversion/000077500000000000000000000000001431455511600211645ustar00rootroot00000000000000atlas-0.7.2/sql/mysql/internal/mysqlversion/is/000077500000000000000000000000001431455511600215775ustar00rootroot00000000000000atlas-0.7.2/sql/mysql/internal/mysqlversion/is/.README.md000066400000000000000000000010141431455511600231300ustar00rootroot00000000000000## Charset and Collation of MySQL and MariaDB latest versions `collate2charset` and `collate2charset.maria` hold a mapping from the collation to their charset. 
```sql select json_objectagg(collation_name, character_set_name) from information_schema.collations\G; ``` `charset2collate` and `charset2collate.maria` hold a mapping from the charset to its default collation extracted by the following query: ```sql select json_objectagg(character_set_name, default_collate_name) from information_schema.character_sets\G; ```atlas-0.7.2/sql/mysql/internal/mysqlversion/is/charset2collate000066400000000000000000000023001431455511600245740ustar00rootroot00000000000000{"gbk": "gbk_chinese_ci", "hp8": "hp8_english_ci", "big5": "big5_chinese_ci", "dec8": "dec8_swedish_ci", "sjis": "sjis_japanese_ci", "swe7": "swe7_swedish_ci", "ucs2": "ucs2_general_ci", "ujis": "ujis_japanese_ci", "utf8": "utf8_general_ci", "ascii": "ascii_general_ci", "cp850": "cp850_general_ci", "cp852": "cp852_general_ci", "cp866": "cp866_general_ci", "cp932": "cp932_japanese_ci", "euckr": "euckr_korean_ci", "greek": "greek_general_ci", "koi8r": "koi8r_general_ci", "koi8u": "koi8u_general_ci", "macce": "macce_general_ci", "utf16": "utf16_general_ci", "utf32": "utf32_general_ci", "binary": "binary", "cp1250": "cp1250_general_ci", "cp1251": "cp1251_general_ci", "cp1256": "cp1256_general_ci", "cp1257": "cp1257_general_ci", "gb2312": "gb2312_chinese_ci", "hebrew": "hebrew_general_ci", "latin1": "latin1_swedish_ci", "latin2": "latin2_general_ci", "latin5": "latin5_turkish_ci", "latin7": "latin7_general_ci", "tis620": "tis620_thai_ci", "eucjpms": "eucjpms_japanese_ci", "gb18030": "gb18030_chinese_ci", "geostd8": "geostd8_general_ci", "keybcs2": "keybcs2_general_ci", "utf16le": "utf16le_general_ci", "utf8mb4": "utf8mb4_0900_ai_ci", "armscii8": "armscii8_general_ci", "macroman": "macroman_general_ci"}atlas-0.7.2/sql/mysql/internal/mysqlversion/is/charset2collate.maria000066400000000000000000000021751431455511600256760ustar00rootroot00000000000000{"big5":"big5_chinese_ci", "dec8":"dec8_swedish_ci", "cp850":"cp850_general_ci", "hp8":"hp8_english_ci", "koi8r":"koi8r_general_ci", 
"latin1":"latin1_swedish_ci", "latin2":"latin2_general_ci", "swe7":"swe7_swedish_ci", "ascii":"ascii_general_ci", "ujis":"ujis_japanese_ci", "sjis":"sjis_japanese_ci", "hebrew":"hebrew_general_ci", "tis620":"tis620_thai_ci", "euckr":"euckr_korean_ci", "koi8u":"koi8u_general_ci", "gb2312":"gb2312_chinese_ci", "greek":"greek_general_ci", "cp1250":"cp1250_general_ci", "gbk":"gbk_chinese_ci", "latin5":"latin5_turkish_ci", "armscii8":"armscii8_general_ci", "utf8mb3":"utf8mb3_general_ci", "ucs2":"ucs2_general_ci", "cp866":"cp866_general_ci", "keybcs2":"keybcs2_general_ci", "macce":"macce_general_ci", "macroman":"macroman_general_ci", "cp852":"cp852_general_ci", "latin7":"latin7_general_ci", "utf8mb4":"utf8mb4_general_ci", "cp1251":"cp1251_general_ci", "utf16":"utf16_general_ci", "utf16le":"utf16le_general_ci", "cp1256":"cp1256_general_ci", "cp1257":"cp1257_general_ci", "utf32":"utf32_general_ci", "binary":"binary", "geostd8":"geostd8_general_ci", "cp932":"cp932_japanese_ci", "eucjpms":"eucjpms_japanese_ci"}atlas-0.7.2/sql/mysql/internal/mysqlversion/is/collate2charset000066400000000000000000000177341431455511600246150ustar00rootroot00000000000000{"binary": "binary", "gbk_bin": "gbk", "hp8_bin": "hp8", "big5_bin": "big5", "dec8_bin": "dec8", "sjis_bin": "sjis", "swe7_bin": "swe7", "ucs2_bin": "ucs2", "ujis_bin": "ujis", "utf8_bin": "utf8", "ascii_bin": "ascii", "cp850_bin": "cp850", "cp852_bin": "cp852", "cp866_bin": "cp866", "cp932_bin": "cp932", "euckr_bin": "euckr", "greek_bin": "greek", "koi8r_bin": "koi8r", "koi8u_bin": "koi8u", "macce_bin": "macce", "utf16_bin": "utf16", "utf32_bin": "utf32", "cp1250_bin": "cp1250", "cp1251_bin": "cp1251", "cp1256_bin": "cp1256", "cp1257_bin": "cp1257", "gb2312_bin": "gb2312", "hebrew_bin": "hebrew", "latin1_bin": "latin1", "latin2_bin": "latin2", "latin5_bin": "latin5", "latin7_bin": "latin7", "tis620_bin": "tis620", "eucjpms_bin": "eucjpms", "gb18030_bin": "gb18030", "geostd8_bin": "geostd8", "keybcs2_bin": "keybcs2", 
"utf16le_bin": "utf16le", "utf8mb4_bin": "utf8mb4", "armscii8_bin": "armscii8", "macroman_bin": "macroman", "ucs2_czech_ci": "ucs2", "ucs2_roman_ci": "ucs2", "utf8_czech_ci": "utf8", "utf8_roman_ci": "utf8", "gbk_chinese_ci": "gbk", "hp8_english_ci": "hp8", "tis620_thai_ci": "tis620", "ucs2_danish_ci": "ucs2", "ucs2_polish_ci": "ucs2", "ucs2_slovak_ci": "ucs2", "utf16_czech_ci": "utf16", "utf16_roman_ci": "utf16", "utf32_czech_ci": "utf32", "utf32_roman_ci": "utf32", "utf8_danish_ci": "utf8", "utf8_polish_ci": "utf8", "utf8_slovak_ci": "utf8", "big5_chinese_ci": "big5", "cp1250_czech_cs": "cp1250", "dec8_swedish_ci": "dec8", "euckr_korean_ci": "euckr", "latin2_czech_cs": "latin2", "swe7_swedish_ci": "swe7", "ucs2_general_ci": "ucs2", "ucs2_german2_ci": "ucs2", "ucs2_latvian_ci": "ucs2", "ucs2_persian_ci": "ucs2", "ucs2_sinhala_ci": "ucs2", "ucs2_spanish_ci": "ucs2", "ucs2_swedish_ci": "ucs2", "ucs2_turkish_ci": "ucs2", "ucs2_unicode_ci": "ucs2", "utf16_danish_ci": "utf16", "utf16_polish_ci": "utf16", "utf16_slovak_ci": "utf16", "utf32_danish_ci": "utf32", "utf32_polish_ci": "utf32", "utf32_slovak_ci": "utf32", "utf8_general_ci": "utf8", "utf8_german2_ci": "utf8", "utf8_latvian_ci": "utf8", "utf8_persian_ci": "utf8", "utf8_sinhala_ci": "utf8", "utf8_spanish_ci": "utf8", "utf8_swedish_ci": "utf8", "utf8_tolower_ci": "utf8", "utf8_turkish_ci": "utf8", "utf8_unicode_ci": "utf8", "ascii_general_ci": "ascii", "cp1250_polish_ci": "cp1250", "cp850_general_ci": "cp850", "cp852_general_ci": "cp852", "cp866_general_ci": "cp866", "greek_general_ci": "greek", "koi8r_general_ci": "koi8r", "koi8u_general_ci": "koi8u", "latin1_danish_ci": "latin1", "macce_general_ci": "macce", "sjis_japanese_ci": "sjis", "ucs2_croatian_ci": "ucs2", "ucs2_estonian_ci": "ucs2", "ucs2_romanian_ci": "ucs2", "ucs2_spanish2_ci": "ucs2", "ujis_japanese_ci": "ujis", "utf16_general_ci": "utf16", "utf16_german2_ci": "utf16", "utf16_latvian_ci": "utf16", "utf16_persian_ci": "utf16", "utf16_sinhala_ci": 
"utf16", "utf16_spanish_ci": "utf16", "utf16_swedish_ci": "utf16", "utf16_turkish_ci": "utf16", "utf16_unicode_ci": "utf16", "utf32_general_ci": "utf32", "utf32_german2_ci": "utf32", "utf32_latvian_ci": "utf32", "utf32_persian_ci": "utf32", "utf32_sinhala_ci": "utf32", "utf32_spanish_ci": "utf32", "utf32_swedish_ci": "utf32", "utf32_turkish_ci": "utf32", "utf32_unicode_ci": "utf32", "utf8_croatian_ci": "utf8", "utf8_estonian_ci": "utf8", "utf8_romanian_ci": "utf8", "utf8_spanish2_ci": "utf8", "utf8mb4_0900_bin": "utf8mb4", "utf8mb4_czech_ci": "utf8mb4", "utf8mb4_roman_ci": "utf8mb4", "cp1250_general_ci": "cp1250", "cp1251_general_ci": "cp1251", "cp1251_general_cs": "cp1251", "cp1256_general_ci": "cp1256", "cp1257_general_ci": "cp1257", "cp932_japanese_ci": "cp932", "gb2312_chinese_ci": "gb2312", "hebrew_general_ci": "hebrew", "latin1_general_ci": "latin1", "latin1_general_cs": "latin1", "latin1_german1_ci": "latin1", "latin1_german2_ci": "latin1", "latin1_spanish_ci": "latin1", "latin1_swedish_ci": "latin1", "latin2_general_ci": "latin2", "latin5_turkish_ci": "latin5", "latin7_general_ci": "latin7", "latin7_general_cs": "latin7", "ucs2_esperanto_ci": "ucs2", "ucs2_hungarian_ci": "ucs2", "ucs2_icelandic_ci": "ucs2", "ucs2_slovenian_ci": "ucs2", "utf16_croatian_ci": "utf16", "utf16_estonian_ci": "utf16", "utf16_romanian_ci": "utf16", "utf16_spanish2_ci": "utf16", "utf32_croatian_ci": "utf32", "utf32_estonian_ci": "utf32", "utf32_romanian_ci": "utf32", "utf32_spanish2_ci": "utf32", "utf8_esperanto_ci": "utf8", "utf8_hungarian_ci": "utf8", "utf8_icelandic_ci": "utf8", "utf8_slovenian_ci": "utf8", "utf8mb4_danish_ci": "utf8mb4", "utf8mb4_polish_ci": "utf8mb4", "utf8mb4_slovak_ci": "utf8mb4", "cp1250_croatian_ci": "cp1250", "gb18030_chinese_ci": "gb18030", "geostd8_general_ci": "geostd8", "keybcs2_general_ci": "keybcs2", "latin2_croatian_ci": "latin2", "latin7_estonian_cs": "latin7", "ucs2_lithuanian_ci": "ucs2", "ucs2_vietnamese_ci": "ucs2", "utf16_esperanto_ci": 
"utf16", "utf16_hungarian_ci": "utf16", "utf16_icelandic_ci": "utf16", "utf16_slovenian_ci": "utf16", "utf16le_general_ci": "utf16le", "utf32_esperanto_ci": "utf32", "utf32_hungarian_ci": "utf32", "utf32_icelandic_ci": "utf32", "utf32_slovenian_ci": "utf32", "utf8_lithuanian_ci": "utf8", "utf8_vietnamese_ci": "utf8", "utf8mb4_0900_ai_ci": "utf8mb4", "utf8mb4_0900_as_ci": "utf8mb4", "utf8mb4_0900_as_cs": "utf8mb4", "utf8mb4_general_ci": "utf8mb4", "utf8mb4_german2_ci": "utf8mb4", "utf8mb4_latvian_ci": "utf8mb4", "utf8mb4_persian_ci": "utf8mb4", "utf8mb4_sinhala_ci": "utf8mb4", "utf8mb4_spanish_ci": "utf8mb4", "utf8mb4_swedish_ci": "utf8mb4", "utf8mb4_turkish_ci": "utf8mb4", "utf8mb4_unicode_ci": "utf8mb4", "armscii8_general_ci": "armscii8", "cp1251_bulgarian_ci": "cp1251", "cp1251_ukrainian_ci": "cp1251", "eucjpms_japanese_ci": "eucjpms", "latin2_hungarian_ci": "latin2", "macroman_general_ci": "macroman", "ucs2_unicode_520_ci": "ucs2", "utf16_lithuanian_ci": "utf16", "utf16_vietnamese_ci": "utf16", "utf32_lithuanian_ci": "utf32", "utf32_vietnamese_ci": "utf32", "utf8_unicode_520_ci": "utf8", "utf8mb4_croatian_ci": "utf8mb4", "utf8mb4_estonian_ci": "utf8mb4", "utf8mb4_romanian_ci": "utf8mb4", "utf8mb4_spanish2_ci": "utf8mb4", "cp1257_lithuanian_ci": "cp1257", "utf16_unicode_520_ci": "utf16", "utf32_unicode_520_ci": "utf32", "utf8mb4_esperanto_ci": "utf8mb4", "utf8mb4_hungarian_ci": "utf8mb4", "utf8mb4_icelandic_ci": "utf8mb4", "utf8mb4_slovenian_ci": "utf8mb4", "utf8mb4_cs_0900_ai_ci": "utf8mb4", "utf8mb4_cs_0900_as_cs": "utf8mb4", "utf8mb4_da_0900_ai_ci": "utf8mb4", "utf8mb4_da_0900_as_cs": "utf8mb4", "utf8mb4_eo_0900_ai_ci": "utf8mb4", "utf8mb4_eo_0900_as_cs": "utf8mb4", "utf8mb4_es_0900_ai_ci": "utf8mb4", "utf8mb4_es_0900_as_cs": "utf8mb4", "utf8mb4_et_0900_ai_ci": "utf8mb4", "utf8mb4_et_0900_as_cs": "utf8mb4", "utf8mb4_hr_0900_ai_ci": "utf8mb4", "utf8mb4_hr_0900_as_cs": "utf8mb4", "utf8mb4_hu_0900_ai_ci": "utf8mb4", "utf8mb4_hu_0900_as_cs": "utf8mb4", 
"utf8mb4_is_0900_ai_ci": "utf8mb4", "utf8mb4_is_0900_as_cs": "utf8mb4", "utf8mb4_ja_0900_as_cs": "utf8mb4", "utf8mb4_la_0900_ai_ci": "utf8mb4", "utf8mb4_la_0900_as_cs": "utf8mb4", "utf8mb4_lithuanian_ci": "utf8mb4", "utf8mb4_lt_0900_ai_ci": "utf8mb4", "utf8mb4_lt_0900_as_cs": "utf8mb4", "utf8mb4_lv_0900_ai_ci": "utf8mb4", "utf8mb4_lv_0900_as_cs": "utf8mb4", "utf8mb4_pl_0900_ai_ci": "utf8mb4", "utf8mb4_pl_0900_as_cs": "utf8mb4", "utf8mb4_ro_0900_ai_ci": "utf8mb4", "utf8mb4_ro_0900_as_cs": "utf8mb4", "utf8mb4_ru_0900_ai_ci": "utf8mb4", "utf8mb4_ru_0900_as_cs": "utf8mb4", "utf8mb4_sk_0900_ai_ci": "utf8mb4", "utf8mb4_sk_0900_as_cs": "utf8mb4", "utf8mb4_sl_0900_ai_ci": "utf8mb4", "utf8mb4_sl_0900_as_cs": "utf8mb4", "utf8mb4_sv_0900_ai_ci": "utf8mb4", "utf8mb4_sv_0900_as_cs": "utf8mb4", "utf8mb4_tr_0900_ai_ci": "utf8mb4", "utf8mb4_tr_0900_as_cs": "utf8mb4", "utf8mb4_vi_0900_ai_ci": "utf8mb4", "utf8mb4_vi_0900_as_cs": "utf8mb4", "utf8mb4_vietnamese_ci": "utf8mb4", "utf8mb4_zh_0900_as_cs": "utf8mb4", "gb18030_unicode_520_ci": "gb18030", "utf8mb4_unicode_520_ci": "utf8mb4", "ucs2_general_mysql500_ci": "ucs2", "utf8_general_mysql500_ci": "utf8", "utf8mb4_de_pb_0900_ai_ci": "utf8mb4", "utf8mb4_de_pb_0900_as_cs": "utf8mb4", "utf8mb4_ja_0900_as_cs_ks": "utf8mb4", "utf8mb4_es_trad_0900_ai_ci": "utf8mb4", "utf8mb4_es_trad_0900_as_cs": "utf8mb4"}atlas-0.7.2/sql/mysql/internal/mysqlversion/is/collate2charset.maria000066400000000000000000000224671431455511600257040ustar00rootroot00000000000000{"big5_chinese_ci":"big5", "big5_bin":"big5", "big5_chinese_nopad_ci":"big5", "big5_nopad_bin":"big5", "dec8_swedish_ci":"dec8", "dec8_bin":"dec8", "dec8_swedish_nopad_ci":"dec8", "dec8_nopad_bin":"dec8", "cp850_general_ci":"cp850", "cp850_bin":"cp850", "cp850_general_nopad_ci":"cp850", "cp850_nopad_bin":"cp850", "hp8_english_ci":"hp8", "hp8_bin":"hp8", "hp8_english_nopad_ci":"hp8", "hp8_nopad_bin":"hp8", "koi8r_general_ci":"koi8r", "koi8r_bin":"koi8r", "koi8r_general_nopad_ci":"koi8r", 
"koi8r_nopad_bin":"koi8r", "latin1_german1_ci":"latin1", "latin1_swedish_ci":"latin1", "latin1_danish_ci":"latin1", "latin1_german2_ci":"latin1", "latin1_bin":"latin1", "latin1_general_ci":"latin1", "latin1_general_cs":"latin1", "latin1_spanish_ci":"latin1", "latin1_swedish_nopad_ci":"latin1", "latin1_nopad_bin":"latin1", "latin2_czech_cs":"latin2", "latin2_general_ci":"latin2", "latin2_hungarian_ci":"latin2", "latin2_croatian_ci":"latin2", "latin2_bin":"latin2", "latin2_general_nopad_ci":"latin2", "latin2_nopad_bin":"latin2", "swe7_swedish_ci":"swe7", "swe7_bin":"swe7", "swe7_swedish_nopad_ci":"swe7", "swe7_nopad_bin":"swe7", "ascii_general_ci":"ascii", "ascii_bin":"ascii", "ascii_general_nopad_ci":"ascii", "ascii_nopad_bin":"ascii", "ujis_japanese_ci":"ujis", "ujis_bin":"ujis", "ujis_japanese_nopad_ci":"ujis", "ujis_nopad_bin":"ujis", "sjis_japanese_ci":"sjis", "sjis_bin":"sjis", "sjis_japanese_nopad_ci":"sjis", "sjis_nopad_bin":"sjis", "hebrew_general_ci":"hebrew", "hebrew_bin":"hebrew", "hebrew_general_nopad_ci":"hebrew", "hebrew_nopad_bin":"hebrew", "tis620_thai_ci":"tis620", "tis620_bin":"tis620", "tis620_thai_nopad_ci":"tis620", "tis620_nopad_bin":"tis620", "euckr_korean_ci":"euckr", "euckr_bin":"euckr", "euckr_korean_nopad_ci":"euckr", "euckr_nopad_bin":"euckr", "koi8u_general_ci":"koi8u", "koi8u_bin":"koi8u", "koi8u_general_nopad_ci":"koi8u", "koi8u_nopad_bin":"koi8u", "gb2312_chinese_ci":"gb2312", "gb2312_bin":"gb2312", "gb2312_chinese_nopad_ci":"gb2312", "gb2312_nopad_bin":"gb2312", "greek_general_ci":"greek", "greek_bin":"greek", "greek_general_nopad_ci":"greek", "greek_nopad_bin":"greek", "cp1250_general_ci":"cp1250", "cp1250_czech_cs":"cp1250", "cp1250_croatian_ci":"cp1250", "cp1250_bin":"cp1250", "cp1250_polish_ci":"cp1250", "cp1250_general_nopad_ci":"cp1250", "cp1250_nopad_bin":"cp1250", "gbk_chinese_ci":"gbk", "gbk_bin":"gbk", "gbk_chinese_nopad_ci":"gbk", "gbk_nopad_bin":"gbk", "latin5_turkish_ci":"latin5", "latin5_bin":"latin5", 
"latin5_turkish_nopad_ci":"latin5", "latin5_nopad_bin":"latin5", "armscii8_general_ci":"armscii8", "armscii8_bin":"armscii8", "armscii8_general_nopad_ci":"armscii8", "armscii8_nopad_bin":"armscii8", "utf8mb3_general_ci":"utf8mb3", "utf8mb3_bin":"utf8mb3", "utf8mb3_unicode_ci":"utf8mb3", "utf8mb3_icelandic_ci":"utf8mb3", "utf8mb3_latvian_ci":"utf8mb3", "utf8mb3_romanian_ci":"utf8mb3", "utf8mb3_slovenian_ci":"utf8mb3", "utf8mb3_polish_ci":"utf8mb3", "utf8mb3_estonian_ci":"utf8mb3", "utf8mb3_spanish_ci":"utf8mb3", "utf8mb3_swedish_ci":"utf8mb3", "utf8mb3_turkish_ci":"utf8mb3", "utf8mb3_czech_ci":"utf8mb3", "utf8mb3_danish_ci":"utf8mb3", "utf8mb3_lithuanian_ci":"utf8mb3", "utf8mb3_slovak_ci":"utf8mb3", "utf8mb3_spanish2_ci":"utf8mb3", "utf8mb3_roman_ci":"utf8mb3", "utf8mb3_persian_ci":"utf8mb3", "utf8mb3_esperanto_ci":"utf8mb3", "utf8mb3_hungarian_ci":"utf8mb3", "utf8mb3_sinhala_ci":"utf8mb3", "utf8mb3_german2_ci":"utf8mb3", "utf8mb3_croatian_mysql561_ci":"utf8mb3", "utf8mb3_unicode_520_ci":"utf8mb3", "utf8mb3_vietnamese_ci":"utf8mb3", "utf8mb3_general_mysql500_ci":"utf8mb3", "utf8mb3_croatian_ci":"utf8mb3", "utf8mb3_myanmar_ci":"utf8mb3", "utf8mb3_thai_520_w2":"utf8mb3", "utf8mb3_general_nopad_ci":"utf8mb3", "utf8mb3_nopad_bin":"utf8mb3", "utf8mb3_unicode_nopad_ci":"utf8mb3", "utf8mb3_unicode_520_nopad_ci":"utf8mb3", "ucs2_general_ci":"ucs2", "ucs2_bin":"ucs2", "ucs2_unicode_ci":"ucs2", "ucs2_icelandic_ci":"ucs2", "ucs2_latvian_ci":"ucs2", "ucs2_romanian_ci":"ucs2", "ucs2_slovenian_ci":"ucs2", "ucs2_polish_ci":"ucs2", "ucs2_estonian_ci":"ucs2", "ucs2_spanish_ci":"ucs2", "ucs2_swedish_ci":"ucs2", "ucs2_turkish_ci":"ucs2", "ucs2_czech_ci":"ucs2", "ucs2_danish_ci":"ucs2", "ucs2_lithuanian_ci":"ucs2", "ucs2_slovak_ci":"ucs2", "ucs2_spanish2_ci":"ucs2", "ucs2_roman_ci":"ucs2", "ucs2_persian_ci":"ucs2", "ucs2_esperanto_ci":"ucs2", "ucs2_hungarian_ci":"ucs2", "ucs2_sinhala_ci":"ucs2", "ucs2_german2_ci":"ucs2", "ucs2_croatian_mysql561_ci":"ucs2", "ucs2_unicode_520_ci":"ucs2", 
"ucs2_vietnamese_ci":"ucs2", "ucs2_general_mysql500_ci":"ucs2", "ucs2_croatian_ci":"ucs2", "ucs2_myanmar_ci":"ucs2", "ucs2_thai_520_w2":"ucs2", "ucs2_general_nopad_ci":"ucs2", "ucs2_nopad_bin":"ucs2", "ucs2_unicode_nopad_ci":"ucs2", "ucs2_unicode_520_nopad_ci":"ucs2", "cp866_general_ci":"cp866", "cp866_bin":"cp866", "cp866_general_nopad_ci":"cp866", "cp866_nopad_bin":"cp866", "keybcs2_general_ci":"keybcs2", "keybcs2_bin":"keybcs2", "keybcs2_general_nopad_ci":"keybcs2", "keybcs2_nopad_bin":"keybcs2", "macce_general_ci":"macce", "macce_bin":"macce", "macce_general_nopad_ci":"macce", "macce_nopad_bin":"macce", "macroman_general_ci":"macroman", "macroman_bin":"macroman", "macroman_general_nopad_ci":"macroman", "macroman_nopad_bin":"macroman", "cp852_general_ci":"cp852", "cp852_bin":"cp852", "cp852_general_nopad_ci":"cp852", "cp852_nopad_bin":"cp852", "latin7_estonian_cs":"latin7", "latin7_general_ci":"latin7", "latin7_general_cs":"latin7", "latin7_bin":"latin7", "latin7_general_nopad_ci":"latin7", "latin7_nopad_bin":"latin7", "utf8mb4_general_ci":"utf8mb4", "utf8mb4_bin":"utf8mb4", "utf8mb4_unicode_ci":"utf8mb4", "utf8mb4_icelandic_ci":"utf8mb4", "utf8mb4_latvian_ci":"utf8mb4", "utf8mb4_romanian_ci":"utf8mb4", "utf8mb4_slovenian_ci":"utf8mb4", "utf8mb4_polish_ci":"utf8mb4", "utf8mb4_estonian_ci":"utf8mb4", "utf8mb4_spanish_ci":"utf8mb4", "utf8mb4_swedish_ci":"utf8mb4", "utf8mb4_turkish_ci":"utf8mb4", "utf8mb4_czech_ci":"utf8mb4", "utf8mb4_danish_ci":"utf8mb4", "utf8mb4_lithuanian_ci":"utf8mb4", "utf8mb4_slovak_ci":"utf8mb4", "utf8mb4_spanish2_ci":"utf8mb4", "utf8mb4_roman_ci":"utf8mb4", "utf8mb4_persian_ci":"utf8mb4", "utf8mb4_esperanto_ci":"utf8mb4", "utf8mb4_hungarian_ci":"utf8mb4", "utf8mb4_sinhala_ci":"utf8mb4", "utf8mb4_german2_ci":"utf8mb4", "utf8mb4_croatian_mysql561_ci":"utf8mb4", "utf8mb4_unicode_520_ci":"utf8mb4", "utf8mb4_vietnamese_ci":"utf8mb4", "utf8mb4_croatian_ci":"utf8mb4", "utf8mb4_myanmar_ci":"utf8mb4", "utf8mb4_thai_520_w2":"utf8mb4", 
"utf8mb4_general_nopad_ci":"utf8mb4", "utf8mb4_nopad_bin":"utf8mb4", "utf8mb4_unicode_nopad_ci":"utf8mb4", "utf8mb4_unicode_520_nopad_ci":"utf8mb4", "cp1251_bulgarian_ci":"cp1251", "cp1251_ukrainian_ci":"cp1251", "cp1251_bin":"cp1251", "cp1251_general_ci":"cp1251", "cp1251_general_cs":"cp1251", "cp1251_nopad_bin":"cp1251", "cp1251_general_nopad_ci":"cp1251", "utf16_general_ci":"utf16", "utf16_bin":"utf16", "utf16_unicode_ci":"utf16", "utf16_icelandic_ci":"utf16", "utf16_latvian_ci":"utf16", "utf16_romanian_ci":"utf16", "utf16_slovenian_ci":"utf16", "utf16_polish_ci":"utf16", "utf16_estonian_ci":"utf16", "utf16_spanish_ci":"utf16", "utf16_swedish_ci":"utf16", "utf16_turkish_ci":"utf16", "utf16_czech_ci":"utf16", "utf16_danish_ci":"utf16", "utf16_lithuanian_ci":"utf16", "utf16_slovak_ci":"utf16", "utf16_spanish2_ci":"utf16", "utf16_roman_ci":"utf16", "utf16_persian_ci":"utf16", "utf16_esperanto_ci":"utf16", "utf16_hungarian_ci":"utf16", "utf16_sinhala_ci":"utf16", "utf16_german2_ci":"utf16", "utf16_croatian_mysql561_ci":"utf16", "utf16_unicode_520_ci":"utf16", "utf16_vietnamese_ci":"utf16", "utf16_croatian_ci":"utf16", "utf16_myanmar_ci":"utf16", "utf16_thai_520_w2":"utf16", "utf16_general_nopad_ci":"utf16", "utf16_nopad_bin":"utf16", "utf16_unicode_nopad_ci":"utf16", "utf16_unicode_520_nopad_ci":"utf16", "utf16le_general_ci":"utf16le", "utf16le_bin":"utf16le", "utf16le_general_nopad_ci":"utf16le", "utf16le_nopad_bin":"utf16le", "cp1256_general_ci":"cp1256", "cp1256_bin":"cp1256", "cp1256_general_nopad_ci":"cp1256", "cp1256_nopad_bin":"cp1256", "cp1257_lithuanian_ci":"cp1257", "cp1257_bin":"cp1257", "cp1257_general_ci":"cp1257", "cp1257_nopad_bin":"cp1257", "cp1257_general_nopad_ci":"cp1257", "utf32_general_ci":"utf32", "utf32_bin":"utf32", "utf32_unicode_ci":"utf32", "utf32_icelandic_ci":"utf32", "utf32_latvian_ci":"utf32", "utf32_romanian_ci":"utf32", "utf32_slovenian_ci":"utf32", "utf32_polish_ci":"utf32", "utf32_estonian_ci":"utf32", "utf32_spanish_ci":"utf32", 
"utf32_swedish_ci":"utf32", "utf32_turkish_ci":"utf32", "utf32_czech_ci":"utf32", "utf32_danish_ci":"utf32", "utf32_lithuanian_ci":"utf32", "utf32_slovak_ci":"utf32", "utf32_spanish2_ci":"utf32", "utf32_roman_ci":"utf32", "utf32_persian_ci":"utf32", "utf32_esperanto_ci":"utf32", "utf32_hungarian_ci":"utf32", "utf32_sinhala_ci":"utf32", "utf32_german2_ci":"utf32", "utf32_croatian_mysql561_ci":"utf32", "utf32_unicode_520_ci":"utf32", "utf32_vietnamese_ci":"utf32", "utf32_croatian_ci":"utf32", "utf32_myanmar_ci":"utf32", "utf32_thai_520_w2":"utf32", "utf32_general_nopad_ci":"utf32", "utf32_nopad_bin":"utf32", "utf32_unicode_nopad_ci":"utf32", "utf32_unicode_520_nopad_ci":"utf32", "binary":"binary", "geostd8_general_ci":"geostd8", "geostd8_bin":"geostd8", "geostd8_general_nopad_ci":"geostd8", "geostd8_nopad_bin":"geostd8", "cp932_japanese_ci":"cp932", "cp932_bin":"cp932", "cp932_japanese_nopad_ci":"cp932", "cp932_nopad_bin":"cp932", "eucjpms_japanese_ci":"eucjpms", "eucjpms_bin":"eucjpms", "eucjpms_japanese_nopad_ci":"eucjpms", "eucjpms_nopad_bin":"eucjpms"}atlas-0.7.2/sql/mysql/internal/mysqlversion/mysqlversion.go000066400000000000000000000070711431455511600242730ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package mysqlversion import ( "embed" "encoding/json" "fmt" "strings" "golang.org/x/mod/semver" ) // V provides information about MySQL versions. type V string // SupportsCheck reports if the version supports the CHECK // clause, and return the querying for getting them. func (v V) SupportsCheck() bool { u := "8.0.16" if v.Maria() { u = "10.2.1" } return v.GTE(u) } // SupportsIndexExpr reports if the version supports // index expressions (functional key part). 
func (v V) SupportsIndexExpr() bool { return !v.Maria() && v.GTE("8.0.13") } // SupportsDisplayWidth reports if the version supports getting // the display width information from the information schema. func (v V) SupportsDisplayWidth() bool { // MySQL v8.0.19 dropped the display width // information from the information schema return v.Maria() || v.LT("8.0.19") } // SupportsExprDefault reports if the version supports // expressions in the DEFAULT clause on column definition. func (v V) SupportsExprDefault() bool { u := "8.0.13" if v.Maria() { u = "10.2.1" } return v.GTE(u) } // SupportsEnforceCheck reports if the version supports // the ENFORCED option in CHECK constraint syntax. func (v V) SupportsEnforceCheck() bool { return !v.Maria() && v.GTE("8.0.16") } // SupportsGeneratedColumns reports if the version supports // the generated columns in information schema. func (v V) SupportsGeneratedColumns() bool { u := "5.7" if v.Maria() { u = "10.2" } return v.GTE(u) } // SupportsRenameColumn reports if the version supports // the "RENAME COLUMN" clause. func (v V) SupportsRenameColumn() bool { u := "8" if v.Maria() { u = "10.5.2" } return v.GTE(u) } // SupportsIndexComment reports if the version // supports comments on indexes. func (v V) SupportsIndexComment() bool { // According to Oracle release notes, comments on // indexes were added in version 5.5.3. return v.Maria() || v.GTE("5.5.3") } // CharsetToCollate returns the mapping from charset to its default collation. func (v V) CharsetToCollate() (map[string]string, error) { name := "is/charset2collate" if v.Maria() { name += ".maria" } return decode(name) } // CollateToCharset returns the mapping from a collation to its charset. func (v V) CollateToCharset() (map[string]string, error) { name := "is/collate2charset" if v.Maria() { name += ".maria" } return decode(name) } // Maria reports if the MySQL version is MariaDB. 
func (v V) Maria() bool { return strings.Index(string(v), "MariaDB") > 0 } // TiDB reports if the MySQL version is TiDB. func (v V) TiDB() bool { return strings.Index(string(v), "TiDB") > 0 } // Compare returns an integer comparing two versions according to // semantic version precedence. func (v V) Compare(w string) int { u := string(v) switch { case v.Maria(): u = u[:strings.Index(u, "MariaDB")-1] case v.TiDB(): u = u[:strings.Index(u, "TiDB")-1] } return semver.Compare("v"+u, "v"+w) } // GTE reports if the version is >= w. func (v V) GTE(w string) bool { return v.Compare(w) >= 0 } // LT reports if the version is < w. func (v V) LT(w string) bool { return v.Compare(w) == -1 } //go:embed is/* var encoding embed.FS func decode(name string) (map[string]string, error) { f, err := encoding.Open(name) if err != nil { return nil, err } var m map[string]string if err := json.NewDecoder(f).Decode(&m); err != nil { return nil, fmt.Errorf("decode %q", name) } return m, nil } atlas-0.7.2/sql/mysql/migrate.go000066400000000000000000000557711431455511600165730ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package mysql import ( "context" "errors" "fmt" "strconv" "strings" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" ) // A planApply provides migration capabilities for schema elements. type planApply struct{ conn } // PlanChanges returns a migration plan for the given schema changes. func (p *planApply) PlanChanges(_ context.Context, name string, changes []schema.Change, opts ...migrate.PlanOption) (*migrate.Plan, error) { s := &state{ conn: p.conn, Plan: migrate.Plan{ Name: name, // A plan is reversible, if all // its changes are reversible. Reversible: true, // All statements generated by state will cause implicit commit. 
// https://dev.mysql.com/doc/refman/8.0/en/implicit-commit.html Transactional: false, }, } for _, o := range opts { o(&s.PlanOptions) } if err := s.plan(changes); err != nil { return nil, err } for _, c := range s.Changes { if c.Reverse == "" { s.Reversible = false break } } return &s.Plan, nil } // ApplyChanges applies the changes on the database. An error is returned // if the driver is unable to produce a plan to it, or one of the statements // is failed or unsupported. func (p *planApply) ApplyChanges(ctx context.Context, changes []schema.Change, opts ...migrate.PlanOption) error { return sqlx.ApplyChanges(ctx, changes, p, opts...) } // state represents the state of a planning. It is not part of // planApply so that multiple planning/applying can be called // in parallel. type state struct { conn migrate.Plan migrate.PlanOptions } // plan builds the migration plan for applying the // given changes on the attached connection. func (s *state) plan(changes []schema.Change) error { if s.SchemaQualifier != nil { if err := sqlx.CheckChangesScope(changes); err != nil { return err } } planned, err := s.topLevel(changes) if err != nil { return err } planned, err = sqlx.DetachCycles(planned) if err != nil { return err } for _, c := range planned { switch c := c.(type) { case *schema.AddTable: err = s.addTable(c) case *schema.DropTable: s.dropTable(c) case *schema.ModifyTable: err = s.modifyTable(c) case *schema.RenameTable: s.renameTable(c) default: err = fmt.Errorf("unsupported change %T", c) } if err != nil { return err } } return nil } // topLevel appends first the changes for creating or dropping schemas (top-level schema elements). 
func (s *state) topLevel(changes []schema.Change) ([]schema.Change, error) { planned := make([]schema.Change, 0, len(changes)) for _, c := range changes { switch c := c.(type) { case *schema.AddSchema: b := s.Build("CREATE DATABASE") if sqlx.Has(c.Extra, &schema.IfNotExists{}) { b.P("IF NOT EXISTS") } b.Ident(c.S.Name) // Schema was created with CHARSET and it is not the default database character set. if a := (schema.Charset{}); sqlx.Has(c.S.Attrs, &a) && a.V != "" && a.V != s.charset { b.P("CHARSET", a.V) } // Schema was created with COLLATE and it is not the default database collation. if a := (schema.Collation{}); sqlx.Has(c.S.Attrs, &a) && a.V != "" && a.V != s.collate { b.P("COLLATE", a.V) } s.append(&migrate.Change{ Cmd: b.String(), Source: c, Reverse: s.Build("DROP DATABASE").Ident(c.S.Name).String(), Comment: fmt.Sprintf("add new schema named %q", c.S.Name), }) case *schema.DropSchema: b := s.Build("DROP DATABASE") if sqlx.Has(c.Extra, &schema.IfExists{}) { b.P("IF EXISTS") } b.Ident(c.S.Name) s.append(&migrate.Change{ Cmd: b.String(), Source: c, Comment: fmt.Sprintf("drop schema named %q", c.S.Name), }) case *schema.ModifySchema: if err := s.modifySchema(c); err != nil { return nil, err } default: planned = append(planned, c) } } return planned, nil } // modifySchema builds and appends the migrate.Changes for bringing // the schema into its modified state. func (s *state) modifySchema(modify *schema.ModifySchema) error { b, r := s.Build(), s.Build() for _, change := range modify.Changes { switch change := change.(type) { // Add schema attributes to an existing schema only if // it is different from the default server configuration. 
case *schema.AddAttr: switch a := change.A.(type) { case *schema.Charset: if a.V != "" && a.V != s.charset { b.P("CHARSET", a.V) r.P("CHARSET", s.charset) } case *schema.Collation: if a.V != "" && a.V != s.collate { b.P("COLLATE", a.V) r.P("COLLATE", s.collate) } default: return fmt.Errorf("unexpected schema AddAttr: %T", a) } case *schema.ModifyAttr: switch to := change.To.(type) { case *schema.Charset: from, ok := change.From.(*schema.Charset) if !ok { return fmt.Errorf("mismatch ModifyAttr attributes: %T != %T", change.To, change.From) } b.P("CHARSET", to.V) r.P("CHARSET", from.V) case *schema.Collation: from, ok := change.From.(*schema.Collation) if !ok { return fmt.Errorf("mismatch ModifyAttr attributes: %T != %T", change.To, change.From) } b.P("COLLATE", to.V) r.P("COLLATE", from.V) default: return fmt.Errorf("unexpected schema ModifyAttr: %T", change) } default: return fmt.Errorf("unsupported ModifySchema change %T", change) } } if b.Len() > 0 { bs := s.Build("ALTER DATABASE").Ident(modify.S.Name) rs := bs.Clone() bs.WriteString(b.String()) rs.WriteString(r.String()) s.append(&migrate.Change{ Cmd: bs.String(), Reverse: rs.String(), Source: modify, Comment: fmt.Sprintf("modify %q schema", modify.S.Name), }) } return nil } // addTable builds and appends a migration change // for creating a table in a schema. 
func (s *state) addTable(add *schema.AddTable) error { var ( errs []string b = s.Build("CREATE TABLE") ) if sqlx.Has(add.Extra, &schema.IfNotExists{}) { b.P("IF NOT EXISTS") } b.Table(add.T) if len(add.T.Columns) == 0 { return fmt.Errorf("table %q has no columns", add.T.Name) } b.Wrap(func(b *sqlx.Builder) { b.MapComma(add.T.Columns, func(i int, b *sqlx.Builder) { if err := s.column(b, add.T, add.T.Columns[i]); err != nil { errs = append(errs, err.Error()) } }) if pk := add.T.PrimaryKey; pk != nil { b.Comma().P("PRIMARY KEY") indexParts(b, pk.Parts) } if len(add.T.Indexes) > 0 { b.Comma() } b.MapComma(add.T.Indexes, func(i int, b *sqlx.Builder) { idx := add.T.Indexes[i] index(b, idx) }) if len(add.T.ForeignKeys) > 0 { b.Comma() if err := s.fks(b, add.T.ForeignKeys...); err != nil { errs = append(errs, err.Error()) } } for _, attr := range add.T.Attrs { if c, ok := attr.(*schema.Check); ok { b.Comma() s.check(b, c) } } }) if len(errs) > 0 { return fmt.Errorf("create table %q: %s", add.T.Name, strings.Join(errs, ", ")) } s.tableAttr(b, add, add.T.Attrs...) s.append(&migrate.Change{ Cmd: b.String(), Source: add, Reverse: s.Build("DROP TABLE").Table(add.T).String(), Comment: fmt.Sprintf("create %q table", add.T.Name), }) return nil } // dropTable builds and appends the migrate.Change // for dropping a table from a schema. func (s *state) dropTable(drop *schema.DropTable) { b := s.Build("DROP TABLE") if sqlx.Has(drop.Extra, &schema.IfExists{}) { b.P("IF EXISTS") } b.Table(drop.T) s.append(&migrate.Change{ Cmd: b.String(), Source: drop, Comment: fmt.Sprintf("drop %q table", drop.T.Name), }) } // modifyTable builds and appends the migration changes for // bringing the table into its modified state. 
func (s *state) modifyTable(modify *schema.ModifyTable) error { var changes [2][]schema.Change if len(modify.T.Columns) == 0 { return fmt.Errorf("table %q has no columns; drop the table instead", modify.T.Name) } for _, change := range skipAutoChanges(modify.Changes) { switch change := change.(type) { // Foreign-key modification is translated into 2 steps. // Dropping the current foreign key and creating a new one. case *schema.ModifyForeignKey: // DROP and ADD of the same constraint cannot be mixed // on the ALTER TABLE command. changes[0] = append(changes[0], &schema.DropForeignKey{ F: change.From, }) // Drop the auto-created index for referenced if the reference was changed. if change.Change.Is(schema.ChangeRefTable | schema.ChangeRefColumn) { changes[0] = append(changes[0], &schema.DropIndex{ I: &schema.Index{ Name: change.From.Symbol, Table: modify.T, }, }) } changes[1] = append(changes[1], &schema.AddForeignKey{ F: change.To, }) // Index modification requires rebuilding the index. case *schema.ModifyIndex: changes[0] = append(changes[0], &schema.DropIndex{ I: change.From, }) changes[1] = append(changes[1], &schema.AddIndex{ I: change.To, }) case *schema.DropAttr: return fmt.Errorf("unsupported change type: %v", change.A) default: changes[1] = append(changes[1], change) } } for i := range changes { if len(changes[i]) > 0 { if err := s.alterTable(modify.T, changes[i]); err != nil { return err } } } return nil } // alterTable modifies the given table by executing on it a list of // changes in one SQL statement. 
func (s *state) alterTable(t *schema.Table, changes []schema.Change) error { var ( reverse []schema.Change reversible = true ) build := func(changes []schema.Change) (string, error) { b := s.Build("ALTER TABLE").Table(t) err := b.MapCommaErr(changes, func(i int, b *sqlx.Builder) error { switch change := changes[i].(type) { case *schema.AddColumn: b.P("ADD COLUMN") if err := s.column(b, t, change.C); err != nil { return err } reverse = append(reverse, &schema.DropColumn{C: change.C}) case *schema.ModifyColumn: if err := checkChangeGenerated(change.From, change.To); err != nil { return err } b.P("MODIFY COLUMN") if err := s.column(b, t, change.To); err != nil { return err } reverse = append(reverse, &schema.ModifyColumn{ From: change.To, To: change.From, Change: change.Change, }) case *schema.RenameColumn: if s.SupportsRenameColumn() { b.P("RENAME COLUMN").Ident(change.From.Name).P("TO").Ident(change.To.Name) } else { b.P("CHANGE COLUMN").Ident(change.From.Name) if err := s.column(b, t, change.To); err != nil { return err } } reverse = append(reverse, &schema.RenameColumn{From: change.To, To: change.From}) case *schema.DropColumn: b.P("DROP COLUMN").Ident(change.C.Name) reverse = append(reverse, &schema.AddColumn{C: change.C}) case *schema.AddIndex: b.P("ADD") index(b, change.I) reverse = append(reverse, &schema.DropIndex{I: change.I}) case *schema.RenameIndex: b.P("RENAME INDEX").Ident(change.From.Name).P("TO").Ident(change.To.Name) reverse = append(reverse, &schema.RenameIndex{From: change.To, To: change.From}) case *schema.DropIndex: b.P("DROP INDEX").Ident(change.I.Name) reverse = append(reverse, &schema.AddIndex{I: change.I}) case *schema.AddForeignKey: b.P("ADD") if err := s.fks(b, change.F); err != nil { return err } reverse = append(reverse, &schema.DropForeignKey{F: change.F}) case *schema.DropForeignKey: b.P("DROP FOREIGN KEY").Ident(change.F.Symbol) reverse = append(reverse, &schema.AddForeignKey{F: change.F}) case *schema.AddAttr: s.tableAttr(b, change, 
change.A) // Unsupported reverse operation. reversible = false case *schema.ModifyAttr: s.tableAttr(b, change, change.To) reverse = append(reverse, &schema.ModifyAttr{ From: change.To, To: change.From, }) case *schema.AddCheck: s.check(b.P("ADD"), change.C) // Reverse operation is supported if // the constraint name is not generated. if reversible = reversible && change.C.Name != ""; reversible { reverse = append(reverse, &schema.DropCheck{C: change.C}) } case *schema.DropCheck: b.P("DROP CONSTRAINT").Ident(change.C.Name) reverse = append(reverse, &schema.AddCheck{C: change.C}) case *schema.ModifyCheck: switch { case change.From.Name == "": return errors.New("cannot modify unnamed check constraint") case change.From.Name != change.To.Name: return fmt.Errorf("mismatch check constraint names: %q != %q", change.From.Name, change.To.Name) // Enforcement added. case s.SupportsEnforceCheck() && sqlx.Has(change.From.Attrs, &Enforced{}) && !sqlx.Has(change.To.Attrs, &Enforced{}): b.P("ALTER CHECK").Ident(change.From.Name).P("ENFORCED") // Enforcement dropped. case s.SupportsEnforceCheck() && !sqlx.Has(change.From.Attrs, &Enforced{}) && sqlx.Has(change.To.Attrs, &Enforced{}): b.P("ALTER CHECK").Ident(change.From.Name).P("NOT ENFORCED") // Expr was changed. case change.From.Expr != change.To.Expr: b.P("DROP CHECK").Ident(change.From.Name).Comma().P("ADD") s.check(b, change.To) default: return errors.New("unknown check constraint change") } reverse = append(reverse, &schema.ModifyCheck{ From: change.To, To: change.From, }) } return nil }) if err != nil { return "", err } return b.String(), nil } cmd, err := build(changes) if err != nil { return fmt.Errorf("alter table %q: %v", t.Name, err) } change := &migrate.Change{ Cmd: cmd, Source: &schema.ModifyTable{ T: t, Changes: changes, }, Comment: fmt.Sprintf("modify %q table", t.Name), } if reversible { // Changes should be reverted in // a reversed order they were created. 
sqlx.ReverseChanges(reverse) if change.Reverse, err = build(reverse); err != nil { return fmt.Errorf("reversd alter table %q: %v", t.Name, err) } } s.append(change) return nil } func (s *state) renameTable(c *schema.RenameTable) { s.append(&migrate.Change{ Source: c, Comment: fmt.Sprintf("rename a table from %q to %q", c.From.Name, c.To.Name), Cmd: s.Build("RENAME TABLE").Table(c.From).P("TO").Table(c.To).String(), Reverse: s.Build("RENAME TABLE").Table(c.To).P("TO").Table(c.From).String(), }) } func (s *state) column(b *sqlx.Builder, t *schema.Table, c *schema.Column) error { typ, err := FormatType(c.Type.Type) if err != nil { return fmt.Errorf("format type for column %q: %w", c.Name, err) } b.Ident(c.Name).P(typ) if cs := (schema.Charset{}); sqlx.Has(c.Attrs, &cs) { if !supportsCharset(c.Type.Type) { return fmt.Errorf("column %q of type %T does not support the CHARSET attribute", c.Name, c.Type.Type) } // Define the charset explicitly // in case it is not the default. if s.character(t) != cs.V { b.P("CHARSET", cs.V) } } var ( x schema.GeneratedExpr asX = sqlx.Has(c.Attrs, &x) ) if asX { b.P("AS", sqlx.MayWrap(x.Expr), x.Type) } // MariaDB does not accept [NOT NULL | NULL] // as part of the generated columns' syntax. if !asX || !s.Maria() { if !c.Type.Null { b.P("NOT") } b.P("NULL") } s.columnDefault(b, c) // Add manually the JSON_VALID constraint for older // versions < 10.4.3. See Driver.checks for full info. if _, ok := c.Type.Type.(*schema.JSONType); ok && s.Maria() && s.LT("10.4.3") && !sqlx.Has(c.Attrs, &schema.Check{}) { b.P("CHECK").Wrap(func(b *sqlx.Builder) { b.WriteString(fmt.Sprintf("json_valid(`%s`)", c.Name)) }) } for _, a := range c.Attrs { switch a := a.(type) { case *schema.Charset: // CHARSET is handled above in the "data_type" stage. 
case *schema.Collation: if !supportsCharset(c.Type.Type) { return fmt.Errorf("column %q of type %T does not support the COLLATE attribute", c.Name, c.Type.Type) } // Define the collation explicitly // in case it is not the default. if s.collation(t) != a.V { b.P("COLLATE", a.V) } case *OnUpdate: b.P("ON UPDATE", a.A) case *AutoIncrement: b.P("AUTO_INCREMENT") // Auto increment with value should be configured on table options. if a.V > 0 && !sqlx.Has(t.Attrs, &AutoIncrement{}) { t.Attrs = append(t.Attrs, a) } default: s.attr(b, a) } } return nil } func index(b *sqlx.Builder, idx *schema.Index) { var t IndexType if sqlx.Has(idx.Attrs, &t) { t.T = strings.ToUpper(t.T) } switch { case idx.Unique: b.P("UNIQUE") case t.T == IndexTypeFullText || t.T == IndexTypeSpatial: b.P(t.T) } b.P("INDEX").Ident(idx.Name) // Skip BTREE as it is the default type. if t.T == IndexTypeHash { b.P("USING", t.T) } indexParts(b, idx.Parts) if c := (schema.Comment{}); sqlx.Has(idx.Attrs, &c) { b.P("COMMENT", quote(c.Text)) } } func indexParts(b *sqlx.Builder, parts []*schema.IndexPart) { b.Wrap(func(b *sqlx.Builder) { b.MapComma(parts, func(i int, b *sqlx.Builder) { switch part := parts[i]; { case part.C != nil: b.Ident(part.C.Name) case part.X != nil: b.WriteString(sqlx.MayWrap(part.X.(*schema.RawExpr).X)) } if s := (&SubPart{}); sqlx.Has(parts[i].Attrs, s) { b.WriteString(fmt.Sprintf("(%d)", s.Len)) } // Ignore default collation (i.e. 
"ASC") if parts[i].Desc { b.P("DESC") } }) }) } func (s *state) fks(b *sqlx.Builder, fks ...*schema.ForeignKey) error { return b.MapCommaErr(fks, func(i int, b *sqlx.Builder) error { fk := fks[i] if fk.Symbol != "" { b.P("CONSTRAINT").Ident(fk.Symbol) } b.P("FOREIGN KEY") b.Wrap(func(b *sqlx.Builder) { b.MapComma(fk.Columns, func(i int, b *sqlx.Builder) { b.Ident(fk.Columns[i].Name) }) }) b.P("REFERENCES").Table(fk.RefTable) b.Wrap(func(b *sqlx.Builder) { b.MapComma(fk.RefColumns, func(i int, b *sqlx.Builder) { b.Ident(fk.RefColumns[i].Name) }) }) if fk.OnUpdate != "" { b.P("ON UPDATE", string(fk.OnUpdate)) } if fk.OnDelete != "" { b.P("ON DELETE", string(fk.OnDelete)) } if fk.OnUpdate == schema.SetNull || fk.OnDelete == schema.SetNull { for _, c := range fk.Columns { if !c.Type.Null { return fmt.Errorf("foreign key constraint was %[1]q SET NULL, but column %[1]q is NOT NULL", c.Name) } } } return nil }) } // tableAttr writes the given table attribute to the SQL // statement builder when a table is created or altered. func (s *state) tableAttr(b *sqlx.Builder, c schema.Change, attrs ...schema.Attr) { for _, a := range attrs { switch a := a.(type) { case *CreateOptions: b.P(a.V) case *AutoIncrement: // Update the AUTO_INCREMENT if it is an update change or it is not the default. if _, ok := c.(*schema.ModifyAttr); ok || a.V > 1 { b.P("AUTO_INCREMENT", strconv.FormatInt(a.V, 10)) } case *schema.Check: // Ignore CHECK constraints as they are not real attributes, // and handled on CREATE or ALTER. case *schema.Charset: b.P("CHARSET", a.V) case *schema.Collation: b.P("COLLATE", a.V) case *schema.Comment: b.P("COMMENT", quote(a.Text)) } } } // character returns the table character-set from its attributes // or from the default defined in the schema or the database. 
func (s *state) character(t *schema.Table) string { var c schema.Charset if sqlx.Has(t.Attrs, &c) || t.Schema != nil && sqlx.Has(t.Schema.Attrs, &c) { return c.V } return s.charset } // collation returns the table collation from its attributes // or from the default defined in the schema or the database. func (s *state) collation(t *schema.Table) string { var c schema.Collation if sqlx.Has(t.Attrs, &c) || t.Schema != nil && sqlx.Has(t.Schema.Attrs, &c) { return c.V } return s.collate } func (s *state) append(c *migrate.Change) { s.Changes = append(s.Changes, c) } func (*state) attr(b *sqlx.Builder, attrs ...schema.Attr) { for _, a := range attrs { switch a := a.(type) { case *schema.Collation: b.P("COLLATE", a.V) case *schema.Comment: b.P("COMMENT", quote(a.Text)) } } } // columnDefault writes the default value of column to the builder. func (s *state) columnDefault(b *sqlx.Builder, c *schema.Column) { switch x := c.Default.(type) { case *schema.Literal: v := x.V if !hasNumericDefault(c.Type.Type) && !isHex(v) { v = quote(v) } b.P("DEFAULT", v) case *schema.RawExpr: v := x.X // For backwards compatibility, quote raw expressions that are not wrapped // with parens for non-numeric column types (i.e. literals). switch t := c.Type.Type; { case isHex(v), hasNumericDefault(t), strings.HasPrefix(v, "(") && strings.HasSuffix(v, ")"): default: if _, ok := t.(*schema.TimeType); !ok || !strings.HasPrefix(strings.ToLower(v), currentTS) { v = quote(v) } } b.P("DEFAULT", v) } } // Build instantiates a new builder and writes the given phrase to it. func (s *state) Build(phrases ...string) *sqlx.Builder { b := &sqlx.Builder{QuoteChar: '`', Schema: s.SchemaQualifier} return b.P(phrases...) } // skipAutoChanges filters unnecessary changes that are automatically // happened by the database when ALTER TABLE is executed. 
func skipAutoChanges(changes []schema.Change) []schema.Change { var ( dropC = make(map[string]bool) planned = make([]schema.Change, 0, len(changes)) ) for _, c := range changes { if c, ok := c.(*schema.DropColumn); ok { dropC[c.C.Name] = true } } for i, c := range changes { // Simple case for skipping key dropping, if its columns are dropped. // https://dev.mysql.com/doc/refman/8.0/en/alter-table.html#alter-table-add-drop-column c, ok := c.(*schema.DropIndex) if !ok { planned = append(planned, changes[i]) continue } for _, p := range c.I.Parts { if p.C == nil || !dropC[p.C.Name] { planned = append(planned, c) break } } } return planned } // checks writes the CHECK constraint to the builder. func (s *state) check(b *sqlx.Builder, c *schema.Check) { if c.Name != "" { b.P("CONSTRAINT").Ident(c.Name) } b.P("CHECK", sqlx.MayWrap(c.Expr)) if s.SupportsEnforceCheck() && sqlx.Has(c.Attrs, &Enforced{}) { b.P("ENFORCED") } } // supportsCharset reports if the given type supports the CHARSET and COLLATE // clauses. See: https://dev.mysql.com/doc/refman/8.0/en/charset-column.html func supportsCharset(t schema.Type) bool { switch t.(type) { case *schema.StringType, *schema.EnumType, *SetType: return true default: return false } } // checkChangeGenerated checks if the change of a generated column is valid. 
func checkChangeGenerated(from, to *schema.Column) error { var fromX, toX schema.GeneratedExpr switch fromHas, toHas := sqlx.Has(from.Attrs, &fromX), sqlx.Has(to.Attrs, &toX); { case !fromHas && toHas && storedOrVirtual(toX.Type) == virtual: return fmt.Errorf("changing column %q to VIRTUAL generated column is not supported (drop and add is required)", from.Name) case fromHas && !toHas && storedOrVirtual(fromX.Type) == virtual: return fmt.Errorf("changing VIRTUAL generated column %q to non-generated column is not supported (drop and add is required)", from.Name) case fromHas && toHas && storedOrVirtual(fromX.Type) != storedOrVirtual(toX.Type): return fmt.Errorf("changing the store type of generated column %q from %q to %q is not supported", from.Name, storedOrVirtual(fromX.Type), storedOrVirtual(toX.Type)) } return nil } func quote(s string) string { if sqlx.IsQuoted(s, '"', '\'') { return s } return strconv.Quote(s) } atlas-0.7.2/sql/mysql/migrate_test.go000066400000000000000000000752721431455511600176300ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package mysql import ( "context" "strconv" "testing" "ariga.io/atlas/sql/internal/sqltest" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/DATA-DOG/go-sqlmock" "github.com/stretchr/testify/require" ) func TestMigrate_ApplyChanges(t *testing.T) { migrate, mk, err := newMigrate("8.0.13") require.NoError(t, err) mk.ExpectExec(sqltest.Escape("CREATE DATABASE `test` CHARSET latin")). WillReturnResult(sqlmock.NewResult(0, 1)) mk.ExpectExec(sqltest.Escape("DROP DATABASE `atlas`")). WillReturnResult(sqlmock.NewResult(0, 1)) mk.ExpectExec(sqltest.Escape("DROP TABLE `users`")). WillReturnResult(sqlmock.NewResult(0, 0)) mk.ExpectExec(sqltest.Escape("DROP TABLE IF EXISTS `public`.`pets`")). 
WillReturnResult(sqlmock.NewResult(0, 0)) mk.ExpectExec(sqltest.Escape("CREATE TABLE IF NOT EXISTS `pets` (`a` int NOT NULL DEFAULT (int(rand())), `b` bigint NOT NULL DEFAULT 1, `c` bigint NULL, PRIMARY KEY (`a`, `b`), UNIQUE INDEX `b_c_unique` (`b`, `c`) COMMENT \"comment\")")). WillReturnResult(sqlmock.NewResult(0, 0)) mk.ExpectExec(sqltest.Escape("ALTER TABLE `users` DROP INDEX `id_spouse_id`")). WillReturnResult(sqlmock.NewResult(0, 0)) mk.ExpectExec(sqltest.Escape("ALTER TABLE `users` ADD CONSTRAINT `spouse` FOREIGN KEY (`spouse_id`) REFERENCES `users` (`id`) ON DELETE SET NULL, ADD INDEX `id_spouse_id` (`spouse_id`, `id` DESC) COMMENT \"comment\"")). WillReturnResult(sqlmock.NewResult(0, 0)) mk.ExpectExec(sqltest.Escape("CREATE TABLE `posts` (`id` bigint NOT NULL, `author_id` bigint NULL, CONSTRAINT `author` FOREIGN KEY (`author_id`) REFERENCES `users` (`id`))")). WillReturnResult(sqlmock.NewResult(0, 0)) mk.ExpectExec(sqltest.Escape("CREATE TABLE `comments` (`id` bigint NOT NULL, `post_id` bigint NULL, CONSTRAINT `comment` FOREIGN KEY (`post_id`) REFERENCES `posts` (`id`))")). 
WillReturnResult(sqlmock.NewResult(0, 0)) err = migrate.ApplyChanges(context.Background(), []schema.Change{ &schema.AddSchema{S: &schema.Schema{Name: "test", Attrs: []schema.Attr{&schema.Charset{V: "latin"}}}}, &schema.DropSchema{S: &schema.Schema{Name: "atlas", Attrs: []schema.Attr{&schema.Charset{V: "latin"}}}}, &schema.DropTable{T: &schema.Table{Name: "users"}}, &schema.DropTable{T: &schema.Table{Name: "pets", Schema: &schema.Schema{Name: "public"}}, Extra: []schema.Clause{&schema.IfExists{}}}, &schema.AddTable{ T: func() *schema.Table { t := &schema.Table{ Name: "pets", Columns: []*schema.Column{ {Name: "a", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}, Default: &schema.RawExpr{X: "(int(rand()))"}}, {Name: "b", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}, Default: &schema.Literal{V: "1"}}, {Name: "c", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}}, }, } t.PrimaryKey = &schema.Index{ Parts: []*schema.IndexPart{{C: t.Columns[0]}, {C: t.Columns[1]}}, } t.Indexes = []*schema.Index{ {Name: "b_c_unique", Unique: true, Parts: []*schema.IndexPart{{C: t.Columns[1]}, {C: t.Columns[2]}}, Attrs: []schema.Attr{&schema.Comment{Text: "comment"}}}, } return t }(), Extra: []schema.Clause{ &schema.IfNotExists{}, }, }, }) require.NoError(t, err) err = migrate.ApplyChanges(context.Background(), func() []schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "spouse_id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}}, }, } posts := &schema.Table{ Name: "posts", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "author_id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}}, }, } 
posts.ForeignKeys = []*schema.ForeignKey{ {Symbol: "author", Table: posts, Columns: posts.Columns[1:], RefTable: users, RefColumns: users.Columns[:1]}, } comments := &schema.Table{ Name: "comments", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "post_id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}}, }, } comments.ForeignKeys = []*schema.ForeignKey{ {Symbol: "comment", Table: comments, Columns: comments.Columns[1:], RefTable: posts, RefColumns: posts.Columns[:1]}, } return []schema.Change{ &schema.AddTable{T: posts}, &schema.AddTable{T: comments}, &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.AddForeignKey{ F: &schema.ForeignKey{ Symbol: "spouse", Table: users, Columns: users.Columns[1:], RefTable: users, RefColumns: users.Columns[:1], OnDelete: "SET NULL", }, }, &schema.ModifyIndex{ From: &schema.Index{Name: "id_spouse_id", Parts: []*schema.IndexPart{{C: users.Columns[0]}, {C: users.Columns[1]}}}, To: &schema.Index{ Name: "id_spouse_id", Parts: []*schema.IndexPart{ {C: users.Columns[1]}, {C: users.Columns[0], Desc: true}, }, Attrs: []schema.Attr{ &schema.Comment{Text: "comment"}, }, }, }, }, }, } }()) require.NoError(t, err) } func TestMigrate_DetachCycles(t *testing.T) { migrate, mk, err := newMigrate("8.0.13") require.NoError(t, err) mk.ExpectExec(sqltest.Escape("CREATE TABLE `users` (`id` bigint NOT NULL, `workplace_id` bigint NULL)")). WillReturnResult(sqlmock.NewResult(0, 0)) mk.ExpectExec(sqltest.Escape("CREATE TABLE `workplaces` (`id` bigint NOT NULL, `owner_id` bigint NULL)")). WillReturnResult(sqlmock.NewResult(0, 0)) mk.ExpectExec(sqltest.Escape("ALTER TABLE `users` ADD CONSTRAINT `workplace` FOREIGN KEY (`workplace_id`) REFERENCES `workplaces` (`id`)")). 
WillReturnResult(sqlmock.NewResult(0, 0)) mk.ExpectExec(sqltest.Escape("ALTER TABLE `workplaces` ADD CONSTRAINT `owner` FOREIGN KEY (`owner_id`) REFERENCES `users` (`id`)")). WillReturnResult(sqlmock.NewResult(0, 0)) err = migrate.ApplyChanges(context.Background(), func() []schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "workplace_id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}}, }, } workplaces := &schema.Table{ Name: "workplaces", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "owner_id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}, Null: true}}, }, } users.ForeignKeys = []*schema.ForeignKey{ {Symbol: "workplace", Table: users, Columns: users.Columns[1:], RefTable: workplaces, RefColumns: workplaces.Columns[:1]}, } workplaces.ForeignKeys = []*schema.ForeignKey{ {Symbol: "owner", Table: workplaces, Columns: workplaces.Columns[1:], RefTable: users, RefColumns: users.Columns[:1]}, } return []schema.Change{ &schema.AddTable{T: users}, &schema.AddTable{T: workplaces}, } }()) require.NoError(t, err) } func TestPlanChanges(t *testing.T) { tests := []struct { version string changes []schema.Change options []migrate.PlanOption wantPlan *migrate.Plan wantErr bool }{ { changes: []schema.Change{ &schema.AddTable{T: schema.NewTable("users")}, }, // Table "users" has no columns. wantErr: true, }, { changes: []schema.Change{ &schema.ModifyTable{T: schema.NewTable("users")}, }, // Table "users" has no columns; drop the table instead. 
wantErr: true, }, { changes: []schema.Change{ &schema.AddSchema{S: schema.New("test").SetCharset("utf8mb4"), Extra: []schema.Clause{&schema.IfNotExists{}}}, &schema.DropSchema{S: schema.New("test").SetCharset("utf8mb4"), Extra: []schema.Clause{&schema.IfExists{}}}, }, wantPlan: &migrate.Plan{ Reversible: false, Changes: []*migrate.Change{ { Cmd: "CREATE DATABASE IF NOT EXISTS `test` CHARSET utf8mb4", Reverse: "DROP DATABASE `test`", }, { Cmd: "DROP DATABASE IF EXISTS `test`", }, }, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, { Name: "name", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(255)"}}, Indexes: []*schema.Index{ schema.NewIndex("name_index"). AddParts(schema.NewColumnPart(schema.NewColumn("name"))), }, }}, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.DropIndex{ I: schema.NewIndex("name_index"). 
AddParts(schema.NewColumnPart(schema.NewColumn("name"))), }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `users` DROP INDEX `name_index`", Reverse: "ALTER TABLE `users` ADD INDEX `name_index` (`name`)", }, }, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, } pets := &schema.Table{ Name: "pets", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, {Name: "user_id", Type: &schema.ColumnType{ Type: &schema.IntegerType{T: "bigint"}, }, }, }, } fk := &schema.ForeignKey{ Symbol: "user_id", Table: pets, OnUpdate: schema.NoAction, OnDelete: schema.Cascade, RefTable: users, Columns: []*schema.Column{pets.Columns[1]}, RefColumns: []*schema.Column{users.Columns[0]}, } pets.ForeignKeys = []*schema.ForeignKey{fk} return &schema.ModifyTable{ T: pets, Changes: []schema.Change{ &schema.DropForeignKey{ F: fk, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `pets` DROP FOREIGN KEY `user_id`", Reverse: "ALTER TABLE `pets` ADD CONSTRAINT `user_id` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON UPDATE NO ACTION ON DELETE CASCADE", }, }, }, }, { changes: []schema.Change{ &schema.AddSchema{S: &schema.Schema{Name: "test", Attrs: []schema.Attr{&schema.Charset{V: "latin"}}}}, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{{Cmd: "CREATE DATABASE `test` CHARSET latin", Reverse: "DROP DATABASE `test`"}}, }, }, // Default database charset can be omitted. 
{ changes: []schema.Change{ &schema.AddSchema{S: schema.New("test").SetCharset("utf8")}, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{{Cmd: "CREATE DATABASE `test`", Reverse: "DROP DATABASE `test`"}}, }, }, // Add the default database charset on modify can be omitted. { changes: []schema.Change{ &schema.ModifySchema{ S: schema.New("test").SetCharset("utf8"), Changes: []schema.Change{ &schema.AddAttr{A: &schema.Charset{V: "utf8"}}, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, }, }, // Add custom charset. { changes: []schema.Change{ &schema.ModifySchema{ S: schema.New("test").SetCharset("latin1"), Changes: []schema.Change{ &schema.AddAttr{A: &schema.Charset{V: "latin1"}}, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{{Cmd: "ALTER DATABASE `test` CHARSET latin1", Reverse: "ALTER DATABASE `test` CHARSET utf8"}}, }, }, // Modify charset. { changes: []schema.Change{ &schema.ModifySchema{ S: schema.New("test").SetCharset("utf8"), Changes: []schema.Change{ &schema.ModifyAttr{From: &schema.Charset{V: "latin1"}, To: &schema.Charset{V: "utf8"}}, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{{Cmd: "ALTER DATABASE `test` CHARSET utf8", Reverse: "ALTER DATABASE `test` CHARSET latin1"}}, }, }, { changes: []schema.Change{ &schema.DropSchema{S: &schema.Schema{Name: "atlas", Attrs: []schema.Attr{&schema.Charset{V: "latin"}}}}, }, wantPlan: &migrate.Plan{ Changes: []*migrate.Change{{Cmd: "DROP DATABASE `atlas`"}}, }, }, { changes: []schema.Change{ func() *schema.AddTable { t := &schema.Table{ Name: "posts", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&AutoIncrement{}}}, {Name: "text", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}, Null: true}}, {Name: "uuid", Type: &schema.ColumnType{Type: &schema.StringType{T: "char", Size: 36}, Null: true}, Attrs: []schema.Attr{&schema.Charset{V: 
"utf8mb4"}, &schema.Collation{V: "utf8mb4_bin"}}}, }, } t.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: t.Columns[0]}}} return &schema.AddTable{T: t} }(), }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{{Cmd: "CREATE TABLE `posts` (`id` bigint NOT NULL AUTO_INCREMENT, `text` text NULL, `uuid` char(36) CHARSET utf8mb4 NULL COLLATE utf8mb4_bin, PRIMARY KEY (`id`))", Reverse: "DROP TABLE `posts`"}}, }, }, { changes: []schema.Change{ func() *schema.AddTable { t := &schema.Table{ Name: "posts", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&AutoIncrement{V: 100}}}, {Name: "text", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}, Null: true}}, {Name: "ch", Type: &schema.ColumnType{Type: &schema.StringType{T: "char"}}}, }, Attrs: []schema.Attr{ &schema.Charset{V: "utf8mb4"}, &schema.Collation{V: "utf8mb4_bin"}, &schema.Comment{Text: "posts comment"}, &schema.Check{Name: "id_nonzero", Expr: "(`id` > 0)"}, &CreateOptions{V: `COMPRESSION="ZLIB"`}, }, Indexes: []*schema.Index{ { Name: "text_prefix", Parts: []*schema.IndexPart{ {Desc: true, Attrs: []schema.Attr{&SubPart{Len: 100}}}, }, }, }, } t.Indexes[0].Parts[0].C = t.Columns[1] t.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: t.Columns[0]}}} return &schema.AddTable{T: t} }(), }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{{Cmd: "CREATE TABLE `posts` (`id` bigint NOT NULL AUTO_INCREMENT, `text` text NULL, `ch` char NOT NULL, PRIMARY KEY (`id`), INDEX `text_prefix` (`text` (100) DESC), CONSTRAINT `id_nonzero` CHECK (`id` > 0)) CHARSET utf8mb4 COLLATE utf8mb4_bin COMMENT \"posts comment\" COMPRESSION=\"ZLIB\" AUTO_INCREMENT 100", Reverse: "DROP TABLE `posts`"}}, }, }, { changes: []schema.Change{ func() *schema.AddTable { t := &schema.Table{ Name: "posts", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}, 
Attrs: []schema.Attr{&AutoIncrement{}}}, {Name: "text", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}, Null: true}}, }, Attrs: []schema.Attr{&AutoIncrement{V: 10}}, } t.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: t.Columns[0]}}} return &schema.AddTable{T: t} }(), }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{{Cmd: "CREATE TABLE `posts` (`id` bigint NOT NULL AUTO_INCREMENT, `text` text NULL, PRIMARY KEY (`id`)) AUTO_INCREMENT 10", Reverse: "DROP TABLE `posts`"}}, }, }, { changes: []schema.Change{ &schema.DropTable{T: &schema.Table{Name: "posts"}}, }, wantPlan: &migrate.Plan{ Changes: []*migrate.Change{{Cmd: "DROP TABLE `posts`"}}, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.AddColumn{ C: &schema.Column{Name: "name", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(255)"}}}, }, &schema.AddIndex{ I: &schema.Index{ Name: "id_key", Parts: []*schema.IndexPart{ {C: users.Columns[0]}, }, Attrs: []schema.Attr{ &schema.Comment{Text: "comment"}, &IndexType{T: IndexTypeHash}, }, }, }, &schema.AddCheck{ C: &schema.Check{ Name: "id_nonzero", Expr: "(id > 0)", Attrs: []schema.Attr{&Enforced{}}, }, }, &schema.ModifyAttr{ From: &AutoIncrement{V: 1}, To: &AutoIncrement{V: 1000}, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `users` ADD COLUMN `name` varchar(255) NOT NULL, ADD INDEX `id_key` USING HASH (`id`) COMMENT \"comment\", ADD CONSTRAINT `id_nonzero` CHECK (id > 0) ENFORCED, AUTO_INCREMENT 1000", Reverse: "ALTER TABLE `users` AUTO_INCREMENT 1, DROP CONSTRAINT `id_nonzero`, DROP INDEX `id_key`, DROP COLUMN `name`", }, }, }, }, { changes: []schema.Change{ func() schema.Change { users := schema.NewTable("users"). 
AddColumns(schema.NewIntColumn("id", "int")) posts := schema.NewTable("posts"). AddColumns( schema.NewIntColumn("id", "int"), schema.NewIntColumn("author_id", "int"), ) posts.AddForeignKeys( schema.NewForeignKey("author"). AddColumns(posts.Columns[1]). SetRefTable(users). AddRefColumns(users.Columns[0]), ) return &schema.ModifyTable{ T: posts, Changes: []schema.Change{ &schema.AddColumn{C: posts.Columns[1]}, &schema.AddForeignKey{F: posts.ForeignKeys[0]}, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `posts` ADD COLUMN `author_id` int NOT NULL, ADD CONSTRAINT `author` FOREIGN KEY (`author_id`) REFERENCES `users` (`id`)", Reverse: "ALTER TABLE `posts` DROP FOREIGN KEY `author`, DROP COLUMN `author_id`", }, }, }, }, { changes: []schema.Change{ func() schema.Change { users := schema.NewTable("users"). AddColumns(schema.NewIntColumn("c1", "int")) return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.AddColumn{ C: schema.NewIntColumn("c2", "int").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "c1*2"}), }, &schema.AddColumn{ C: schema.NewIntColumn("c3", "int").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "c1*c2", Type: "STORED"}), }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `users` ADD COLUMN `c2` int AS (c1*2) NOT NULL, ADD COLUMN `c3` int AS (c1*c2) STORED NOT NULL", Reverse: "ALTER TABLE `users` DROP COLUMN `c3`, DROP COLUMN `c2`", }, }, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.DropCheck{ C: &schema.Check{ Name: "id_nonzero", Expr: "(id > 0)", Attrs: []schema.Attr{&Enforced{}}, }, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `users` DROP 
CONSTRAINT `id_nonzero`", Reverse: "ALTER TABLE `users` ADD CONSTRAINT `id_nonzero` CHECK (id > 0) ENFORCED", }, }, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.ModifyCheck{ From: &schema.Check{ Name: "check1", Expr: "(id > 0)", Attrs: []schema.Attr{&Enforced{}}, }, To: &schema.Check{ Name: "check1", Expr: "(id > 0)", }, }, &schema.ModifyCheck{ From: &schema.Check{ Name: "check2", Expr: "(id > 0)", }, To: &schema.Check{ Name: "check2", Expr: "(id > 0)", Attrs: []schema.Attr{&Enforced{}}, }, }, &schema.ModifyCheck{ From: &schema.Check{ Name: "check3", Expr: "(id > 0)", }, To: &schema.Check{ Name: "check3", Expr: "(id >= 0)", }, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `users` ALTER CHECK `check1` ENFORCED, ALTER CHECK `check2` NOT ENFORCED, DROP CHECK `check3`, ADD CONSTRAINT `check3` CHECK (id >= 0)", Reverse: "ALTER TABLE `users` DROP CHECK `check3`, ADD CONSTRAINT `check3` CHECK (id > 0), ALTER CHECK `check2` ENFORCED, ALTER CHECK `check1` NOT ENFORCED", }, }, }, }, { changes: []schema.Change{ &schema.AddTable{ T: schema.NewTable("users").AddColumns(schema.NewIntColumn("id", "bigint").SetCharset("utf8mb4")), }, }, wantErr: true, }, { changes: []schema.Change{ &schema.AddTable{ T: schema.NewTable("users").AddColumns(schema.NewIntColumn("id", "bigint").SetCollation("utf8mb4_general_ci")), }, }, wantErr: true, }, // Changing a regular column to a VIRTUAL generated column is not allowed. { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("users"). 
AddColumns(schema.NewColumn("c").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1"})), Changes: []schema.Change{ &schema.ModifyColumn{ Change: schema.ChangeGenerated, From: schema.NewColumn("c"), To: schema.NewColumn("c").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1"}), }, }, }, }, wantErr: true, }, // Changing a VIRTUAL generated column to a regular column is not allowed. { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("users"). AddColumns(schema.NewColumn("c")), Changes: []schema.Change{ &schema.ModifyColumn{ Change: schema.ChangeGenerated, From: schema.NewColumn("c").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "VIRTUAL"}), To: schema.NewColumn("c"), }, }, }, }, wantErr: true, }, // Changing the storage type of generated column is not allowed. { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("users"). AddColumns(schema.NewColumn("c").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"})), Changes: []schema.Change{ &schema.ModifyColumn{ Change: schema.ChangeGenerated, From: schema.NewColumn("c").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "VIRTUAL"}), To: schema.NewColumn("c").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"}), }, }, }, }, wantErr: true, }, // Changing a STORED generated column to a regular column. { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("users"). AddColumns(schema.NewIntColumn("c", "int")), Changes: []schema.Change{ &schema.ModifyColumn{ Change: schema.ChangeGenerated, From: schema.NewIntColumn("c", "int").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"}), To: schema.NewIntColumn("c", "int"), }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `users` MODIFY COLUMN `c` int NOT NULL", Reverse: "ALTER TABLE `users` MODIFY COLUMN `c` int AS (1) STORED NOT NULL", }, }, }, }, // Changing a regular column to a STORED generated column. 
{ changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("users"). AddColumns(schema.NewIntColumn("c", "int").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"})), Changes: []schema.Change{ &schema.ModifyColumn{ Change: schema.ChangeGenerated, From: schema.NewIntColumn("c", "int"), To: schema.NewIntColumn("c", "int").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"}), }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `users` MODIFY COLUMN `c` int AS (1) STORED NOT NULL", Reverse: "ALTER TABLE `users` MODIFY COLUMN `c` int NOT NULL", }, }, }, }, { changes: []schema.Change{ &schema.RenameTable{ From: schema.NewTable("t1"), To: schema.NewTable("t2"), }, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "RENAME TABLE `t1` TO `t2`", Reverse: "RENAME TABLE `t2` TO `t1`", }, }, }, }, { changes: []schema.Change{ &schema.RenameTable{ From: schema.NewTable("t1").SetSchema(schema.New("s1")), To: schema.NewTable("t2").SetSchema(schema.New("s2")), }, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "RENAME TABLE `s1`.`t1` TO `s2`.`t2`", Reverse: "RENAME TABLE `s2`.`t2` TO `s1`.`t1`", }, }, }, }, { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("t1"). SetSchema(schema.New("s1")). AddColumns(schema.NewColumn("b")), Changes: []schema.Change{ &schema.RenameColumn{ From: schema.NewColumn("a"), To: schema.NewColumn("b"), }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `s1`.`t1` RENAME COLUMN `a` TO `b`", Reverse: "ALTER TABLE `s1`.`t1` RENAME COLUMN `b` TO `a`", }, }, }, }, { version: "5.6", changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("t1"). SetSchema(schema.New("s1")). 
AddColumns(schema.NewIntColumn("b", "int")), Changes: []schema.Change{ &schema.RenameColumn{ From: schema.NewIntColumn("a", "int"), To: schema.NewIntColumn("b", "int"), }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `s1`.`t1` CHANGE COLUMN `a` `b` int NOT NULL", Reverse: "ALTER TABLE `s1`.`t1` CHANGE COLUMN `b` `a` int NOT NULL", }, }, }, }, { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("t1"). SetSchema(schema.New("s1")). AddColumns(schema.NewIntColumn("b", "int")), Changes: []schema.Change{ &schema.RenameIndex{ From: schema.NewIndex("a"), To: schema.NewIndex("b"), }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `s1`.`t1` RENAME INDEX `a` TO `b`", Reverse: "ALTER TABLE `s1`.`t1` RENAME INDEX `b` TO `a`", }, }, }, }, // Empty qualifier. { changes: []schema.Change{ &schema.AddTable{T: schema.NewTable("t").SetSchema(schema.New("d")).AddColumns(schema.NewIntColumn("a", "int"))}, }, options: []migrate.PlanOption{ func(o *migrate.PlanOptions) { o.SchemaQualifier = new(string) }, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "CREATE TABLE `t` (`a` int NOT NULL)", Reverse: "DROP TABLE `t`", }, }, }, }, // Custom qualifier. { changes: []schema.Change{ &schema.AddTable{T: schema.NewTable("t").SetSchema(schema.New("d")).AddColumns(schema.NewIntColumn("a", "int"))}, }, options: []migrate.PlanOption{ func(o *migrate.PlanOptions) { s := "other" o.SchemaQualifier = &s }, }, wantPlan: &migrate.Plan{ Reversible: true, Changes: []*migrate.Change{ { Cmd: "CREATE TABLE `other`.`t` (`a` int NOT NULL)", Reverse: "DROP TABLE `other`.`t`", }, }, }, }, // Empty qualifier in multi-schema mode should fail. 
{ changes: []schema.Change{ &schema.AddTable{T: schema.NewTable("t1").SetSchema(schema.New("s1")).AddColumns(schema.NewIntColumn("a", "int"))}, &schema.AddTable{T: schema.NewTable("t2").SetSchema(schema.New("s2")).AddColumns(schema.NewIntColumn("a", "int"))}, }, options: []migrate.PlanOption{ func(o *migrate.PlanOptions) { o.SchemaQualifier = new(string) }, }, wantErr: true, }, } for i, tt := range tests { t.Run(strconv.Itoa(i), func(t *testing.T) { if tt.version == "" { tt.version = "8.0.16" } db, _, err := newMigrate(tt.version) require.NoError(t, err) plan, err := db.PlanChanges(context.Background(), "wantPlan", tt.changes, tt.options...) if tt.wantErr { require.Error(t, err, "expect plan to fail") return } require.NoError(t, err) require.NotNil(t, plan) require.Equal(t, tt.wantPlan.Reversible, plan.Reversible) require.Equal(t, tt.wantPlan.Transactional, plan.Transactional) require.Equal(t, len(tt.wantPlan.Changes), len(plan.Changes)) for i, c := range plan.Changes { require.Equal(t, tt.wantPlan.Changes[i].Cmd, c.Cmd) require.Equal(t, tt.wantPlan.Changes[i].Reverse, c.Reverse) } }) } } func newMigrate(version string) (migrate.PlanApplier, *mock, error) { db, m, err := sqlmock.New() if err != nil { return nil, nil, err } mk := &mock{m} mk.version(version) drv, err := Open(db) if err != nil { return nil, nil, err } return drv, mk, nil } atlas-0.7.2/sql/mysql/mysqlcheck/000077500000000000000000000000001431455511600167405ustar00rootroot00000000000000atlas-0.7.2/sql/mysql/mysqlcheck/mysqlcheck.go000066400000000000000000000105501431455511600214330ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package mysqlcheck import ( "fmt" "strconv" "strings" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/mysql" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlcheck" "ariga.io/atlas/sql/sqlcheck/datadepend" "ariga.io/atlas/sql/sqlcheck/destructive" ) // codeImplicitUpdate is a MySQL specific code for reporting implicit update. var codeImplicitUpdate = sqlcheck.Code("MY101") func addNotNull(p *datadepend.ColumnPass) (diags []sqlcheck.Diagnostic, err error) { // Two types of reporting, implicit rows update and // changes that may cause the migration to fail. mightFail := func(tt string) { diags = append(diags, sqlcheck.Diagnostic{ Pos: p.Change.Stmt.Pos, Text: fmt.Sprintf( "Adding a non-nullable %q column %q will fail in case table %q is not empty", tt, p.Column.Name, p.Table.Name, ), }) } implicitUpdate := func(tt, v string) { diags = append(diags, sqlcheck.Diagnostic{ Code: codeImplicitUpdate, Pos: p.Change.Stmt.Pos, Text: fmt.Sprintf( "Adding a non-nullable %q column %q on table %q without a default value implicitly sets existing rows with %s", tt, p.Column.Name, p.Table.Name, v, ), }) } drv, ok := p.Dev.Driver.(*mysql.Driver) if !ok { return nil, fmt.Errorf("unexpected migrate driver %T", p.Dev.Driver) } switch ct := p.Column.Type.Type.(type) { case *mysql.BitType, *schema.BoolType, *schema.IntegerType, *schema.DecimalType, *schema.FloatType, *schema.BinaryType: tt, err := mysql.FormatType(p.Column.Type.Type) if err != nil { return nil, err } implicitUpdate(tt, "0") case *schema.StringType: switch ct.T { case mysql.TypeVarchar, mysql.TypeChar: implicitUpdate(ct.T, `""`) case mysql.TypeText, mysql.TypeTinyText, mysql.TypeMediumText, mysql.TypeLongText: // On MySQL, Existing rows are updated with ''. Skip it // as we cannot propose and detect multi-steps migration // (ALTER + UPDATE) at this stage. 
if drv.Maria() { implicitUpdate(ct.T, `""`) } } case *schema.EnumType: if len(ct.Values) == 0 { return nil, fmt.Errorf("unexpected empty values for enum column %q.%q", p.Table.Name, p.Column.Name) } implicitUpdate("enum", strconv.Quote(ct.Values[0])) case *mysql.SetType: implicitUpdate("set", `""`) case *schema.JSONType: // On MySQL, Existing rows are updated with 'null' JSON. Same as TEXT // columns, we cannot propose multi-steps migration (ALTER + UPDATE) // as it cannot be detected at this stage. if drv.Maria() { implicitUpdate(ct.T, `""`) } case *schema.TimeType: switch ct.T { case mysql.TypeDate, mysql.TypeDateTime: if drv.Maria() { implicitUpdate(ct.T, "00:00:00") } else { // The suggested solution is to add a DEFAULT clause // with valid value or set the column to nullable. mightFail(ct.T) } case mysql.TypeYear: implicitUpdate(ct.T, "0000") case mysql.TypeTime: implicitUpdate(ct.T, "00:00:00") case mysql.TypeTimestamp: v := "CURRENT_TIMESTAMP" switch { case drv.Maria(): // Maria has a special behavior for the first TIMESTAMP column. // See: https://mariadb.com/kb/en/timestamp/#automatic-values for i := 0; i < len(p.Table.Columns) && p.Table.Columns[i].Name != p.Column.Name; i++ { tt, err := mysql.FormatType(p.Table.Columns[i].Type.Type) if err != nil { return nil, err } if strings.HasPrefix(tt, mysql.TypeTimestamp) { v = "0000-00-00 00:00:00" break } } // Following MySQL 8.0.2, the explicit_defaults_for_timestamp // system variable is now enabled by default. case drv.GTE("8.0.2"): v = "0000-00-00 00:00:00" } implicitUpdate(ct.T, v) } case *schema.SpatialType: if drv.Maria() { implicitUpdate(ct.T, `""`) } else { // The suggested solution is to add the column as // null, update values and then set it to not-null. 
mightFail(ct.T) } } return } func init() { sqlcheck.Register(mysql.DriverName, func(r *schemahcl.Resource) ([]sqlcheck.Analyzer, error) { ds, err := destructive.New(r) if err != nil { return nil, err } dd, err := datadepend.New(r, datadepend.Handler{ AddNotNull: addNotNull, }) if err != nil { return nil, err } return []sqlcheck.Analyzer{ds, dd}, nil }) } atlas-0.7.2/sql/mysql/mysqlcheck/mysqlcheck_test.go000066400000000000000000000223241431455511600224740ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package mysqlcheck_test import ( "context" "testing" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/mysql" _ "ariga.io/atlas/sql/mysql/mysqlcheck" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlcheck" "ariga.io/atlas/sql/sqlclient" "github.com/stretchr/testify/require" ) func TestDataDepend_MySQL_ImplicitUpdate(t *testing.T) { var ( report *sqlcheck.Report pass = &sqlcheck.Pass{ Dev: &sqlclient.Client{ Name: "mysql", Driver: &mysql.Driver{}, }, File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ { Stmt: &migrate.Stmt{ Text: "ALTER TABLE users", }, Changes: schema.Changes{ &schema.ModifyTable{ T: schema.NewTable("users"). SetSchema(schema.New("test")). 
AddColumns( schema.NewIntColumn("a", mysql.TypeInt), schema.NewIntColumn("b", mysql.TypeInt), schema.NewFloatColumn("c", mysql.TypeFloat), schema.NewStringColumn("d", mysql.TypeVarchar, schema.StringSize(10)), schema.NewEnumColumn("e", schema.EnumValues("foo", "bar")), schema.NewTimeColumn("f", mysql.TypeTimestamp), ), Changes: []schema.Change{ &schema.AddColumn{C: schema.NewIntColumn("b", mysql.TypeInt)}, &schema.AddColumn{C: schema.NewFloatColumn("c", mysql.TypeFloat)}, &schema.AddColumn{C: schema.NewStringColumn("d", mysql.TypeVarchar, schema.StringSize(10))}, &schema.AddColumn{C: schema.NewEnumColumn("e", schema.EnumValues("foo", "bar"))}, &schema.AddColumn{C: schema.NewTimeColumn("f", mysql.TypeTimestamp)}, }, }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) azs, err := sqlcheck.AnalyzerFor(mysql.DriverName, nil) require.NoError(t, err) require.NoError(t, sqlcheck.Analyzers(azs).Analyze(context.Background(), pass)) require.Equal(t, report.Diagnostics[0].Text, `Adding a non-nullable "int" column "b" on table "users" without a default value implicitly sets existing rows with 0`) require.Equal(t, report.Diagnostics[1].Text, `Adding a non-nullable "float" column "c" on table "users" without a default value implicitly sets existing rows with 0`) require.Equal(t, report.Diagnostics[2].Text, `Adding a non-nullable "varchar" column "d" on table "users" without a default value implicitly sets existing rows with ""`) require.Equal(t, report.Diagnostics[3].Text, `Adding a non-nullable "enum" column "e" on table "users" without a default value implicitly sets existing rows with "foo"`) require.Equal(t, report.Diagnostics[4].Text, `Adding a non-nullable "timestamp" column "f" on table "users" without a default value implicitly sets existing rows with CURRENT_TIMESTAMP`) } func TestDataDepend_MySQL8_ImplicitUpdate(t *testing.T) { drv := &mysql.Driver{} drv.V = "8.0.19" var ( report *sqlcheck.Report pass = &sqlcheck.Pass{ Dev: 
&sqlclient.Client{ Name: "mysql", Driver: drv, }, File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ { Stmt: &migrate.Stmt{ Text: "ALTER TABLE users", }, Changes: schema.Changes{ &schema.ModifyTable{ T: schema.NewTable("users"). SetSchema(schema.New("test")). AddColumns( schema.NewIntColumn("a", mysql.TypeInt), schema.NewTimeColumn("b", mysql.TypeTimestamp), ), Changes: []schema.Change{ &schema.AddColumn{C: schema.NewTimeColumn("b", mysql.TypeTimestamp)}, }, }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) azs, err := sqlcheck.AnalyzerFor(mysql.DriverName, nil) require.NoError(t, err) require.NoError(t, sqlcheck.Analyzers(azs).Analyze(context.Background(), pass)) require.Equal(t, report.Diagnostics[0].Text, `Adding a non-nullable "timestamp" column "b" on table "users" without a default value implicitly sets existing rows with 0000-00-00 00:00:00`, "explicit_defaults_for_timestamp is enabled by default for versions >= 8.0.2", ) } func TestDataDepend_MySQL_MightFail(t *testing.T) { var ( report *sqlcheck.Report pass = &sqlcheck.Pass{ Dev: &sqlclient.Client{ Name: "mysql", Driver: &mysql.Driver{}, }, File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ { Stmt: &migrate.Stmt{ Text: "ALTER TABLE users", }, Changes: schema.Changes{ &schema.ModifyTable{ T: schema.NewTable("users"). SetSchema(schema.New("test")). 
AddColumns( schema.NewIntColumn("a", mysql.TypeInt), schema.NewTimeColumn("b", mysql.TypeDate), schema.NewTimeColumn("c", mysql.TypeDateTime), schema.NewSpatialColumn("d", mysql.TypePoint), ), Changes: []schema.Change{ &schema.AddColumn{C: schema.NewTimeColumn("b", mysql.TypeDate)}, &schema.AddColumn{C: schema.NewTimeColumn("c", mysql.TypeDateTime)}, &schema.AddColumn{C: schema.NewSpatialColumn("d", mysql.TypePoint)}, }, }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) azs, err := sqlcheck.AnalyzerFor(mysql.DriverName, nil) require.NoError(t, err) require.NoError(t, sqlcheck.Analyzers(azs).Analyze(context.Background(), pass)) require.Equal(t, report.Diagnostics[0].Text, `Adding a non-nullable "date" column "b" will fail in case table "users" is not empty`) require.Equal(t, report.Diagnostics[1].Text, `Adding a non-nullable "datetime" column "c" will fail in case table "users" is not empty`) require.Equal(t, report.Diagnostics[2].Text, `Adding a non-nullable "point" column "d" will fail in case table "users" is not empty`) } func TestDataDepend_Maria_ImplicitUpdate(t *testing.T) { drv := &mysql.Driver{} drv.V = "10.7.1-MariaDB" var ( report *sqlcheck.Report pass = &sqlcheck.Pass{ Dev: &sqlclient.Client{ Name: "mysql", Driver: drv, }, File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ { Stmt: &migrate.Stmt{ Text: "ALTER TABLE users", }, Changes: schema.Changes{ &schema.ModifyTable{ T: schema.NewTable("users"). SetSchema(schema.New("test")). 
AddColumns( schema.NewIntColumn("a", mysql.TypeInt), schema.NewIntColumn("b", mysql.TypeText), schema.NewJSONColumn("c", mysql.TypeJSON), schema.NewTimeColumn("d", mysql.TypeDate), schema.NewTimeColumn("e", mysql.TypeDateTime), schema.NewSpatialColumn("f", mysql.TypePoint), schema.NewTimeColumn("first_ts", mysql.TypeTimestamp), schema.NewTimeColumn("second_ts", mysql.TypeTimestamp), ), Changes: []schema.Change{ &schema.AddColumn{C: schema.NewStringColumn("b", mysql.TypeText)}, &schema.AddColumn{C: schema.NewJSONColumn("c", mysql.TypeJSON)}, &schema.AddColumn{C: schema.NewTimeColumn("d", mysql.TypeDate)}, &schema.AddColumn{C: schema.NewTimeColumn("e", mysql.TypeDateTime)}, &schema.AddColumn{C: schema.NewSpatialColumn("f", mysql.TypePoint)}, &schema.AddColumn{C: schema.NewTimeColumn("first_ts", mysql.TypeTimestamp)}, &schema.AddColumn{C: schema.NewTimeColumn("second_ts", mysql.TypeTimestamp)}, }, }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) azs, err := sqlcheck.AnalyzerFor(mysql.DriverName, nil) require.NoError(t, err) require.NoError(t, sqlcheck.Analyzers(azs).Analyze(context.Background(), pass)) require.Equal(t, report.Diagnostics[0].Text, `Adding a non-nullable "text" column "b" on table "users" without a default value implicitly sets existing rows with ""`) require.Equal(t, report.Diagnostics[1].Text, `Adding a non-nullable "json" column "c" on table "users" without a default value implicitly sets existing rows with ""`) require.Equal(t, report.Diagnostics[2].Text, `Adding a non-nullable "date" column "d" on table "users" without a default value implicitly sets existing rows with 00:00:00`) require.Equal(t, report.Diagnostics[3].Text, `Adding a non-nullable "datetime" column "e" on table "users" without a default value implicitly sets existing rows with 00:00:00`) require.Equal(t, report.Diagnostics[4].Text, `Adding a non-nullable "point" column "f" on table "users" without a default value implicitly sets 
existing rows with ""`) require.Equal(t, report.Diagnostics[5].Text, `Adding a non-nullable "timestamp" column "first_ts" on table "users" without a default value implicitly sets existing rows with CURRENT_TIMESTAMP`) require.Equal(t, report.Diagnostics[6].Text, `Adding a non-nullable "timestamp" column "second_ts" on table "users" without a default value implicitly sets existing rows with 0000-00-00 00:00:00`) } type testFile struct { name string migrate.File } func (t testFile) Name() string { return t.name } atlas-0.7.2/sql/mysql/sqlspec.go000066400000000000000000000362321431455511600166040ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package mysql import ( "errors" "fmt" "reflect" "strings" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/internal/specutil" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlspec" "github.com/hashicorp/hcl/v2/hclparse" ) type doc struct { Tables []*sqlspec.Table `spec:"table"` Schemas []*sqlspec.Schema `spec:"schema"` } // evalSpec evaluates an Atlas DDL document into v using the input. 
func evalSpec(p *hclparse.Parser, v any, input map[string]string) error { var d doc if err := hclState.Eval(p, &d, input); err != nil { return err } switch v := v.(type) { case *schema.Realm: err := specutil.Scan(v, d.Schemas, d.Tables, convertTable) if err != nil { return fmt.Errorf("mysql: failed converting to *schema.Realm: %w", err) } for _, schemaSpec := range d.Schemas { schm, ok := v.Schema(schemaSpec.Name) if !ok { return fmt.Errorf("could not find schema: %q", schemaSpec.Name) } if err := convertCharset(schemaSpec, &schm.Attrs); err != nil { return err } } case *schema.Schema: if len(d.Schemas) != 1 { return fmt.Errorf("mysql: expecting document to contain a single schema, got %d", len(d.Schemas)) } var r schema.Realm if err := specutil.Scan(&r, d.Schemas, d.Tables, convertTable); err != nil { return err } if err := convertCharset(d.Schemas[0], &r.Schemas[0].Attrs); err != nil { return err } r.Schemas[0].Realm = nil *v = *r.Schemas[0] default: return fmt.Errorf("mysql: failed unmarshaling spec. %T is not supported", v) } return nil } // MarshalSpec marshals v into an Atlas DDL document using a schemahcl.Marshaler. func MarshalSpec(v any, marshaler schemahcl.Marshaler) ([]byte, error) { return specutil.Marshal(v, marshaler, schemaSpec) } var ( hclState = schemahcl.New( schemahcl.WithTypes(TypeRegistry.Specs()), schemahcl.WithScopedEnums("table.index.type", IndexTypeBTree, IndexTypeHash, IndexTypeFullText, IndexTypeSpatial), schemahcl.WithScopedEnums("table.column.as.type", stored, persistent, virtual), schemahcl.WithScopedEnums("table.foreign_key.on_update", specutil.ReferenceVars...), schemahcl.WithScopedEnums("table.foreign_key.on_delete", specutil.ReferenceVars...), ) // MarshalHCL marshals v into an Atlas HCL DDL document. MarshalHCL = schemahcl.MarshalerFunc(func(v any) ([]byte, error) { return MarshalSpec(v, hclState) }) // EvalHCL implements the schemahcl.Evaluator interface. 
EvalHCL = schemahcl.EvalFunc(evalSpec) // EvalHCLBytes is a helper that evaluates an HCL document from a byte slice instead // of from an hclparse.Parser instance. EvalHCLBytes = specutil.HCLBytesFunc(EvalHCL) ) // convertTable converts a sqlspec.Table to a schema.Table. Table conversion is done without converting // ForeignKeySpecs into ForeignKeys, as the target tables do not necessarily exist in the schema // at this point. Instead, the linking is done by the convertSchema function. func convertTable(spec *sqlspec.Table, parent *schema.Schema) (*schema.Table, error) { t, err := specutil.Table(spec, parent, convertColumn, specutil.PrimaryKey, convertIndex, convertCheck) if err != nil { return nil, err } if err := convertCharset(spec, &t.Attrs); err != nil { return nil, err } // MySQL allows setting the initial AUTO_INCREMENT value // on the table definition. if attr, ok := spec.Attr("auto_increment"); ok { v, err := attr.Int64() if err != nil { return nil, err } t.AddAttrs(&AutoIncrement{V: v}) } return t, err } // convertIndex converts a sqlspec.Index into a schema.Index. func convertIndex(spec *sqlspec.Index, parent *schema.Table) (*schema.Index, error) { idx, err := specutil.Index(spec, parent, convertPart) if err != nil { return nil, err } if attr, ok := spec.Attr("type"); ok { t, err := attr.String() if err != nil { return nil, err } idx.AddAttrs(&IndexType{T: t}) } return idx, nil } func convertPart(spec *sqlspec.IndexPart, part *schema.IndexPart) error { if attr, ok := spec.Attr("prefix"); ok { if part.X != nil { return errors.New("attribute 'on.prefix' cannot be used in functional part") } p, err := attr.Int() if err != nil { return err } part.AddAttrs(&SubPart{Len: p}) } return nil } // convertCheck converts a sqlspec.Check into a schema.Check. 
func convertCheck(spec *sqlspec.Check) (*schema.Check, error) { c, err := specutil.Check(spec) if err != nil { return nil, err } if attr, ok := spec.Attr("enforced"); ok { b, err := attr.Bool() if err != nil { return nil, err } c.AddAttrs(&Enforced{V: b}) } return c, nil } // convertColumn converts a sqlspec.Column into a schema.Column. func convertColumn(spec *sqlspec.Column, _ *schema.Table) (*schema.Column, error) { c, err := specutil.Column(spec, convertColumnType) if err != nil { return nil, err } if err := convertCharset(spec, &c.Attrs); err != nil { return nil, err } if attr, ok := spec.Attr("on_update"); ok { exp, ok := attr.V.(*schemahcl.RawExpr) if !ok { return nil, fmt.Errorf(`unexpected type %T for atrribute "on_update"`, attr.V) } c.AddAttrs(&OnUpdate{A: exp.X}) } if attr, ok := spec.Attr("auto_increment"); ok { b, err := attr.Bool() if err != nil { return nil, err } if b { c.AddAttrs(&AutoIncrement{}) } } if err := specutil.ConvertGenExpr(spec.Remain(), c, storedOrVirtual); err != nil { return nil, err } return c, err } // convertColumnType converts a sqlspec.Column into a concrete MySQL schema.Type. func convertColumnType(spec *sqlspec.Column) (schema.Type, error) { return TypeRegistry.Type(spec.Type, spec.Extra.Attrs) } // schemaSpec converts from a concrete MySQL schema to Atlas specification. func schemaSpec(s *schema.Schema) (*sqlspec.Schema, []*sqlspec.Table, error) { sc, t, err := specutil.FromSchema(s, tableSpec) if err != nil { return nil, nil, err } if c, ok := hasCharset(s.Attrs, nil); ok { sc.Extra.Attrs = append(sc.Extra.Attrs, specutil.StrAttr("charset", c)) } if c, ok := hasCollate(s.Attrs, nil); ok { sc.Extra.Attrs = append(sc.Extra.Attrs, specutil.StrAttr("collate", c)) } return sc, t, nil } // tableSpec converts from a concrete MySQL sqlspec.Table to a schema.Table. 
func tableSpec(t *schema.Table) (*sqlspec.Table, error) { ts, err := specutil.FromTable( t, columnSpec, specutil.FromPrimaryKey, indexSpec, specutil.FromForeignKey, checkSpec, ) if err != nil { return nil, err } if c, ok := hasCharset(t.Attrs, t.Schema.Attrs); ok { ts.Extra.Attrs = append(ts.Extra.Attrs, specutil.StrAttr("charset", c)) } if c, ok := hasCollate(t.Attrs, t.Schema.Attrs); ok { ts.Extra.Attrs = append(ts.Extra.Attrs, specutil.StrAttr("collate", c)) } return ts, nil } func indexSpec(idx *schema.Index) (*sqlspec.Index, error) { spec, err := specutil.FromIndex(idx, partAttr) if err != nil { return nil, err } // Avoid printing the index type if it is the default. if i := (IndexType{}); sqlx.Has(idx.Attrs, &i) && i.T != IndexTypeBTree { spec.Extra.Attrs = append(spec.Extra.Attrs, specutil.VarAttr("type", strings.ToUpper(i.T))) } return spec, nil } func partAttr(part *schema.IndexPart, spec *sqlspec.IndexPart) { if p := (SubPart{}); sqlx.Has(part.Attrs, &p) && p.Len > 0 { spec.Extra.Attrs = append(spec.Extra.Attrs, specutil.IntAttr("prefix", p.Len)) } } // columnSpec converts from a concrete MySQL schema.Column into a sqlspec.Column. 
func columnSpec(c *schema.Column, t *schema.Table) (*sqlspec.Column, error) { spec, err := specutil.FromColumn(c, columnTypeSpec) if err != nil { return nil, err } if c, ok := hasCharset(c.Attrs, t.Attrs); ok { spec.Extra.Attrs = append(spec.Extra.Attrs, specutil.StrAttr("charset", c)) } if c, ok := hasCollate(c.Attrs, t.Attrs); ok { spec.Extra.Attrs = append(spec.Extra.Attrs, specutil.StrAttr("collate", c)) } if o := (OnUpdate{}); sqlx.Has(c.Attrs, &o) { spec.Extra.Attrs = append(spec.Extra.Attrs, specutil.RawAttr("on_update", o.A)) } if sqlx.Has(c.Attrs, &AutoIncrement{}) { spec.Extra.Attrs = append(spec.Extra.Attrs, specutil.BoolAttr("auto_increment", true)) } if x := (schema.GeneratedExpr{}); sqlx.Has(c.Attrs, &x) { spec.Extra.Children = append(spec.Extra.Children, specutil.FromGenExpr(x, storedOrVirtual)) } return spec, nil } // storedOrVirtual returns a STORED or VIRTUAL // generated type option based on the given string. func storedOrVirtual(s string) string { switch s = strings.ToUpper(s); s { // The default is VIRTUAL if no type is specified. case "": return virtual // In MariaDB, PERSISTENT is synonyms for STORED. case persistent: return stored } return s } // checkSpec converts from a concrete MySQL schema.Check into a sqlspec.Check. func checkSpec(s *schema.Check) *sqlspec.Check { c := specutil.FromCheck(s) if e := (Enforced{}); sqlx.Has(s.Attrs, &e) { c.Extra.Attrs = append(c.Extra.Attrs, specutil.BoolAttr("enforced", true)) } return c } // columnTypeSpec converts from a concrete MySQL schema.Type into sqlspec.Column Type. func columnTypeSpec(t schema.Type) (*sqlspec.Column, error) { st, err := TypeRegistry.Convert(t) if err != nil { return nil, err } c := &sqlspec.Column{Type: st} for _, attr := range st.Attrs { // TODO(rotemtam): infer this from the TypeSpec if attr.K == "unsigned" { c.Extra.Attrs = append(c.Extra.Attrs, attr) } } return c, nil } // convertCharset converts spec charset/collation // attributes to schema element attributes. 
func convertCharset(spec specutil.Attrer, attrs *[]schema.Attr) error { if attr, ok := spec.Attr("charset"); ok { s, err := attr.String() if err != nil { return err } *attrs = append(*attrs, &schema.Charset{V: s}) } // For backwards compatibility, accepts both "collate" and "collation". attr, ok := spec.Attr("collate") if !ok { attr, ok = spec.Attr("collation") } if ok { s, err := attr.String() if err != nil { return err } *attrs = append(*attrs, &schema.Collation{V: s}) } return nil } // hasCharset reports if the attribute contains the "charset" attribute, // and it needs to be defined explicitly on the schema. This is true, in // case the element charset is different from its parent charset. func hasCharset(attr []schema.Attr, parent []schema.Attr) (string, bool) { var c, p schema.Charset if sqlx.Has(attr, &c) && (parent == nil || sqlx.Has(parent, &p) && c.V != p.V) { return c.V, true } return "", false } // hasCollate reports if the attribute contains the "collation"/"collate" attribute, // and it needs to be defined explicitly on the schema. This is true, in // case the element collation is different from its parent collation. func hasCollate(attr []schema.Attr, parent []schema.Attr) (string, bool) { var c, p schema.Collation if sqlx.Has(attr, &c) && (parent == nil || sqlx.Has(parent, &p) && c.V != p.V) { return c.V, true } return "", false } // TypeRegistry contains the supported TypeSpecs for the mysql driver. 
var TypeRegistry = schemahcl.NewRegistry( schemahcl.WithFormatter(FormatType), schemahcl.WithParser(ParseType), schemahcl.WithSpecs( &schemahcl.TypeSpec{ Name: TypeEnum, T: TypeEnum, Attributes: []*schemahcl.TypeAttr{ {Name: "values", Kind: reflect.Slice, Required: true}, }, RType: reflect.TypeOf(schema.EnumType{}), }, &schemahcl.TypeSpec{ Name: TypeSet, T: TypeSet, Attributes: []*schemahcl.TypeAttr{ {Name: "values", Kind: reflect.Slice, Required: true}, }, RType: reflect.TypeOf(SetType{}), }, schemahcl.NewTypeSpec(TypeBool), schemahcl.NewTypeSpec(TypeBoolean), schemahcl.NewTypeSpec(TypeBit, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeInt, schemahcl.WithAttributes(unsignedTypeAttr(), schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeTinyInt, schemahcl.WithAttributes(unsignedTypeAttr(), schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeSmallInt, schemahcl.WithAttributes(unsignedTypeAttr(), schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeMediumInt, schemahcl.WithAttributes(unsignedTypeAttr(), schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeBigInt, schemahcl.WithAttributes(unsignedTypeAttr(), schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeDecimal, schemahcl.WithAttributes(unsignedTypeAttr(), &schemahcl.TypeAttr{Name: "precision", Kind: reflect.Int, Required: false}, &schemahcl.TypeAttr{Name: "scale", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec(TypeNumeric, schemahcl.WithAttributes(unsignedTypeAttr(), &schemahcl.TypeAttr{Name: "precision", Kind: reflect.Int, Required: false}, &schemahcl.TypeAttr{Name: "scale", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec(TypeFloat, schemahcl.WithAttributes(unsignedTypeAttr(), &schemahcl.TypeAttr{Name: "precision", Kind: reflect.Int, Required: false}, &schemahcl.TypeAttr{Name: "scale", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec(TypeDouble, schemahcl.WithAttributes(unsignedTypeAttr(), 
&schemahcl.TypeAttr{Name: "precision", Kind: reflect.Int, Required: false}, &schemahcl.TypeAttr{Name: "scale", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec(TypeReal, schemahcl.WithAttributes(unsignedTypeAttr(), &schemahcl.TypeAttr{Name: "precision", Kind: reflect.Int, Required: false}, &schemahcl.TypeAttr{Name: "scale", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec(TypeTimestamp, schemahcl.WithAttributes(&schemahcl.TypeAttr{Name: "precision", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec(TypeDate), schemahcl.NewTypeSpec(TypeTime, schemahcl.WithAttributes(&schemahcl.TypeAttr{Name: "precision", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec(TypeDateTime, schemahcl.WithAttributes(&schemahcl.TypeAttr{Name: "precision", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec(TypeYear, schemahcl.WithAttributes(&schemahcl.TypeAttr{Name: "precision", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec(TypeVarchar, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(true))), schemahcl.NewTypeSpec(TypeChar, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeVarBinary, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(true))), schemahcl.NewTypeSpec(TypeBinary, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeBlob, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeTinyBlob), schemahcl.NewTypeSpec(TypeMediumBlob), schemahcl.NewTypeSpec(TypeLongBlob), schemahcl.NewTypeSpec(TypeJSON), schemahcl.NewTypeSpec(TypeText, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeTinyText), schemahcl.NewTypeSpec(TypeMediumText), schemahcl.NewTypeSpec(TypeLongText), schemahcl.NewTypeSpec(TypeGeometry), schemahcl.NewTypeSpec(TypePoint), schemahcl.NewTypeSpec(TypeMultiPoint), schemahcl.NewTypeSpec(TypeLineString), schemahcl.NewTypeSpec(TypeMultiLineString), schemahcl.NewTypeSpec(TypePolygon), 
schemahcl.NewTypeSpec(TypeMultiPolygon), schemahcl.NewTypeSpec(TypeGeometryCollection), ), ) func unsignedTypeAttr() *schemahcl.TypeAttr { return &schemahcl.TypeAttr{ Name: "unsigned", Kind: reflect.Bool, } } atlas-0.7.2/sql/mysql/sqlspec_test.go000066400000000000000000000722071431455511600176450ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package mysql import ( "fmt" "testing" "ariga.io/atlas/sql/internal/spectest" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestSQLSpec(t *testing.T) { f := ` schema "schema" { } table "table" { schema = schema.schema column "col" { type = int comment = "column comment" } column "age" { type = int } column "price1" { type = int auto_increment = false } column "price2" { type = int auto_increment = true } column "account_name" { type = varchar(32) } column "created_at" { type = datetime(4) default = sql("now(4)") } column "updated_at" { type = timestamp(6) default = sql("current_timestamp(6)") on_update = sql("current_timestamp(6)") } primary_key { columns = [table.table.column.col] } index "index" { unique = true columns = [ table.table.column.col, table.table.column.age, ] comment = "index comment" } foreign_key "accounts" { columns = [ table.table.column.account_name, ] ref_columns = [ table.accounts.column.name, ] on_delete = SET_NULL } check "positive price" { expr = "price1 > 0" } check { expr = "price1 <> price2" enforced = true } check { expr = "price2 <> price1" enforced = false } comment = "table comment" auto_increment = 1000 } table "accounts" { schema = schema.schema column "name" { type = varchar(32) } column "unsigned_float" { type = float(10) unsigned = true } column "unsigned_decimal" { type = decimal(10,2) unsigned = true } primary_key { columns = [table.accounts.column.name] } } ` var s schema.Schema err 
:= EvalHCLBytes([]byte(f), &s, nil) require.NoError(t, err) exp := &schema.Schema{ Name: "schema", } exp.Tables = []*schema.Table{ { Name: "table", Schema: exp, Columns: []*schema.Column{ { Name: "col", Type: &schema.ColumnType{ Type: &schema.IntegerType{ T: TypeInt, }, }, Attrs: []schema.Attr{ &schema.Comment{Text: "column comment"}, }, }, { Name: "age", Type: &schema.ColumnType{ Type: &schema.IntegerType{ T: TypeInt, }, }, }, { Name: "price1", Type: &schema.ColumnType{ Type: &schema.IntegerType{ T: TypeInt, }, }, }, { Name: "price2", Type: &schema.ColumnType{ Type: &schema.IntegerType{ T: TypeInt, }, }, Attrs: []schema.Attr{&AutoIncrement{}}, }, { Name: "account_name", Type: &schema.ColumnType{ Type: &schema.StringType{ T: TypeVarchar, Size: 32, }, }, }, { Name: "created_at", Type: &schema.ColumnType{ Type: typeTime(TypeDateTime, 4), }, Default: &schema.RawExpr{X: "now(4)"}, }, { Name: "updated_at", Type: &schema.ColumnType{ Type: typeTime(TypeTimestamp, 6), }, Default: &schema.RawExpr{X: "current_timestamp(6)"}, Attrs: []schema.Attr{&OnUpdate{A: "current_timestamp(6)"}}, }, }, Attrs: []schema.Attr{ &schema.Check{ Name: "positive price", Expr: "price1 > 0", }, &schema.Check{ Expr: "price1 <> price2", Attrs: []schema.Attr{&Enforced{V: true}}, }, &schema.Check{ Expr: "price2 <> price1", Attrs: []schema.Attr{&Enforced{V: false}}, }, &schema.Comment{Text: "table comment"}, &AutoIncrement{V: 1000}, }, }, { Name: "accounts", Schema: exp, Columns: []*schema.Column{ { Name: "name", Type: &schema.ColumnType{ Type: &schema.StringType{ T: TypeVarchar, Size: 32, }, }, }, { Name: "unsigned_float", Type: &schema.ColumnType{ Type: &schema.FloatType{ T: TypeFloat, Precision: 10, Unsigned: true, }, }, }, { Name: "unsigned_decimal", Type: &schema.ColumnType{ Type: &schema.DecimalType{ T: TypeDecimal, Precision: 10, Scale: 2, Unsigned: true, }, }, }, }, }, } exp.Tables[0].PrimaryKey = &schema.Index{ Table: exp.Tables[0], Parts: []*schema.IndexPart{ {SeqNo: 0, C: 
exp.Tables[0].Columns[0]}, }, } exp.Tables[0].Indexes = []*schema.Index{ { Name: "index", Table: exp.Tables[0], Unique: true, Parts: []*schema.IndexPart{ {SeqNo: 0, C: exp.Tables[0].Columns[0]}, {SeqNo: 1, C: exp.Tables[0].Columns[1]}, }, Attrs: []schema.Attr{ &schema.Comment{Text: "index comment"}, }, }, } exp.Tables[0].ForeignKeys = []*schema.ForeignKey{ { Symbol: "accounts", Table: exp.Tables[0], Columns: []*schema.Column{exp.Tables[0].Columns[4]}, RefTable: exp.Tables[1], RefColumns: []*schema.Column{exp.Tables[1].Columns[0]}, OnDelete: schema.SetNull, }, } exp.Tables[1].PrimaryKey = &schema.Index{ Table: exp.Tables[1], Parts: []*schema.IndexPart{ {SeqNo: 0, C: exp.Tables[1].Columns[0]}, }, } require.EqualValues(t, exp, &s) } func TestMarshalSpec_Charset(t *testing.T) { s := &schema.Schema{ Name: "test", Attrs: []schema.Attr{ &schema.Charset{V: "utf8mb4"}, &schema.Collation{V: "utf8mb4_0900_ai_ci"}, }, Tables: []*schema.Table{ { Name: "users", Attrs: []schema.Attr{ &schema.Charset{V: "utf8mb4"}, &schema.Collation{V: "utf8mb4_0900_ai_ci"}, }, Columns: []*schema.Column{ { Name: "a", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, }, }, { Name: "b", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}, Attrs: []schema.Attr{ &schema.Charset{V: "utf8mb4"}, &schema.Collation{V: "utf8mb4_0900_ai_ci"}, }, }, }, }, { Name: "posts", Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, }, Columns: []*schema.Column{ { Name: "a", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}, Attrs: []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, }, }, { Name: "b", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}, Attrs: []schema.Attr{ &schema.Charset{V: "utf8mb4"}, &schema.Collation{V: "utf8mb4_0900_ai_ci"}, }, }, }, }, }, } s.Tables[0].Schema = s 
s.Tables[1].Schema = s buf, err := MarshalSpec(s, hclState) require.NoError(t, err) // Charset and collate that are identical to their parent elements // should not be printed as they are inherited by default from it. const expected = `table "users" { schema = schema.test column "a" { null = false type = text charset = "latin1" collate = "latin1_swedish_ci" } column "b" { null = false type = text } } table "posts" { schema = schema.test charset = "latin1" collate = "latin1_swedish_ci" column "a" { null = false type = text } column "b" { null = false type = text charset = "utf8mb4" collate = "utf8mb4_0900_ai_ci" } } schema "test" { charset = "utf8mb4" collate = "utf8mb4_0900_ai_ci" } ` require.EqualValues(t, expected, string(buf)) var ( s2 schema.Schema latin = []schema.Attr{ &schema.Charset{V: "latin1"}, &schema.Collation{V: "latin1_swedish_ci"}, } utf8mb4 = []schema.Attr{ &schema.Charset{V: "utf8mb4"}, &schema.Collation{V: "utf8mb4_0900_ai_ci"}, } ) require.NoError(t, EvalHCLBytes(buf, &s2, nil)) require.Equal(t, utf8mb4, s2.Attrs) posts, ok := s2.Table("posts") require.True(t, ok) require.Equal(t, latin, posts.Attrs) users, ok := s2.Table("users") require.True(t, ok) require.Empty(t, users.Attrs) a, ok := users.Column("a") require.True(t, ok) require.Equal(t, latin, a.Attrs) b, ok := posts.Column("b") require.True(t, ok) require.Equal(t, utf8mb4, b.Attrs) } func TestMarshalSpec_Comment(t *testing.T) { s := &schema.Schema{ Name: "test", Tables: []*schema.Table{ { Name: "users", Attrs: []schema.Attr{ &schema.Comment{Text: "table comment"}, }, Columns: []*schema.Column{ { Name: "a", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}, Attrs: []schema.Attr{ &schema.Comment{Text: "column comment"}, }, }, }, }, { Name: "posts", Columns: []*schema.Column{ { Name: "a", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}, }, }, }, }, } s.Tables[0].Schema = s s.Tables[1].Schema = s s.Tables[0].Indexes = []*schema.Index{ { Name: "index", Table: 
s.Tables[0], Unique: true, Parts: []*schema.IndexPart{{SeqNo: 0, C: s.Tables[0].Columns[0]}}, Attrs: []schema.Attr{ &schema.Comment{Text: "index comment"}, }, }, } buf, err := MarshalSpec(s, hclState) require.NoError(t, err) // We expect a zero value comment to not be present in the marshaled HCL. const expected = `table "users" { schema = schema.test comment = "table comment" column "a" { null = false type = text comment = "column comment" } index "index" { unique = true columns = [column.a] comment = "index comment" } } table "posts" { schema = schema.test column "a" { null = false type = text } } schema "test" { } ` require.EqualValues(t, expected, string(buf)) } func TestMarshalSpec_AutoIncrement(t *testing.T) { s := &schema.Schema{ Name: "test", Tables: []*schema.Table{ { Name: "users", Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{ &AutoIncrement{}, }, }, }, }, }, } s.Tables[0].Schema = s buf, err := MarshalSpec(s, hclState) require.NoError(t, err) const expected = `table "users" { schema = schema.test column "id" { null = false type = bigint auto_increment = true } } schema "test" { } ` require.EqualValues(t, expected, string(buf)) } func TestMarshalSpec_Check(t *testing.T) { s := schema.New("test"). AddTables( schema.NewTable("products"). AddColumns( schema.NewIntColumn("price1", TypeInt), schema.NewIntColumn("price2", TypeInt), ). 
AddChecks( schema.NewCheck().SetName("price1 positive").SetExpr("price1 > 0"), schema.NewCheck().SetExpr("price1 <> price2").AddAttrs(&Enforced{}), ), ) buf, err := MarshalSpec(s, hclState) require.NoError(t, err) const expected = `table "products" { schema = schema.test column "price1" { null = false type = int } column "price2" { null = false type = int } check "price1 positive" { expr = "price1 > 0" } check { expr = "price1 <> price2" enforced = true } } schema "test" { } ` require.EqualValues(t, expected, string(buf)) } func TestUnmarshalSpec_IndexParts(t *testing.T) { var ( s schema.Schema f = ` schema "test" {} table "users" { schema = schema.test column "name" { type = text } index "idx" { on { column = table.users.column.name desc = true prefix = 10 } on { expr = "lower(name)" } } } ` ) err := EvalHCLBytes([]byte(f), &s, nil) require.NoError(t, err) c := schema.NewStringColumn("name", "text") exp := schema.New("test"). AddTables( schema.NewTable("users"). AddColumns(c). AddIndexes( schema.NewIndex("idx"). AddParts( schema.NewColumnPart(c).SetDesc(true).AddAttrs(&SubPart{Len: 10}), schema.NewExprPart(&schema.RawExpr{X: "lower(name)"}), ), ), ) exp.Tables[0].Columns[0].Indexes = nil require.EqualValues(t, exp, &s) } func TestMarshalSpec_IndexParts(t *testing.T) { c := schema.NewStringColumn("name", "text") c2 := schema.NewStringColumn("Full Name", "text") s := schema.New("test"). AddTables( schema.NewTable("users"). AddColumns(c, c2). AddIndexes( schema.NewIndex("idx"). AddParts( schema.NewColumnPart(c).SetDesc(true).AddAttrs(&SubPart{Len: 10}), schema.NewExprPart(&schema.RawExpr{X: "lower(name)"}), ), schema.NewIndex("idx2"). AddParts( schema.NewColumnPart(c2).SetDesc(true).AddAttrs(&SubPart{Len: 10}), ), schema.NewIndex("idx3"). 
AddParts(schema.NewColumnPart(c2)), ), ) buf, err := MarshalHCL(s) require.NoError(t, err) exp := `table "users" { schema = schema.test column "name" { null = false type = text } column "Full Name" { null = false type = text } index "idx" { on { desc = true column = column.name prefix = 10 } on { expr = "lower(name)" } } index "idx2" { on { desc = true column = column["Full Name"] prefix = 10 } } index "idx3" { columns = [column["Full Name"]] } } schema "test" { } ` require.EqualValues(t, exp, string(buf)) } func TestMarshalSpec_TimePrecision(t *testing.T) { s := schema.New("test"). AddTables( schema.NewTable("times"). AddColumns( schema.NewTimeColumn("tTimeDef", TypeTime), schema.NewTimeColumn("tTime", TypeTime, schema.TimePrecision(1)), schema.NewTimeColumn("tDatetime", TypeDateTime, schema.TimePrecision(2)), schema.NewTimeColumn("tTimestamp", TypeTimestamp, schema.TimePrecision(3)). SetDefault(&schema.RawExpr{X: "current_timestamp(3)"}). AddAttrs(&OnUpdate{A: "current_timestamp(3)"}), schema.NewTimeColumn("tDate", TypeDate), schema.NewTimeColumn("tYear", TypeYear, schema.TimePrecision(2)), ), ) buf, err := MarshalSpec(s, hclState) require.NoError(t, err) const expected = `table "times" { schema = schema.test column "tTimeDef" { null = false type = time } column "tTime" { null = false type = time(1) } column "tDatetime" { null = false type = datetime(2) } column "tTimestamp" { null = false type = timestamp(3) default = sql("current_timestamp(3)") on_update = sql("current_timestamp(3)") } column "tDate" { null = false type = date } column "tYear" { null = false type = year(2) } } schema "test" { } ` require.EqualValues(t, expected, string(buf)) } func TestMarshalSpec_GeneratedColumn(t *testing.T) { s := schema.New("test"). AddTables( schema.NewTable("users"). AddColumns( schema.NewIntColumn("c1", "int"), schema.NewIntColumn("c2", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "c1 * 2"}), schema.NewIntColumn("c3", "int"). 
SetGeneratedExpr(&schema.GeneratedExpr{Expr: "c2 * c3", Type: "VIRTUAL"}), schema.NewIntColumn("c4", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "c3 * c4", Type: "STORED"}), ), ) buf, err := MarshalSpec(s, hclState) require.NoError(t, err) const expected = `table "users" { schema = schema.test column "c1" { null = false type = int } column "c2" { null = false type = int as { expr = "c1 * 2" type = VIRTUAL } } column "c3" { null = false type = int as { expr = "c2 * c3" type = VIRTUAL } } column "c4" { null = false type = int as { expr = "c3 * c4" type = STORED } } } schema "test" { } ` require.EqualValues(t, expected, string(buf)) } func TestUnmarshalSpec_GeneratedColumns(t *testing.T) { var ( s schema.Schema f = ` schema "test" {} table "users" { schema = schema.test column "c1" { type = int } column "c2" { type = int as = "c1 * 2" } column "c3" { type = int as { expr = "c2 * 2" } } column "c4" { type = int as { expr = "c3 * 2" type = STORED } } } ` ) err := EvalHCLBytes([]byte(f), &s, nil) require.NoError(t, err) exp := schema.New("test"). AddTables( schema.NewTable("users"). AddColumns( schema.NewIntColumn("c1", "int"), schema.NewIntColumn("c2", "int").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "c1 * 2", Type: "VIRTUAL"}), schema.NewIntColumn("c3", "int").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "c2 * 2", Type: "VIRTUAL"}), schema.NewIntColumn("c4", "int").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "c3 * 2", Type: "STORED"}), ), ) require.EqualValues(t, exp, &s) } func TestMarshalSpec_FloatUnsigned(t *testing.T) { s := schema.New("test"). AddTables( schema.NewTable("test"). 
AddColumns( schema.NewFloatColumn( "float_col", TypeFloat, schema.FloatPrecision(10), schema.FloatUnsigned(true), ), schema.NewDecimalColumn( "decimal_col", TypeDecimal, schema.DecimalPrecision(10), schema.DecimalScale(2), schema.DecimalUnsigned(true), ), ), ) buf, err := MarshalSpec(s, hclState) require.NoError(t, err) const expected = `table "test" { schema = schema.test column "float_col" { null = false type = float(10) unsigned = true } column "decimal_col" { null = false type = decimal(10,2) unsigned = true } } schema "test" { } ` require.EqualValues(t, expected, string(buf)) } func TestTypes(t *testing.T) { p := func(i int) *int { return &i } tests := []struct { typeExpr string extraAttr string expected schema.Type }{ { typeExpr: "varchar(255)", expected: &schema.StringType{T: TypeVarchar, Size: 255}, }, { typeExpr: "char(255)", expected: &schema.StringType{T: TypeChar, Size: 255}, }, { typeExpr: `sql("custom")`, expected: &schema.UnsupportedType{T: "custom"}, }, { typeExpr: "binary(255)", expected: &schema.BinaryType{T: TypeBinary, Size: p(255)}, }, { typeExpr: "varbinary(255)", expected: &schema.BinaryType{T: TypeVarBinary, Size: p(255)}, }, { typeExpr: "int", expected: &schema.IntegerType{T: TypeInt}, }, { typeExpr: "int", extraAttr: "unsigned=true", expected: &schema.IntegerType{T: TypeInt, Unsigned: true}, }, { typeExpr: "int", expected: &schema.IntegerType{T: TypeInt}, }, { typeExpr: "bigint", expected: &schema.IntegerType{T: TypeBigInt}, }, { typeExpr: "bigint", extraAttr: "unsigned=true", expected: &schema.IntegerType{T: TypeBigInt, Unsigned: true}, }, { typeExpr: "tinyint", expected: &schema.IntegerType{T: TypeTinyInt}, }, { typeExpr: "tinyint", extraAttr: "unsigned=true", expected: &schema.IntegerType{T: TypeTinyInt, Unsigned: true}, }, { typeExpr: "smallint", expected: &schema.IntegerType{T: TypeSmallInt}, }, { typeExpr: "smallint", extraAttr: "unsigned=true", expected: &schema.IntegerType{T: TypeSmallInt, Unsigned: true}, }, { typeExpr: 
"mediumint", expected: &schema.IntegerType{T: TypeMediumInt}, }, { typeExpr: "mediumint", extraAttr: "unsigned=true", expected: &schema.IntegerType{T: TypeMediumInt, Unsigned: true}, }, { typeExpr: "tinytext", expected: &schema.StringType{T: TypeTinyText}, }, { typeExpr: "mediumtext", expected: &schema.StringType{T: TypeMediumText}, }, { typeExpr: "longtext", expected: &schema.StringType{T: TypeLongText}, }, { typeExpr: "text", expected: &schema.StringType{T: TypeText}, }, { typeExpr: `enum("on","off")`, expected: &schema.EnumType{T: TypeEnum, Values: []string{"on", "off"}}, }, { typeExpr: "bit", expected: &BitType{T: TypeBit}, }, { typeExpr: "bit(10)", expected: &BitType{T: TypeBit, Size: 10}, }, { typeExpr: "int(10)", expected: &schema.IntegerType{T: TypeInt}, }, { typeExpr: "tinyint(10)", expected: &schema.IntegerType{T: TypeTinyInt}, }, { typeExpr: "smallint(10)", expected: &schema.IntegerType{T: TypeSmallInt}, }, { typeExpr: "mediumint(10)", expected: &schema.IntegerType{T: TypeMediumInt}, }, { typeExpr: "bigint(10)", expected: &schema.IntegerType{T: TypeBigInt}, }, { typeExpr: "decimal", expected: &schema.DecimalType{T: TypeDecimal}, }, { typeExpr: "decimal(10)", expected: &schema.DecimalType{T: TypeDecimal, Precision: 10}, }, { typeExpr: "decimal(10,2)", expected: &schema.DecimalType{T: TypeDecimal, Precision: 10, Scale: 2}, }, { typeExpr: "decimal(10,2)", extraAttr: "unsigned=true", expected: &schema.DecimalType{T: TypeDecimal, Precision: 10, Scale: 2, Unsigned: true}, }, { typeExpr: "numeric", expected: &schema.DecimalType{T: TypeNumeric}, }, { typeExpr: "numeric", extraAttr: "unsigned=true", expected: &schema.DecimalType{T: TypeNumeric, Unsigned: true}, }, { typeExpr: "numeric(10)", expected: &schema.DecimalType{T: TypeNumeric, Precision: 10}, }, { typeExpr: "numeric(10,2)", expected: &schema.DecimalType{T: TypeNumeric, Precision: 10, Scale: 2}, }, { typeExpr: "float(10,0)", expected: &schema.FloatType{T: TypeFloat, Precision: 10}, }, { typeExpr: 
"float(10)", extraAttr: "unsigned=true", expected: &schema.FloatType{T: TypeFloat, Precision: 10, Unsigned: true}, }, { typeExpr: "double(10,0)", expected: &schema.FloatType{T: TypeDouble, Precision: 10}, }, { typeExpr: "real", expected: &schema.FloatType{T: TypeReal}, }, { typeExpr: "real", extraAttr: "unsigned=true", expected: &schema.FloatType{T: TypeReal, Unsigned: true}, }, { typeExpr: "timestamp", expected: &schema.TimeType{T: TypeTimestamp}, }, { typeExpr: "timestamp(6)", expected: typeTime(TypeTimestamp, 6), }, { typeExpr: "date", expected: &schema.TimeType{T: TypeDate}, }, { typeExpr: "time", expected: &schema.TimeType{T: TypeTime}, }, { typeExpr: "time(6)", expected: typeTime(TypeTime, 6), }, { typeExpr: "datetime", expected: &schema.TimeType{T: TypeDateTime}, }, { typeExpr: "datetime(6)", expected: typeTime(TypeDateTime, 6), }, { typeExpr: "year", expected: &schema.TimeType{T: TypeYear}, }, { typeExpr: "year(2)", expected: typeTime(TypeYear, 2), }, { typeExpr: "varchar(10)", expected: &schema.StringType{T: TypeVarchar, Size: 10}, }, { typeExpr: "char(25)", expected: &schema.StringType{T: TypeChar, Size: 25}, }, { typeExpr: "varbinary(30)", expected: &schema.BinaryType{T: TypeVarBinary, Size: p(30)}, }, { typeExpr: "binary", expected: &schema.BinaryType{T: TypeBinary}, }, { typeExpr: "binary(5)", expected: &schema.BinaryType{T: TypeBinary, Size: p(5)}, }, { typeExpr: "blob(5)", expected: &schema.BinaryType{T: TypeBlob}, }, { typeExpr: "tinyblob", expected: &schema.BinaryType{T: TypeTinyBlob}, }, { typeExpr: "mediumblob", expected: &schema.BinaryType{T: TypeMediumBlob}, }, { typeExpr: "longblob", expected: &schema.BinaryType{T: TypeLongBlob}, }, { typeExpr: "json", expected: &schema.JSONType{T: TypeJSON}, }, { typeExpr: "text(13)", expected: &schema.StringType{T: TypeText}, }, { typeExpr: "tinytext", expected: &schema.StringType{T: TypeTinyText}, }, { typeExpr: "mediumtext", expected: &schema.StringType{T: TypeMediumText}, }, { typeExpr: "longtext", 
expected: &schema.StringType{T: TypeLongText}, }, { typeExpr: `set("a","b")`, expected: &SetType{Values: []string{"a", "b"}}, }, { typeExpr: "geometry", expected: &schema.SpatialType{T: TypeGeometry}, }, { typeExpr: "point", expected: &schema.SpatialType{T: TypePoint}, }, { typeExpr: "multipoint", expected: &schema.SpatialType{T: TypeMultiPoint}, }, { typeExpr: "linestring", expected: &schema.SpatialType{T: TypeLineString}, }, { typeExpr: "multilinestring", expected: &schema.SpatialType{T: TypeMultiLineString}, }, { typeExpr: "polygon", expected: &schema.SpatialType{T: TypePolygon}, }, { typeExpr: "multipolygon", expected: &schema.SpatialType{T: TypeMultiPolygon}, }, { typeExpr: "geometrycollection", expected: &schema.SpatialType{T: TypeGeometryCollection}, }, { typeExpr: "tinyint(1)", expected: &schema.BoolType{T: TypeBool}, }, { typeExpr: "bool", expected: &schema.BoolType{T: TypeBool}, }, { typeExpr: "boolean", expected: &schema.BoolType{T: TypeBool}, }, } for _, tt := range tests { t.Run(tt.typeExpr, func(t *testing.T) { doc := fmt.Sprintf(`table "test" { schema = schema.test column "test" { null = false type = %s%s } } schema "test" { } `, tt.typeExpr, lineIfSet(tt.extraAttr)) var test schema.Schema err := EvalHCLBytes([]byte(doc), &test, nil) require.NoError(t, err) colspec := test.Tables[0].Columns[0] require.EqualValues(t, tt.expected, colspec.Type.Type) spec, err := MarshalHCL(&test) require.NoError(t, err) var after schema.Schema err = EvalHCLBytes(spec, &after, nil) require.NoError(t, err) require.EqualValues(t, tt.expected, after.Tables[0].Columns[0].Type.Type) }) } } func TestInputVars(t *testing.T) { spectest.TestInputVars(t, EvalHCL) } func TestParseType_Decimal(t *testing.T) { for _, tt := range []struct { input string wantT *schema.DecimalType wantErr bool }{ { input: "decimal", wantT: &schema.DecimalType{T: TypeDecimal}, }, { input: "decimal unsigned", wantT: &schema.DecimalType{T: TypeDecimal, Unsigned: true}, }, { input: "decimal(10)", wantT: 
&schema.DecimalType{T: TypeDecimal, Precision: 10}, }, { input: "decimal(10) unsigned", wantT: &schema.DecimalType{T: TypeDecimal, Precision: 10, Unsigned: true}, }, { input: "decimal(10,2)", wantT: &schema.DecimalType{T: TypeDecimal, Precision: 10, Scale: 2}, }, { input: "decimal(10, 2) unsigned", wantT: &schema.DecimalType{T: TypeDecimal, Precision: 10, Scale: 2, Unsigned: true}, }, } { d, err := ParseType(tt.input) require.Equal(t, tt.wantErr, err != nil) require.Equal(t, tt.wantT, d) } } func typeTime(t string, p int) schema.Type { return &schema.TimeType{T: t, Precision: &p} } func lineIfSet(s string) string { if s != "" { return "\n" + s } return s } func TestUnmarshalSpec(t *testing.T) { s := []byte(` schema "s1" {} schema "s2" {} table "s1" "t1" { schema = schema.s1 column "id" { type = int } } table "s2" "t1" { schema = schema.s2 column "id" { type = int } } table "s2" "t2" { schema = schema.s2 column "oid" { type = int } foreign_key "fk" { columns = [column.oid] ref_columns = [table.s2.t1.column.id] } } `) var ( r schema.Realm expected = schema.NewRealm( schema.New("s1").AddTables(schema.NewTable("t1").AddColumns(schema.NewIntColumn("id", "int"))), schema.New("s2").AddTables( schema.NewTable("t1").AddColumns(schema.NewIntColumn("id", "int")), schema.NewTable("t2").AddColumns(schema.NewIntColumn("oid", "int")), ), ) ) expected.Schemas[1].Tables[1].AddForeignKeys(schema.NewForeignKey("fk"). AddColumns(expected.Schemas[1].Tables[1].Columns[0]). SetRefTable(expected.Schemas[1].Tables[0]). AddRefColumns(expected.Schemas[1].Tables[0].Columns[0])) require.NoError(t, EvalHCLBytes(s, &r, nil)) } func TestMarshalRealm(t *testing.T) { t1 := schema.NewTable("t1"). AddColumns(schema.NewIntColumn("id", "int")) t2 := schema.NewTable("t2"). SetComment("Qualified with s1"). AddColumns(schema.NewIntColumn("oid", "int")) t2.AddForeignKeys(schema.NewForeignKey("oid2id").AddColumns(t2.Columns[0]).SetRefTable(t1).AddRefColumns(t1.Columns[0])) t3 := schema.NewTable("t3"). 
AddColumns(schema.NewIntColumn("id", "int")) t4 := schema.NewTable("t2"). SetComment("Qualified with s2"). AddColumns(schema.NewIntColumn("oid", "int")) t4.AddForeignKeys(schema.NewForeignKey("oid2id").AddColumns(t4.Columns[0]).SetRefTable(t3).AddRefColumns(t3.Columns[0])) t5 := schema.NewTable("t5"). AddColumns(schema.NewIntColumn("oid", "int")) t5.AddForeignKeys(schema.NewForeignKey("oid2id1").AddColumns(t5.Columns[0]).SetRefTable(t1).AddRefColumns(t1.Columns[0])) // Reference is qualified with s1. t5.AddForeignKeys(schema.NewForeignKey("oid2id2").AddColumns(t5.Columns[0]).SetRefTable(t2).AddRefColumns(t2.Columns[0])) r := schema.NewRealm( schema.New("s1").AddTables(t1, t2), schema.New("s2").AddTables(t3, t4, t5), ) got, err := MarshalHCL.MarshalSpec(r) require.NoError(t, err) require.Equal( t, `table "t1" { schema = schema.s1 column "id" { null = false type = int } } table "s1" "t2" { schema = schema.s1 comment = "Qualified with s1" column "oid" { null = false type = int } foreign_key "oid2id" { columns = [column.oid] ref_columns = [table.t1.column.id] } } table "t3" { schema = schema.s2 column "id" { null = false type = int } } table "s2" "t2" { schema = schema.s2 comment = "Qualified with s2" column "oid" { null = false type = int } foreign_key "oid2id" { columns = [column.oid] ref_columns = [table.t3.column.id] } } table "t5" { schema = schema.s2 column "oid" { null = false type = int } foreign_key "oid2id1" { columns = [column.oid] ref_columns = [table.t1.column.id] } foreign_key "oid2id2" { columns = [column.oid] ref_columns = [table.s1.t2.column.oid] } } schema "s1" { } schema "s2" { } `, string(got)) } atlas-0.7.2/sql/mysql/tidb.go000066400000000000000000000211141431455511600160450ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package mysql import ( "context" "encoding/binary" "fmt" "regexp" "sort" "strconv" "strings" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" ) type ( // tplanApply decorates MySQL planApply. tplanApply struct{ planApply } // tdiff decorates MySQL diff. tdiff struct{ diff } // tinspect decorates MySQL inspect. tinspect struct{ inspect } ) // priority computes the priority of each change. // // TiDB does not support multischema ALTERs (i.e. multiple changes in a single ALTER statement). // Therefore, we have to break down each alter. This function helps order the ALTERs so they work. // e.g. priority gives precedence to DropForeignKey over DropColumn, because a column cannot be // dropped if its foreign key was not dropped before. func priority(change schema.Change) int { switch c := change.(type) { case *schema.ModifyTable: // each modifyTable should have a single change since we apply `flat` before we sort. return priority(c.Changes[0]) case *schema.ModifySchema: // each modifyTable should have a single change since we apply `flat` before we sort. return priority(c.Changes[0]) case *schema.AddColumn: return 1 case *schema.DropIndex, *schema.DropForeignKey, *schema.DropAttr, *schema.DropCheck: return 2 case *schema.ModifyIndex, *schema.ModifyForeignKey: return 3 default: return 4 } } // flat takes a list of changes and breaks them down to single atomic changes (e.g: no ModifyTable // with multiple AddColumn inside it). Note that, the only "changes" that include sub-changes are // `ModifyTable` and `ModifySchema`. 
func flat(changes []schema.Change) []schema.Change { var flat []schema.Change for _, change := range changes { switch m := change.(type) { case *schema.ModifyTable: for _, c := range m.Changes { flat = append(flat, &schema.ModifyTable{ T: m.T, Changes: []schema.Change{c}, }) } case *schema.ModifySchema: for _, c := range m.Changes { flat = append(flat, &schema.ModifySchema{ S: m.S, Changes: []schema.Change{c}, }) } default: flat = append(flat, change) } } return flat } // PlanChanges returns a migration plan for the given schema changes. func (p *tplanApply) PlanChanges(ctx context.Context, name string, changes []schema.Change, opts ...migrate.PlanOption) (*migrate.Plan, error) { fc := flat(changes) sort.SliceStable(fc, func(i, j int) bool { return priority(fc[i]) < priority(fc[j]) }) s := &state{ conn: p.conn, Plan: migrate.Plan{ Name: name, // A plan is reversible, if all // its changes are reversible. Reversible: true, Transactional: false, }, } for _, c := range fc { // Use the planner of MySQL with each "atomic" change. plan, err := p.planApply.PlanChanges(ctx, name, []schema.Change{c}, opts...) if err != nil { return nil, err } if !plan.Reversible { s.Plan.Reversible = false } s.Plan.Changes = append(s.Plan.Changes, plan.Changes...) } return &s.Plan, nil } func (p *tplanApply) ApplyChanges(ctx context.Context, changes []schema.Change, opts ...migrate.PlanOption) error { return sqlx.ApplyChanges(ctx, changes, p, opts...) 
} func (i *tinspect) InspectSchema(ctx context.Context, name string, opts *schema.InspectOptions) (*schema.Schema, error) { s, err := i.inspect.InspectSchema(ctx, name, opts) if err != nil { return nil, err } return i.patchSchema(ctx, s) } func (i *tinspect) InspectRealm(ctx context.Context, opts *schema.InspectRealmOption) (*schema.Realm, error) { r, err := i.inspect.InspectRealm(ctx, opts) if err != nil { return nil, err } for _, s := range r.Schemas { if _, err := i.patchSchema(ctx, s); err != nil { return nil, err } } return r, nil } func (i *tinspect) patchSchema(ctx context.Context, s *schema.Schema) (*schema.Schema, error) { for _, t := range s.Tables { var createStmt CreateStmt if ok := sqlx.Has(t.Attrs, &createStmt); !ok { if err := i.createStmt(ctx, t); err != nil { return nil, err } } if err := i.setCollate(t); err != nil { return nil, err } if err := i.setFKs(s, t); err != nil { return nil, err } if err := i.setAutoIncrement(t); err != nil { return nil, err } for _, c := range t.Columns { i.patchColumn(ctx, c) } } return s, nil } func (i *tinspect) patchColumn(_ context.Context, c *schema.Column) { _, ok := c.Type.Type.(*BitType) if !ok { return } // TiDB has a bug where it does not format bit default value correctly. if lit, ok := c.Default.(*schema.Literal); ok && !strings.HasPrefix(lit.V, "b'") { lit.V = bytesToBitLiteral([]byte(lit.V)) } } // bytesToBitLiteral converts a bytes to MySQL bit literal. // e.g. []byte{4} -> b'100', []byte{2,1} -> b'1000000001'. // See: https://github.com/pingcap/tidb/issues/32655. func bytesToBitLiteral(b []byte) string { bytes := make([]byte, 8) for i := 0; i < len(b); i++ { bytes[8-len(b)+i] = b[i] } val := binary.BigEndian.Uint64(bytes) return fmt.Sprintf("b'%b'", val) } // e.g. CONSTRAINT "" FOREIGN KEY ("foo_id") REFERENCES "foo" ("id"). 
var reFK = regexp.MustCompile("(?i)CONSTRAINT\\s+[\"`]*(\\w+)[\"`]*\\s+FOREIGN\\s+KEY\\s*\\(([,\"` \\w]+)\\)\\s+REFERENCES\\s+[\"`]*(\\w+)[\"`]*\\s*\\(([,\"` \\w]+)\\).*") var reActions = regexp.MustCompile(fmt.Sprintf("(?i)(ON)\\s+(UPDATE|DELETE)\\s+(%s|%s|%s|%s|%s)", schema.NoAction, schema.Restrict, schema.SetNull, schema.SetDefault, schema.Cascade)) func (i *tinspect) setFKs(s *schema.Schema, t *schema.Table) error { var c CreateStmt if !sqlx.Has(t.Attrs, &c) { return fmt.Errorf("missing CREATE TABLE statment in attribuets for %q", t.Name) } for _, m := range reFK.FindAllStringSubmatch(c.S, -1) { if len(m) != 5 { return fmt.Errorf("unexpected number of matches for a table constraint: %q", m) } stmt, ctName, clmns, refTableName, refClmns := m[0], m[1], m[2], m[3], m[4] fk := &schema.ForeignKey{ Symbol: ctName, Table: t, } actions := reActions.FindAllStringSubmatch(stmt, 2) for _, actionMatches := range actions { actionType, actionOp := actionMatches[2], actionMatches[3] switch actionType { case "UPDATE": fk.OnUpdate = schema.ReferenceOption(actionOp) case "DELETE": fk.OnDelete = schema.ReferenceOption(actionOp) default: return fmt.Errorf("action type %s is none of 'UPDATE'/'DELETE'", actionType) } } refTable, ok := s.Table(refTableName) if !ok { return fmt.Errorf("couldn't resolve ref table %s on ", m[3]) } fk.RefTable = refTable for _, c := range columns(s, clmns) { column, ok := t.Column(c) if !ok { return fmt.Errorf("column %q was not found for fk %q", c, ctName) } fk.Columns = append(fk.Columns, column) } for _, c := range columns(s, refClmns) { column, ok := refTable.Column(c) if !ok { return fmt.Errorf("ref column %q was not found for fk %q", c, ctName) } fk.RefColumns = append(fk.RefColumns, column) } t.ForeignKeys = append(t.ForeignKeys, fk) } return nil } // columns from the matched regex above. 
func columns(schema *schema.Schema, s string) []string { names := strings.Split(s, ",") for i := range names { names[i] = strings.Trim(strings.TrimSpace(names[i]), "`\"") } return names } // e.g CHARSET=utf8mb4 COLLATE=utf8mb4_bin var reColl = regexp.MustCompile(`(?i)CHARSET\s*=\s*(\w+)\s*COLLATE\s*=\s*(\w+)`) // setCollate extracts the updated Collation from CREATE TABLE statement. func (i *tinspect) setCollate(t *schema.Table) error { var c CreateStmt if !sqlx.Has(t.Attrs, &c) { return fmt.Errorf("missing CREATE TABLE statement in attributes for %q", t.Name) } matches := reColl.FindStringSubmatch(c.S) if len(matches) != 3 { return fmt.Errorf("missing COLLATE and/or CHARSET information on CREATE TABLE statement for %q", t.Name) } t.SetCharset(matches[1]) t.SetCollation(matches[2]) return nil } // setCollate extracts the updated Collation from CREATE TABLE statement. func (i *tinspect) setAutoIncrement(t *schema.Table) error { // patch only it is set (set falsely to '1' due to this bug:https://github.com/pingcap/tidb/issues/24702). ai := &AutoIncrement{} if !sqlx.Has(t.Attrs, ai) { return nil } var c CreateStmt if !sqlx.Has(t.Attrs, &c) { return fmt.Errorf("missing CREATE TABLE statement in attributes for %q", t.Name) } matches := reAutoinc.FindStringSubmatch(c.S) if len(matches) != 2 { return nil } v, err := strconv.ParseInt(matches[1], 10, 64) if err != nil { return err } ai.V = v schema.ReplaceOrAppend(&t.Attrs, ai) return nil } atlas-0.7.2/sql/postgres/000077500000000000000000000000001431455511600152765ustar00rootroot00000000000000atlas-0.7.2/sql/postgres/convert.go000066400000000000000000000317231431455511600173130ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package postgres import ( "errors" "fmt" "regexp" "strconv" "strings" "ariga.io/atlas/sql/schema" ) // FormatType converts schema type to its column form in the database. // An error is returned if the type cannot be recognized. func FormatType(t schema.Type) (string, error) { var f string switch t := t.(type) { case *ArrayType: f = strings.ToLower(t.T) case *BitType: f = strings.ToLower(t.T) // BIT without a length is equivalent to BIT(1), // BIT VARYING has unlimited length. if f == TypeBit && t.Len > 1 || f == TypeBitVar && t.Len > 0 { f = fmt.Sprintf("%s(%d)", f, t.Len) } case *schema.BoolType: // BOOLEAN can be abbreviated as BOOL. if f = strings.ToLower(t.T); f == TypeBool { f = TypeBoolean } case *schema.BinaryType: f = strings.ToLower(t.T) case *CurrencyType: f = strings.ToLower(t.T) case *schema.EnumType: if t.T == "" { return "", errors.New("postgres: missing enum type name") } f = t.T case *schema.IntegerType: switch f = strings.ToLower(t.T); f { case TypeSmallInt, TypeInteger, TypeBigInt: case TypeInt2: f = TypeSmallInt case TypeInt, TypeInt4: f = TypeInteger case TypeInt8: f = TypeBigInt } case *IntervalType: f = strings.ToLower(t.T) if t.F != "" { f += " " + strings.ToLower(t.F) } if t.Precision != nil && *t.Precision != defaultTimePrecision { f += fmt.Sprintf("(%d)", *t.Precision) } case *schema.StringType: switch f = strings.ToLower(t.T); f { case TypeText: // CHAR(n) is alias for CHARACTER(n). If not length was // specified, the definition is equivalent to CHARACTER(1). case TypeChar, TypeCharacter: n := t.Size if n == 0 { n = 1 } f = fmt.Sprintf("%s(%d)", TypeCharacter, n) // VARCHAR(n) is alias for CHARACTER VARYING(n). If not length // was specified, the type accepts strings of any size. 
case TypeVarChar, TypeCharVar: f = TypeCharVar if t.Size != 0 { f = fmt.Sprintf("%s(%d)", TypeCharVar, t.Size) } default: return "", fmt.Errorf("postgres: unexpected string type: %q", t.T) } case *schema.TimeType: f = timeAlias(t.T) if p := t.Precision; p != nil && *p != defaultTimePrecision && strings.HasPrefix(f, "time") { f += fmt.Sprintf("(%d)", *p) } case *schema.FloatType: switch f = strings.ToLower(t.T); f { case TypeFloat4: f = TypeReal case TypeFloat8: f = TypeDouble case TypeFloat: switch { case t.Precision > 0 && t.Precision <= 24: f = TypeReal case t.Precision == 0 || (t.Precision > 24 && t.Precision <= 53): f = TypeDouble default: return "", fmt.Errorf("postgres: precision for type float must be between 1 and 53: %d", t.Precision) } } case *schema.DecimalType: switch f = strings.ToLower(t.T); f { case TypeNumeric: // The DECIMAL type is an alias for NUMERIC. case TypeDecimal: f = TypeNumeric default: return "", fmt.Errorf("postgres: unexpected decimal type: %q", t.T) } switch p, s := t.Precision, t.Scale; { case p == 0 && s == 0: case s < 0: return "", fmt.Errorf("postgres: decimal type must have scale >= 0: %d", s) case p == 0 && s > 0: return "", fmt.Errorf("postgres: decimal type must have precision between 1 and 1000: %d", p) case s == 0: f = fmt.Sprintf("%s(%d)", f, p) default: f = fmt.Sprintf("%s(%d,%d)", f, p, s) } case *SerialType: switch f = strings.ToLower(t.T); f { case TypeSmallSerial, TypeSerial, TypeBigSerial: case TypeSerial2: f = TypeSmallSerial case TypeSerial4: f = TypeSerial case TypeSerial8: f = TypeBigSerial default: return "", fmt.Errorf("postgres: unexpected serial type: %q", t.T) } case *schema.JSONType: f = strings.ToLower(t.T) case *UUIDType: f = strings.ToLower(t.T) case *schema.SpatialType: f = strings.ToLower(t.T) case *NetworkType: f = strings.ToLower(t.T) case *UserDefinedType: f = strings.ToLower(t.T) case *XMLType: f = strings.ToLower(t.T) case *schema.UnsupportedType: return "", fmt.Errorf("postgres: unsupported type: 
%q", t.T) default: return "", fmt.Errorf("postgres: invalid schema type: %T", t) } return f, nil } // ParseType returns the schema.Type value represented by the given raw type. // The raw value is expected to follow the format in PostgreSQL information schema // or as an input for the CREATE TABLE statement. func ParseType(typ string) (schema.Type, error) { var ( err error d *columnDesc ) // Normalize PostgreSQL array data types from "CREATE TABLE" format to // "INFORMATION_SCHEMA" format (i.e. as it is inspected from the database). if t, ok := arrayType(typ); ok { d = &columnDesc{typ: TypeArray, fmtype: t + "[]"} } else if d, err = parseColumn(typ); err != nil { return nil, err } t, err := columnType(d) if err != nil { return nil, err } // If the type is unknown (to us), we fall back to user-defined but expect // to improve this in future versions by ensuring this against the database. if ut, ok := t.(*schema.UnsupportedType); ok { t = &UserDefinedType{T: ut.T} } return t, nil } func columnType(c *columnDesc) (schema.Type, error) { var typ schema.Type switch t := c.typ; strings.ToLower(t) { case TypeBigInt, TypeInt8, TypeInt, TypeInteger, TypeInt4, TypeSmallInt, TypeInt2, TypeInt64: typ = &schema.IntegerType{T: t} case TypeBit, TypeBitVar: typ = &BitType{T: t, Len: c.size} case TypeBool, TypeBoolean: typ = &schema.BoolType{T: t} case TypeBytea: typ = &schema.BinaryType{T: t} case TypeCharacter, TypeChar, TypeCharVar, TypeVarChar, TypeText: // A `character` column without length specifier is equivalent to `character(1)`, // but `varchar` without length accepts strings of any size (same as `text`). 
typ = &schema.StringType{T: t, Size: int(c.size)} case TypeCIDR, TypeInet, TypeMACAddr, TypeMACAddr8: typ = &NetworkType{T: t} case TypeCircle, TypeLine, TypeLseg, TypeBox, TypePath, TypePolygon, TypePoint, TypeGeometry: typ = &schema.SpatialType{T: t} case TypeDate: typ = &schema.TimeType{T: t} case TypeTime, TypeTimeWOTZ, TypeTimeTZ, TypeTimeWTZ, TypeTimestamp, TypeTimestampTZ, TypeTimestampWTZ, TypeTimestampWOTZ: p := defaultTimePrecision if c.timePrecision != nil { p = int(*c.timePrecision) } typ = &schema.TimeType{T: t, Precision: &p} case TypeInterval: p := defaultTimePrecision if c.timePrecision != nil { p = int(*c.timePrecision) } typ = &IntervalType{T: t, Precision: &p} if c.interval != "" { f, ok := intervalField(c.interval) if !ok { return &schema.UnsupportedType{T: c.interval}, nil } typ.(*IntervalType).F = f } case TypeReal, TypeDouble, TypeFloat, TypeFloat4, TypeFloat8: typ = &schema.FloatType{T: t, Precision: int(c.precision)} case TypeJSON, TypeJSONB: typ = &schema.JSONType{T: t} case TypeMoney: typ = &CurrencyType{T: t} case TypeDecimal, TypeNumeric: typ = &schema.DecimalType{T: t, Precision: int(c.precision), Scale: int(c.scale)} case TypeSmallSerial, TypeSerial, TypeBigSerial, TypeSerial2, TypeSerial4, TypeSerial8: typ = &SerialType{T: t, Precision: int(c.precision)} case TypeUUID: typ = &UUIDType{T: t} case TypeXML: typ = &XMLType{T: t} case TypeArray: // Ignore multi-dimensions or size constraints // as they are ignored by the database. typ = &ArrayType{T: c.fmtype} if t, ok := arrayType(c.fmtype); ok { tt, err := ParseType(t) if err != nil { return nil, err } if c.elemtyp == "e" { // Override the element type in // case it is an enum. tt = newEnumType(t, c.typelem) } typ.(*ArrayType).Type = tt } case TypeUserDefined: typ = &UserDefinedType{T: c.fmtype} // The `typtype` column is set to 'e' for enum types, and the // values are filled in batch after the rows above is closed. 
// https://postgresql.org/docs/current/catalog-pg-type.html if c.typtype == "e" { typ = newEnumType(c.fmtype, c.typid) } default: typ = &schema.UnsupportedType{T: t} } return typ, nil } // reArray parses array declaration. See: https://postgresql.org/docs/current/arrays.html. var reArray = regexp.MustCompile(`(?i)(.+?)(( +ARRAY( *\[[ \d]*] *)*)+|( *\[[ \d]*] *)+)$`) // arrayType reports if the given string is an array type (e.g. int[], text[2]), // and returns its "udt_name" as it was inspected from the database. func arrayType(t string) (string, bool) { matches := reArray.FindStringSubmatch(t) if len(matches) < 2 { return "", false } return strings.TrimSpace(matches[1]), true } // reInterval parses declaration of interval fields. See: https://www.postgresql.org/docs/current/datatype-datetime.html. var reInterval = regexp.MustCompile(`(?i)(?:INTERVAL\s*)?(YEAR|MONTH|DAY|HOUR|MINUTE|SECOND|YEAR TO MONTH|DAY TO HOUR|DAY TO MINUTE|DAY TO SECOND|HOUR TO MINUTE|HOUR TO SECOND|MINUTE TO SECOND)?\s*(?:\(([0-6])\))?$`) // intervalField reports if the given string is an interval // field type and returns its value (e.g. SECOND, MINUTE TO SECOND). func intervalField(t string) (string, bool) { matches := reInterval.FindStringSubmatch(t) if len(matches) != 3 || matches[1] == "" { return "", false } return matches[1], true } // columnDesc represents a column descriptor. type columnDesc struct { typ string // data_type fmtype string // pg_catalog.format_type size int64 // character_maximum_length typtype string // pg_type.typtype typelem int64 // pg_type.typelem elemtyp string // pg_type.typtype of the array element type above. 
typid int64 // pg_type.oid precision int64 timePrecision *int64 scale int64 parts []string interval string } var reDigits = regexp.MustCompile(`\d`) func parseColumn(s string) (*columnDesc, error) { parts := strings.FieldsFunc(s, func(r rune) bool { return r == '(' || r == ')' || r == ' ' || r == ',' }) var ( err error c = &columnDesc{ typ: parts[0], parts: parts, } ) switch c.parts[0] { case TypeVarChar, TypeCharVar, TypeChar, TypeCharacter: if err := parseCharParts(c.parts, c); err != nil { return nil, err } case TypeDecimal, TypeNumeric, TypeFloat: if len(parts) > 1 { c.precision, err = strconv.ParseInt(parts[1], 10, 64) if err != nil { return nil, fmt.Errorf("postgres: parse precision %q: %w", parts[1], err) } } if len(parts) > 2 { c.scale, err = strconv.ParseInt(parts[2], 10, 64) if err != nil { return nil, fmt.Errorf("postgres: parse scale %q: %w", parts[1], err) } } case TypeBit: if err := parseBitParts(parts, c); err != nil { return nil, err } case TypeDouble, TypeFloat8: c.precision = 53 case TypeReal, TypeFloat4: c.precision = 24 case TypeTime, TypeTimeTZ, TypeTimestamp, TypeTimestampTZ: t, p := s, int64(defaultTimePrecision) // If the second part is only one digit it is the precision argument. // For cases like "timestamp(4) with time zone" make sure to not drop // the rest of the type definition. 
if len(parts) > 1 && reDigits.MatchString(parts[1]) { i, err := strconv.ParseInt(parts[1], 10, 64) if err != nil { return nil, fmt.Errorf("postgres: parse time precision %q: %w", parts[1], err) } p = i t = strings.Join(append(c.parts[:1], c.parts[2:]...), " ") } c.typ = timeAlias(t) c.timePrecision = &p case TypeInterval: matches := reInterval.FindStringSubmatch(s) c.interval = matches[1] if matches[2] != "" { i, err := strconv.ParseInt(matches[2], 10, 64) if err != nil { return nil, fmt.Errorf("postgres: parse interval precision %q: %w", parts[1], err) } c.timePrecision = &i } default: c.typ = s } return c, nil } func parseCharParts(parts []string, c *columnDesc) error { j := strings.Join(parts, " ") switch { case strings.HasPrefix(j, TypeVarChar): c.typ = TypeVarChar parts = parts[1:] case strings.HasPrefix(j, TypeCharVar): c.typ = TypeCharVar parts = parts[2:] default: parts = parts[1:] } if len(parts) == 0 { return nil } size, err := strconv.ParseInt(parts[0], 10, 64) if err != nil { return fmt.Errorf("postgres: parse size %q: %w", parts[0], err) } c.size = size return nil } func parseBitParts(parts []string, c *columnDesc) error { if len(parts) == 1 { c.size = 1 return nil } parts = parts[1:] if parts[0] == "varying" { c.typ = TypeBitVar parts = parts[1:] } if len(parts) == 0 { return nil } size, err := strconv.ParseInt(parts[0], 10, 64) if err != nil { return fmt.Errorf("postgres: parse size %q: %w", parts[1], err) } c.size = size return nil } // timeAlias returns the abbreviation for the given time type. func timeAlias(t string) string { switch t = strings.ToLower(t); t { // TIMESTAMPTZ be equivalent to TIMESTAMP WITH TIME ZONE. case TypeTimestampWTZ: t = TypeTimestampTZ // TIMESTAMP be equivalent to TIMESTAMP WITHOUT TIME ZONE. case TypeTimestampWOTZ: t = TypeTimestamp // TIME be equivalent to TIME WITHOUT TIME ZONE. case TypeTimeWOTZ: t = TypeTime // TIMETZ be equivalent to TIME WITH TIME ZONE. 
case TypeTimeWTZ: t = TypeTimeTZ } return t } atlas-0.7.2/sql/postgres/crdb.go000066400000000000000000000232571431455511600165500ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package postgres import ( "context" "database/sql" "fmt" "regexp" "strings" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlclient" ) func init() { sqlclient.Register( "cockroach", sqlclient.DriverOpener(Open), sqlclient.RegisterCodec(MarshalHCL, EvalHCL), sqlclient.RegisterFlavours("crdb"), sqlclient.RegisterURLParser(parser{}), ) } // crdbDiff implements the sqlx.DiffDriver for CockroachDB. type ( crdbDiff struct{ diff } crdbInspect struct{ inspect } ) var _ sqlx.DiffDriver = (*crdbDiff)(nil) // pathSchema fixes: https://github.com/cockroachdb/cockroach/issues/82040. func (i *crdbInspect) patchSchema(s *schema.Schema) { for _, t := range s.Tables { for _, c := range t.Columns { id, ok := identity(c.Attrs) if !ok { continue } c.Default = nil if g := strings.ToUpper(id.Generation); strings.Contains(g, "ALWAYS") { id.Generation = "ALWAYS" } else if strings.Contains(g, "BY DEFAULT") { id.Generation = "BY DEFAULT" } schema.ReplaceOrAppend(&c.Attrs, id) } } } func (i *crdbInspect) InspectSchema(ctx context.Context, name string, opts *schema.InspectOptions) (*schema.Schema, error) { s, err := i.inspect.InspectSchema(ctx, name, opts) if err != nil { return nil, err } i.patchSchema(s) return s, err } func (i *crdbInspect) InspectRealm(ctx context.Context, opts *schema.InspectRealmOption) (*schema.Realm, error) { r, err := i.inspect.InspectRealm(ctx, opts) if err != nil { return nil, err } for _, s := range r.Schemas { i.patchSchema(s) } return r, nil } // Normalize implements the sqlx.Normalizer. 
func (cd *crdbDiff) Normalize(from, to *schema.Table) error { cd.normalize(from) cd.normalize(to) return nil } func (cd *crdbDiff) ColumnChange(fromT *schema.Table, from, to *schema.Column) (schema.ChangeKind, error) { // All serial types in Cockroach are implemented as bigint. // See: https://www.cockroachlabs.com/docs/stable/serial.html#generated-values-for-mode-sql_sequence-and-sql_sequence_cached. for _, c := range []*schema.Column{from, to} { if _, ok := c.Type.Type.(*SerialType); ok { c.Type.Type = &schema.IntegerType{ T: TypeBigInt, } to.Default = nil from.Default = nil } } return cd.diff.ColumnChange(fromT, from, to) } func (cd *crdbDiff) normalize(table *schema.Table) { if table.PrimaryKey == nil { prim, ok := table.Column("rowid") if !ok { prim = schema.NewColumn("rowid"). AddAttrs(Identity{}). SetType(&schema.IntegerType{T: TypeBigInt}). SetDefault(&schema.RawExpr{X: "unique_rowid()"}) table.AddColumns(prim) } table.PrimaryKey = &schema.Index{ Name: "primary", Unique: true, Table: table, Parts: []*schema.IndexPart{{ SeqNo: 1, C: prim, }}, } } for _, c := range table.Columns { if _, ok := identity(c.Attrs); ok { if c.Default != nil { c.Default = nil continue } } switch t := c.Type.Type.(type) { // Integer types are aliased. // see: cockroachlabs.com/docs/v21.2/int.html#names-and-aliases. case *schema.IntegerType: switch t.T { case TypeBigInt, TypeInteger, TypeInt8, TypeInt64, TypeInt: t.T = TypeBigInt case TypeInt2, TypeSmallInt: t.T = TypeSmallInt } case *schema.JSONType: switch t.T { // Type json is aliased to jsonb. case TypeJSON: t.T = TypeJSONB } case *SerialType: c.Default = &schema.RawExpr{ X: "unique_rowid()", } case *schema.TimeType: // "timestamp" and "timestamptz" are accepted as // abbreviations for timestamp with(out) time zone. switch t.T { case "timestamp with time zone": t.T = "timestamptz" case "timestamp without time zone": t.T = "timestamp" } case *schema.FloatType: // The same numeric precision is used in all platform. 
// See: https://www.postgresql.org/docs/current/datatype-numeric.html switch { case t.T == "float" && t.Precision < 25: // float(1) to float(24) are selected as "real" type. t.T = "real" fallthrough case t.T == "real": t.Precision = 24 case t.T == "float" && t.Precision >= 25: // float(25) to float(53) are selected as "double precision" type. t.T = "double precision" fallthrough case t.T == "double precision": t.Precision = 53 } case *schema.StringType: switch t.T { case "character", "char": // Character without length specifier // is equivalent to character(1). t.Size = 1 } case *enumType: c.Type.Type = &schema.EnumType{T: t.T, Values: t.Values} } } } func (i *inspect) crdbIndexes(ctx context.Context, s *schema.Schema) error { rows, err := i.querySchema(ctx, crdbIndexesQuery, s) if err != nil { return fmt.Errorf("postgres: querying schema %q indexes: %w", s.Name, err) } defer rows.Close() if err := i.crdbAddIndexes(s, rows); err != nil { return err } return rows.Err() } var reIndexType = regexp.MustCompile("(?i)USING (BTREE|GIN|GIST)") func (i *inspect) crdbAddIndexes(s *schema.Schema, rows *sql.Rows) error { // Unlike Postgres, Cockroach may have duplicate index names. names := make(map[string]*schema.Index) for rows.Next() { var ( uniq, primary bool table, name, createStmt string column, contype, pred, expr, comment sql.NullString ) if err := rows.Scan(&table, &name, &column, &primary, &uniq, &contype, &createStmt, &pred, &expr, &comment); err != nil { return fmt.Errorf("cockroach: scanning indexes for schema %q: %w", s.Name, err) } t, ok := s.Table(table) if !ok { return fmt.Errorf("table %q was not found in schema", table) } uniqueName := fmt.Sprintf("%s.%s", table, name) idx, ok := names[uniqueName] if !ok { idx = &schema.Index{ Name: name, Unique: uniq, Table: t, } // Extract index type information from index create statement. // See: https://www.cockroachlabs.com/docs/stable/create-index.html. 
if parts := reIndexType.FindStringSubmatch(createStmt); len(parts) > 0 { idx.Attrs = append(idx.Attrs, &IndexType{T: parts[1]}) } if sqlx.ValidString(comment) { idx.Attrs = append(idx.Attrs, &schema.Comment{Text: comment.String}) } if sqlx.ValidString(contype) { idx.Attrs = append(idx.Attrs, &ConType{T: contype.String}) } if sqlx.ValidString(pred) { idx.Attrs = append(idx.Attrs, &IndexPredicate{P: pred.String}) } names[uniqueName] = idx if primary { t.PrimaryKey = idx } else { t.Indexes = append(t.Indexes, idx) } } part := &schema.IndexPart{SeqNo: len(idx.Parts) + 1, Desc: strings.Contains(createStmt, "DESC")} switch { case sqlx.ValidString(column): part.C, ok = t.Column(column.String) if !ok { return fmt.Errorf("cockroach: column %q was not found for index %q", column.String, idx.Name) } part.C.Indexes = append(part.C.Indexes, idx) case sqlx.ValidString(expr): part.X = &schema.RawExpr{ X: expr.String, } default: return fmt.Errorf("cockroach: invalid part for index %q", idx.Name) } idx.Parts = append(idx.Parts, part) } return nil } // CockroachDB types that are not part of PostgreSQL. const ( TypeInt64 = "int64" TypeGeometry = "geometry" ) // CockroachDB query for getting schema indexes. 
const crdbIndexesQuery = ` SELECT t.relname AS table_name, i.relname AS index_name, a.attname AS column_name, idx.indisprimary AS primary, idx.indisunique AS unique, c.contype AS constraint_type, pgi.indexdef create_stmt, pg_get_expr(idx.indpred, idx.indrelid) AS predicate, pg_get_indexdef(idx.indexrelid, idx.ord, false) AS expression, pg_catalog.obj_description(i.oid, 'pg_class') AS comment FROM ( select *, generate_series(1,array_length(i.indkey,1)) as ord, unnest(i.indkey) AS key from pg_index i ) idx JOIN pg_class i ON i.oid = idx.indexrelid JOIN pg_class t ON t.oid = idx.indrelid JOIN pg_namespace n ON n.oid = t.relnamespace LEFT JOIN pg_constraint c ON idx.indexrelid = c.conindid LEFT JOIN pg_indexes pgi ON pgi.tablename = t.relname AND indexname = i.relname AND n.nspname = pgi.schemaname LEFT JOIN pg_attribute a ON (a.attrelid, a.attnum) = (idx.indrelid, idx.key) WHERE n.nspname = $1 AND t.relname IN (%s) AND COALESCE(c.contype, '') <> 'f' ORDER BY table_name, index_name, idx.ord ` const crdbColumnsQuery = ` SELECT t1.table_name, t1.column_name, t1.data_type, pg_catalog.format_type(a.atttypid, a.atttypmod) AS format_type, t1.is_nullable, t1.column_default, t1.character_maximum_length, t1.numeric_precision, t1.datetime_precision, t1.numeric_scale, t1.interval_type, t1.character_set_name, t1.collation_name, t1.is_identity, t5.start_value as identity_start, t5.increment_by as identity_increment, t5.last_value AS identity_last, t1.identity_generation, t1.generation_expression, col_description(t3.oid, "ordinal_position") AS comment, t4.typtype, t4.typelem, (CASE WHEN t4.typcategory = 'A' AND t4.typelem <> 0 THEN (SELECT t.typtype FROM pg_catalog.pg_type t WHERE t.oid = t4.typelem) END) AS elemtyp, t4.oid FROM "information_schema"."columns" AS t1 JOIN pg_catalog.pg_namespace AS t2 ON t2.nspname = t1.table_schema JOIN pg_catalog.pg_class AS t3 ON t3.relnamespace = t2.oid AND t3.relname = t1.table_name JOIN pg_catalog.pg_attribute AS a ON a.attrelid = t3.oid AND 
a.attname = t1.column_name LEFT JOIN pg_catalog.pg_type AS t4 ON t1.udt_name = t4.typname LEFT JOIN pg_sequences AS t5 ON quote_ident(t5.schemaname) || '.' || quote_ident(t5.sequencename) = btrim(btrim(t1.column_default, 'nextval('''), '''::REGCLASS)') WHERE t1.table_schema = $1 AND t1.table_name IN (%s) ORDER BY t1.table_name, t1.ordinal_position ` atlas-0.7.2/sql/postgres/diff.go000066400000000000000000000270701431455511600165430ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package postgres import ( "context" "errors" "fmt" "reflect" "strconv" "strings" "unicode" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" ) // A diff provides a PostgreSQL implementation for sqlx.DiffDriver. type diff struct{ conn } // SchemaAttrDiff returns a changeset for migrating schema attributes from one state to the other. func (d *diff) SchemaAttrDiff(_, _ *schema.Schema) []schema.Change { // No special schema attribute diffing for PostgreSQL. return nil } // TableAttrDiff returns a changeset for migrating table attributes from one state to the other. func (d *diff) TableAttrDiff(from, to *schema.Table) ([]schema.Change, error) { var changes []schema.Change if change := sqlx.CommentDiff(from.Attrs, to.Attrs); change != nil { changes = append(changes, change) } if err := d.partitionChanged(from, to); err != nil { return nil, err } return append(changes, sqlx.CheckDiff(from, to, func(c1, c2 *schema.Check) bool { return sqlx.Has(c1.Attrs, &NoInherit{}) == sqlx.Has(c2.Attrs, &NoInherit{}) })...), nil } // ColumnChange returns the schema changes (if any) for migrating one column to the other. 
func (d *diff) ColumnChange(_ *schema.Table, from, to *schema.Column) (schema.ChangeKind, error) { change := sqlx.CommentChange(from.Attrs, to.Attrs) if from.Type.Null != to.Type.Null { change |= schema.ChangeNull } changed, err := d.typeChanged(from, to) if err != nil { return schema.NoChange, err } if changed { change |= schema.ChangeType } if changed, err = d.defaultChanged(from, to); err != nil { return schema.NoChange, err } if changed { change |= schema.ChangeDefault } if identityChanged(from.Attrs, to.Attrs) { change |= schema.ChangeAttr } if changed, err = d.generatedChanged(from, to); err != nil { return schema.NoChange, err } if changed { change |= schema.ChangeGenerated } return change, nil } // defaultChanged reports if the default value of a column was changed. func (d *diff) defaultChanged(from, to *schema.Column) (bool, error) { d1, ok1 := sqlx.DefaultValue(from) d2, ok2 := sqlx.DefaultValue(to) if ok1 != ok2 { return true, nil } if trimCast(d1) == trimCast(d2) { return false, nil } // Use database comparison in case of mismatch (e.g. `SELECT ARRAY[1] = '{1}'::int[]`). equals, err := d.valuesEqual(d1, d2) if err != nil { return false, err } return !equals, nil } // generatedChanged reports if the generated expression of a column was changed. func (*diff) generatedChanged(from, to *schema.Column) (bool, error) { var fromX, toX schema.GeneratedExpr switch fromHas, toHas := sqlx.Has(from.Attrs, &fromX), sqlx.Has(to.Attrs, &toX); { case fromHas && toHas && sqlx.MayWrap(fromX.Expr) != sqlx.MayWrap(toX.Expr): return false, fmt.Errorf("changing the generation expression for a column %q is not supported", from.Name) case !fromHas && toHas: return false, fmt.Errorf("changing column %q to generated column is not supported (drop and add is required)", from.Name) default: // Only DROP EXPRESSION is supported. return fromHas && !toHas, nil } } // partitionChanged checks and returns an error if the partition key of a table was changed. 
func (*diff) partitionChanged(from, to *schema.Table) error { var fromP, toP Partition switch fromHas, toHas := sqlx.Has(from.Attrs, &fromP), sqlx.Has(to.Attrs, &toP); { case fromHas && !toHas: return fmt.Errorf("partition key cannot be dropped from %q (drop and add is required)", from.Name) case !fromHas && toHas: return fmt.Errorf("partition key cannot be added to %q (drop and add is required)", to.Name) case fromHas && toHas: s1, err := formatPartition(fromP) if err != nil { return err } s2, err := formatPartition(toP) if err != nil { return err } if s1 != s2 { return fmt.Errorf("partition key of table %q cannot be changed from %s to %s (drop and add is required)", to.Name, s1, s2) } } return nil } // IsGeneratedIndexName reports if the index name was generated by the database. func (d *diff) IsGeneratedIndexName(t *schema.Table, idx *schema.Index) bool { names := make([]string, len(idx.Parts)) for i, p := range idx.Parts { if p.C == nil { return false } names[i] = p.C.Name } // Auto-generate index names will have the following format:
__..._key. // In case of conflict, PostgreSQL adds additional index at the end (e.g. "key1"). p := fmt.Sprintf("%s_%s_key", t.Name, strings.Join(names, "_")) if idx.Name == p { return true } i, err := strconv.ParseInt(strings.TrimPrefix(idx.Name, p), 10, 64) return err == nil && i > 0 } // IndexAttrChanged reports if the index attributes were changed. // The default type is BTREE if no type was specified. func (*diff) IndexAttrChanged(from, to []schema.Attr) bool { t1 := &IndexType{T: IndexTypeBTree} if sqlx.Has(from, t1) { t1.T = strings.ToUpper(t1.T) } t2 := &IndexType{T: IndexTypeBTree} if sqlx.Has(to, t2) { t2.T = strings.ToUpper(t2.T) } if t1.T != t2.T { return true } var p1, p2 IndexPredicate if sqlx.Has(from, &p1) != sqlx.Has(to, &p2) || (p1.P != p2.P && p1.P != sqlx.MayWrap(p2.P)) { return true } if indexIncludeChanged(from, to) { return true } s1, ok1 := indexStorageParams(from) s2, ok2 := indexStorageParams(to) return ok1 != ok2 || ok1 && *s1 != *s2 } // IndexPartAttrChanged reports if the index-part attributes were changed. func (*diff) IndexPartAttrChanged(from, to *schema.IndexPart) bool { p1 := &IndexColumnProperty{NullsFirst: from.Desc, NullsLast: !from.Desc} sqlx.Has(from.Attrs, p1) p2 := &IndexColumnProperty{NullsFirst: to.Desc, NullsLast: !to.Desc} sqlx.Has(to.Attrs, p2) return p1.NullsFirst != p2.NullsFirst || p1.NullsLast != p2.NullsLast } // ReferenceChanged reports if the foreign key referential action was changed. func (*diff) ReferenceChanged(from, to schema.ReferenceOption) bool { // According to PostgreSQL, the NO ACTION rule is set // if no referential action was defined in foreign key. 
if from == "" { from = schema.NoAction } if to == "" { to = schema.NoAction } return from != to } func (d *diff) typeChanged(from, to *schema.Column) (bool, error) { fromT, toT := from.Type.Type, to.Type.Type if fromT == nil || toT == nil { return false, fmt.Errorf("postgres: missing type information for column %q", from.Name) } if reflect.TypeOf(fromT) != reflect.TypeOf(toT) { return true, nil } var changed bool switch fromT := fromT.(type) { case *schema.BinaryType, *BitType, *schema.BoolType, *schema.DecimalType, *schema.FloatType, *IntervalType, *schema.IntegerType, *schema.JSONType, *SerialType, *schema.SpatialType, *schema.StringType, *schema.TimeType, *NetworkType, *UserDefinedType: t1, err := FormatType(toT) if err != nil { return false, err } t2, err := FormatType(fromT) if err != nil { return false, err } changed = t1 != t2 case *schema.EnumType: toT := toT.(*schema.EnumType) // Column type was changed if the underlying enum type was changed or values are not equal. changed = !sqlx.ValuesEqual(fromT.Values, toT.Values) || fromT.T != toT.T || (toT.Schema != nil && fromT.Schema != nil && fromT.Schema.Name != toT.Schema.Name) case *CurrencyType: toT := toT.(*CurrencyType) changed = fromT.T != toT.T case *UUIDType: toT := toT.(*UUIDType) changed = fromT.T != toT.T case *XMLType: toT := toT.(*XMLType) changed = fromT.T != toT.T case *ArrayType: toT := toT.(*ArrayType) // Same type. if changed = fromT.T != toT.T; !changed { // In case it is an enum type, compare its values. fromE, ok1 := fromT.Type.(*schema.EnumType) toE, ok2 := toT.Type.(*schema.EnumType) changed = ok1 && ok2 && !sqlx.ValuesEqual(fromE.Values, toE.Values) break } // In case the desired schema is not normalized, the string type can look different even // if the two strings represent the same array type (varchar(1), character varying (1)). // Therefore, we try by comparing the underlying types if they were defined. 
if fromT.Type != nil && toT.Type != nil { t1, err := FormatType(fromT.Type) if err != nil { return false, err } t2, err := FormatType(toT.Type) if err != nil { return false, err } // Same underlying type. changed = t1 != t2 } default: return false, &sqlx.UnsupportedTypeError{Type: fromT} } return changed, nil } // valuesEqual reports if the DEFAULT values x and y // equal according to the database engine. func (d *diff) valuesEqual(x, y string) (bool, error) { var b bool // The DEFAULT expressions are safe to be inlined in the SELECT // statement same as we inline them in the CREATE TABLE statement. rows, err := d.QueryContext(context.Background(), fmt.Sprintf("SELECT %s = %s", x, y)) if err != nil { return false, err } if err := sqlx.ScanOne(rows, &b); err != nil { return false, err } return b, nil } // Default IDENTITY attributes. const ( defaultIdentityGen = "BY DEFAULT" defaultSeqStart = 1 defaultSeqIncrement = 1 ) // identityChanged reports if one of the identity attributes was changed. func identityChanged(from, to []schema.Attr) bool { i1, ok1 := identity(from) i2, ok2 := identity(to) if !ok1 && !ok2 || ok1 != ok2 { return ok1 != ok2 } return i1.Generation != i2.Generation || i1.Sequence.Start != i2.Sequence.Start || i1.Sequence.Increment != i2.Sequence.Increment } func identity(attrs []schema.Attr) (*Identity, bool) { i := &Identity{} if !sqlx.Has(attrs, i) { return nil, false } if i.Generation == "" { i.Generation = defaultIdentityGen } if i.Sequence == nil { i.Sequence = &Sequence{Start: defaultSeqStart, Increment: defaultSeqIncrement} return i, true } if i.Sequence.Start == 0 { i.Sequence.Start = defaultSeqStart } if i.Sequence.Increment == 0 { i.Sequence.Increment = defaultSeqIncrement } return i, true } // formatPartition returns the string representation of the // partition key according to the PostgreSQL format/grammar. 
func formatPartition(p Partition) (string, error) { b := &sqlx.Builder{QuoteChar: '"'} b.P("PARTITION BY") switch t := strings.ToUpper(p.T); t { case PartitionTypeRange, PartitionTypeList, PartitionTypeHash: b.P(t) default: return "", fmt.Errorf("unknown partition type: %q", t) } if len(p.Parts) == 0 { return "", errors.New("missing parts for partition key") } b.Wrap(func(b *sqlx.Builder) { b.MapComma(p.Parts, func(i int, b *sqlx.Builder) { switch k := p.Parts[i]; { case k.C != nil: b.Ident(k.C.Name) case k.X != nil: b.P(sqlx.MayWrap(k.X.(*schema.RawExpr).X)) } }) }) return b.String(), nil } // indexStorageParams returns the index storage parameters from the attributes // in case it is there, and it is not the default. func indexStorageParams(attrs []schema.Attr) (*IndexStorageParams, bool) { s := &IndexStorageParams{} if !sqlx.Has(attrs, s) { return nil, false } if !s.AutoSummarize && (s.PagesPerRange == 0 || s.PagesPerRange == defaultPagePerRange) { return nil, false } return s, true } // indexIncludeChanged reports if the INCLUDE attribute clause was changed. func indexIncludeChanged(from, to []schema.Attr) bool { var fromI, toI IndexInclude if sqlx.Has(from, &fromI) != sqlx.Has(to, &toI) || len(fromI.Columns) != len(toI.Columns) { return true } for i := range fromI.Columns { if fromI.Columns[i].Name != toI.Columns[i].Name { return true } } return false } func trimCast(s string) string { i := strings.LastIndex(s, "::") if i == -1 { return s } for _, r := range s[i+2:] { if r != ' ' && !unicode.IsLetter(r) { return s } } return s[:i] } atlas-0.7.2/sql/postgres/diff_test.go000066400000000000000000000414671431455511600176100ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package postgres import ( "testing" "github.com/DATA-DOG/go-sqlmock" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestDiff_TableDiff(t *testing.T) { type testcase struct { name string from, to *schema.Table wantChanges []schema.Change wantErr bool } tests := []testcase{ { name: "no changes", from: &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}}, to: &schema.Table{Name: "users"}, }, { name: "change primary key columns", from: func() *schema.Table { t := &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}, Columns: []*schema.Column{{Name: "id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}}} t.PrimaryKey = &schema.Index{ Parts: []*schema.IndexPart{{C: t.Columns[0]}}, } return t }(), to: &schema.Table{Name: "users"}, wantErr: true, }, { name: "change identity attributes", from: func() *schema.Table { t := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}, }, } t.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: t.Columns[0]}}} return t }(), to: func() *schema.Table { t := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}, Attrs: []schema.Attr{&Identity{Sequence: &Sequence{Start: 1024, Increment: 1}}}}, }, } t.PrimaryKey = &schema.Index{Parts: []*schema.IndexPart{{C: t.Columns[0]}}} return t }(), wantChanges: []schema.Change{ &schema.ModifyColumn{ From: &schema.Column{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}, To: &schema.Column{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}, Attrs: []schema.Attr{&Identity{Sequence: &Sequence{Start: 1024, Increment: 1}}}}, Change: schema.ChangeAttr, }, }, }, { name: "drop partition key", from: schema.NewTable("logs"). 
AddAttrs(&Partition{ T: PartitionTypeRange, Parts: []*PartitionPart{{C: schema.NewColumn("c")}}, }), to: schema.NewTable("logs"), wantErr: true, }, { name: "add partition key", from: schema.NewTable("logs"), to: schema.NewTable("logs"). AddAttrs(&Partition{ T: PartitionTypeRange, Parts: []*PartitionPart{{C: schema.NewColumn("c")}}, }), wantErr: true, }, { name: "change partition key column", from: schema.NewTable("logs"). AddAttrs(&Partition{ T: PartitionTypeRange, Parts: []*PartitionPart{{C: schema.NewColumn("c")}}, }), to: schema.NewTable("logs"). AddAttrs(&Partition{ T: PartitionTypeRange, Parts: []*PartitionPart{{C: schema.NewColumn("d")}}, }), wantErr: true, }, { name: "change partition key type", from: schema.NewTable("logs"). AddAttrs(&Partition{ T: PartitionTypeRange, Parts: []*PartitionPart{{C: schema.NewColumn("c")}}, }), to: schema.NewTable("logs"). AddAttrs(&Partition{ T: PartitionTypeHash, Parts: []*PartitionPart{{C: schema.NewColumn("c")}}, }), wantErr: true, }, { name: "add check", from: &schema.Table{Name: "t1", Schema: &schema.Schema{Name: "public"}}, to: &schema.Table{Name: "t1", Attrs: []schema.Attr{&schema.Check{Name: "t1_c1_check", Expr: "(c1 > 1)"}}}, wantChanges: []schema.Change{ &schema.AddCheck{ C: &schema.Check{Name: "t1_c1_check", Expr: "(c1 > 1)"}, }, }, }, { name: "drop check", from: &schema.Table{Name: "t1", Attrs: []schema.Attr{&schema.Check{Name: "t1_c1_check", Expr: "(c1 > 1)"}}}, to: &schema.Table{Name: "t1"}, wantChanges: []schema.Change{ &schema.DropCheck{ C: &schema.Check{Name: "t1_c1_check", Expr: "(c1 > 1)"}, }, }, }, { name: "add comment", from: &schema.Table{Name: "t1", Schema: &schema.Schema{Name: "public"}}, to: &schema.Table{Name: "t1", Attrs: []schema.Attr{&schema.Comment{Text: "t1"}}}, wantChanges: []schema.Change{ &schema.AddAttr{ A: &schema.Comment{Text: "t1"}, }, }, }, { name: "drop comment", from: &schema.Table{Name: "t1", Schema: &schema.Schema{Name: "public"}, Attrs: []schema.Attr{&schema.Comment{Text: "t1"}}}, 
to: &schema.Table{Name: "t1"}, wantChanges: []schema.Change{ &schema.ModifyAttr{ From: &schema.Comment{Text: "t1"}, To: &schema.Comment{Text: ""}, }, }, }, { name: "modify comment", from: &schema.Table{Name: "t1", Schema: &schema.Schema{Name: "public"}, Attrs: []schema.Attr{&schema.Comment{Text: "t1"}}}, to: &schema.Table{Name: "t1", Attrs: []schema.Attr{&schema.Comment{Text: "t1!"}}}, wantChanges: []schema.Change{ &schema.ModifyAttr{ From: &schema.Comment{Text: "t1"}, To: &schema.Comment{Text: "t1!"}, }, }, }, func() testcase { var ( s = schema.New("public") from = schema.NewTable("t1"). SetSchema(s). AddColumns( schema.NewIntColumn("c1", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"}), ) to = schema.NewTable("t1"). SetSchema(s). AddColumns( schema.NewIntColumn("c1", "int"), ) ) return testcase{ name: "drop generation expression", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyColumn{From: from.Columns[0], To: to.Columns[0], Change: schema.ChangeGenerated}, }, } }(), { name: "change generation expression", from: schema.NewTable("t1"). SetSchema(schema.New("public")). AddColumns( schema.NewIntColumn("c1", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"}), ), to: schema.NewTable("t1"). SetSchema(schema.New("public")). AddColumns( schema.NewIntColumn("c1", "int"). 
SetGeneratedExpr(&schema.GeneratedExpr{Expr: "2", Type: "STORED"}), ), wantErr: true, }, func() testcase { var ( from = &schema.Table{ Name: "t1", Schema: &schema.Schema{ Name: "public", }, Columns: []*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}}}, {Name: "c2", Type: &schema.ColumnType{Raw: "int8", Type: &schema.IntegerType{T: "int8"}}}, }, } to = &schema.Table{ Name: "t1", Columns: []*schema.Column{ { Name: "c1", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}, Null: true}, Default: &schema.RawExpr{X: "{}"}, Attrs: []schema.Attr{&schema.Comment{Text: "json comment"}}, }, {Name: "c3", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, } ) return testcase{ name: "columns", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyColumn{ From: from.Columns[0], To: to.Columns[0], Change: schema.ChangeNull | schema.ChangeComment | schema.ChangeDefault, }, &schema.DropColumn{C: from.Columns[1]}, &schema.AddColumn{C: to.Columns[1]}, }, } }(), // Modify enum type or values. func() testcase { var ( from = schema.NewTable("users"). SetSchema(schema.New("public")). AddColumns( schema.NewEnumColumn("state", schema.EnumName("state"), schema.EnumValues("on")), schema.NewEnumColumn("enum1", schema.EnumName("enum1"), schema.EnumValues("a")), schema.NewEnumColumn("enum3", schema.EnumName("enum3"), schema.EnumValues("a")), schema.NewEnumColumn("enum4", schema.EnumName("enum4"), schema.EnumValues("a"), schema.EnumSchema(schema.New("public"))), ) to = schema.NewTable("users"). SetSchema(schema.New("public")). AddColumns( // Add value. schema.NewEnumColumn("state", schema.EnumName("state"), schema.EnumValues("on", "off")), // Change type. schema.NewEnumColumn("enum1", schema.EnumName("enum2"), schema.EnumValues("a")), // No change as schema is optional. 
schema.NewEnumColumn("enum3", schema.EnumName("enum3"), schema.EnumValues("a"), schema.EnumSchema(schema.New("public"))), // Enum type was changed (reside in a different schema). schema.NewEnumColumn("enum4", schema.EnumName("enum4"), schema.EnumValues("a"), schema.EnumSchema(schema.New("test"))), ) ) return testcase{ name: "enums", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyColumn{From: from.Columns[0], To: to.Columns[0], Change: schema.ChangeType}, &schema.ModifyColumn{From: from.Columns[1], To: to.Columns[1], Change: schema.ChangeType}, &schema.ModifyColumn{From: from.Columns[3], To: to.Columns[3], Change: schema.ChangeType}, }, } }(), // Modify array of type enum. func() testcase { var ( from = schema.NewTable("users"). SetSchema(schema.New("public")). AddColumns( schema.NewColumn("a1").SetType(&ArrayType{T: "state[]", Type: &schema.EnumType{T: "state", Values: []string{"on"}}}), schema.NewColumn("a2").SetType(&ArrayType{T: "state[]", Type: &schema.EnumType{T: "state", Values: []string{"on", "off"}}}), schema.NewColumn("a3").SetType(&ArrayType{T: "state[]", Type: &schema.EnumType{T: "state", Values: []string{"on", "off"}}}), ) to = schema.NewTable("users"). SetSchema(schema.New("public")). AddColumns( // Add value. schema.NewColumn("a1").SetType(&ArrayType{T: "state[]", Type: &schema.EnumType{T: "state", Values: []string{"on", "off"}}}), // Drop value. schema.NewColumn("a2").SetType(&ArrayType{T: "state[]", Type: &schema.EnumType{T: "state", Values: []string{"on"}}}), // Same values. 
schema.NewColumn("a3").SetType(&ArrayType{T: "state[]", Type: &schema.EnumType{T: "state", Values: []string{"on", "off"}}}), ) ) return testcase{ name: "enum arrays", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyColumn{From: from.Columns[0], To: to.Columns[0], Change: schema.ChangeType}, &schema.ModifyColumn{From: from.Columns[1], To: to.Columns[1], Change: schema.ChangeType}, }, } }(), func() testcase { var ( from = &schema.Table{ Name: "t1", Schema: &schema.Schema{ Name: "public", }, Columns: []*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}}, Default: &schema.RawExpr{X: "'{}'"}}, {Name: "c2", Type: &schema.ColumnType{Raw: "int8", Type: &schema.IntegerType{T: "int8"}}}, {Name: "c3", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, } to = &schema.Table{ Name: "t1", Schema: &schema.Schema{ Name: "public", }, Columns: []*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}}, Default: &schema.RawExpr{X: "'{}'::json"}}, {Name: "c2", Type: &schema.ColumnType{Raw: "int8", Type: &schema.IntegerType{T: "int8"}}}, {Name: "c3", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, } ) from.Indexes = []*schema.Index{ {Name: "c1_index", Unique: true, Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[0]}}}, {Name: "c2_unique", Unique: true, Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}}, {Name: "c3_predicate", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}}, {Name: "c4_predicate", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}, Attrs: []schema.Attr{&IndexPredicate{P: "(c4 <> NULL)"}}}, {Name: "c4_storage_params", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}, Attrs: []schema.Attr{&IndexStorageParams{PagesPerRange: 4}}}, {Name: "c5_include_no_change", Table: from, Parts: 
[]*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}, Attrs: []schema.Attr{&IndexInclude{Columns: from.Columns[:1]}}}, {Name: "c5_include_added", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}}, {Name: "c5_include_dropped", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}, Attrs: []schema.Attr{&IndexInclude{Columns: from.Columns[:1]}}}, } to.Indexes = []*schema.Index{ {Name: "c1_index", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[0]}}}, {Name: "c3_unique", Unique: true, Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: to.Columns[1]}}}, {Name: "c3_predicate", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}, Attrs: []schema.Attr{&IndexPredicate{P: "c3 <> NULL"}}}, {Name: "c4_predicate", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}, Attrs: []schema.Attr{&IndexPredicate{P: "c4 <> NULL"}}}, {Name: "c4_storage_params", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}, Attrs: []schema.Attr{&IndexStorageParams{PagesPerRange: 2}}}, {Name: "c5_include_no_change", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}, Attrs: []schema.Attr{&IndexInclude{Columns: from.Columns[:1]}}}, {Name: "c5_include_added", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}, Attrs: []schema.Attr{&IndexInclude{Columns: from.Columns[:1]}}}, {Name: "c5_include_dropped", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}}, } return testcase{ name: "indexes", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyIndex{From: from.Indexes[0], To: to.Indexes[0], Change: schema.ChangeUnique}, &schema.DropIndex{I: from.Indexes[1]}, &schema.ModifyIndex{From: from.Indexes[2], To: to.Indexes[2], Change: schema.ChangeAttr}, &schema.ModifyIndex{From: from.Indexes[4], To: to.Indexes[4], Change: schema.ChangeAttr}, &schema.ModifyIndex{From: from.Indexes[6], To: to.Indexes[6], Change: 
schema.ChangeAttr}, &schema.ModifyIndex{From: from.Indexes[7], To: to.Indexes[7], Change: schema.ChangeAttr}, &schema.AddIndex{I: to.Indexes[1]}, }, } }(), func() testcase { var ( ref = &schema.Table{ Name: "t2", Schema: &schema.Schema{ Name: "public", }, Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, {Name: "ref_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, } from = &schema.Table{ Name: "t1", Schema: &schema.Schema{ Name: "public", }, Columns: []*schema.Column{ {Name: "t2_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, } to = &schema.Table{ Name: "t1", Schema: &schema.Schema{ Name: "public", }, Columns: []*schema.Column{ {Name: "t2_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, } ) from.ForeignKeys = []*schema.ForeignKey{ {Table: from, Columns: from.Columns, RefTable: ref, RefColumns: ref.Columns[:1]}, } to.ForeignKeys = []*schema.ForeignKey{ {Table: to, Columns: to.Columns, RefTable: ref, RefColumns: ref.Columns[1:]}, } return testcase{ name: "foreign-keys", from: from, to: to, wantChanges: []schema.Change{ &schema.ModifyForeignKey{ From: from.ForeignKeys[0], To: to.ForeignKeys[0], Change: schema.ChangeRefColumn, }, }, } }(), } for _, tt := range tests { db, m, err := sqlmock.New() require.NoError(t, err) mock{m}.version("130000") drv, err := Open(db) require.NoError(t, err) t.Run(tt.name, func(t *testing.T) { changes, err := drv.TableDiff(tt.from, tt.to) require.Equalf(t, tt.wantErr, err != nil, "got: %v", err) require.EqualValues(t, tt.wantChanges, changes) }) } } func TestDiff_SchemaDiff(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) mock{m}.version("130000") drv, err := Open(db) require.NoError(t, err) from := &schema.Schema{ Tables: []*schema.Table{ {Name: "users"}, {Name: "pets"}, }, } to := &schema.Schema{ Tables: []*schema.Table{ { Name: "users", Columns: 
[]*schema.Column{ {Name: "t2_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}, }, }, {Name: "groups"}, }, } from.Tables[0].Schema = from from.Tables[1].Schema = from changes, err := drv.SchemaDiff(from, to) require.NoError(t, err) require.EqualValues(t, []schema.Change{ &schema.ModifyTable{T: to.Tables[0], Changes: []schema.Change{&schema.AddColumn{C: to.Tables[0].Columns[0]}}}, &schema.DropTable{T: from.Tables[1]}, &schema.AddTable{T: to.Tables[1]}, }, changes) } atlas-0.7.2/sql/postgres/driver.go000066400000000000000000000261131431455511600171230ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package postgres import ( "context" "database/sql" "errors" "fmt" "hash/fnv" "net/url" "strconv" "time" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlclient" ) type ( // Driver represents a PostgreSQL driver for introspecting database schemas, // generating diff between schema elements and apply migrations changes. Driver struct { conn schema.Differ schema.Inspector migrate.PlanApplier schema string // the schema given in the `search_path` parameter (if given) } // database connection and its information. conn struct { schema.ExecQuerier // System variables that are set on `Open`. collate string ctype string version int crdb bool } ) // DriverName holds the name used for registration. 
const DriverName = "postgres" func init() { sqlclient.Register( DriverName, sqlclient.OpenerFunc(opener), sqlclient.RegisterDriverOpener(Open), sqlclient.RegisterFlavours("postgresql"), sqlclient.RegisterCodec(MarshalHCL, EvalHCL), sqlclient.RegisterURLParser(parser{}), ) } func opener(_ context.Context, u *url.URL) (*sqlclient.Client, error) { ur := parser{}.ParseURL(u) db, err := sql.Open(DriverName, ur.DSN) if err != nil { return nil, err } drv, err := Open(db) if err != nil { if cerr := db.Close(); cerr != nil { err = fmt.Errorf("%w: %v", err, cerr) } return nil, err } drv.(*Driver).schema = ur.Schema return &sqlclient.Client{ Name: DriverName, DB: db, URL: ur, Driver: drv, }, nil } // Open opens a new PostgreSQL driver. func Open(db schema.ExecQuerier) (migrate.Driver, error) { c := conn{ExecQuerier: db} rows, err := db.QueryContext(context.Background(), paramsQuery) if err != nil { return nil, fmt.Errorf("postgres: scanning system variables: %w", err) } params, err := sqlx.ScanStrings(rows) if err != nil { return nil, fmt.Errorf("postgres: failed scanning rows: %w", err) } if len(params) != 3 && len(params) != 4 { return nil, fmt.Errorf("postgres: unexpected number of rows: %d", len(params)) } c.ctype, c.collate = params[1], params[2] if c.version, err = strconv.Atoi(params[0]); err != nil { return nil, fmt.Errorf("postgres: malformed version: %s: %w", params[0], err) } if c.version < 10_00_00 { return nil, fmt.Errorf("postgres: unsupported postgres version: %d", c.version) } // Means we are connected to CockroachDB because we have a result for name='crdb_version'. see `paramsQuery`. 
if c.crdb = len(params) == 4; c.crdb { return &Driver{ conn: c, Differ: &sqlx.Diff{DiffDriver: &crdbDiff{diff{c}}}, Inspector: &crdbInspect{inspect{c}}, PlanApplier: &planApply{c}, }, nil } return &Driver{ conn: c, Differ: &sqlx.Diff{DiffDriver: &diff{c}}, Inspector: &inspect{c}, PlanApplier: &planApply{c}, }, nil } func (d *Driver) dev() *sqlx.DevDriver { return &sqlx.DevDriver{ Driver: d, MaxNameLen: 63, PatchColumn: func(s *schema.Schema, c *schema.Column) { if e, ok := hasEnumType(c); ok { e.Schema = s } }, } } // NormalizeRealm returns the normal representation of the given database. func (d *Driver) NormalizeRealm(ctx context.Context, r *schema.Realm) (*schema.Realm, error) { return d.dev().NormalizeRealm(ctx, r) } // NormalizeSchema returns the normal representation of the given database. func (d *Driver) NormalizeSchema(ctx context.Context, s *schema.Schema) (*schema.Schema, error) { return d.dev().NormalizeSchema(ctx, s) } // Lock implements the schema.Locker interface. func (d *Driver) Lock(ctx context.Context, name string, timeout time.Duration) (schema.UnlockFunc, error) { conn, err := sqlx.SingleConn(ctx, d.ExecQuerier) if err != nil { return nil, err } h := fnv.New32() h.Write([]byte(name)) id := h.Sum32() if err := acquire(ctx, conn, id, timeout); err != nil { conn.Close() return nil, err } return func() error { defer conn.Close() rows, err := conn.QueryContext(ctx, "SELECT pg_advisory_unlock($1)", id) if err != nil { return err } switch released, err := sqlx.ScanNullBool(rows); { case err != nil: return err case !released.Valid || !released.Bool: return fmt.Errorf("sql/postgres: failed releasing lock %d", id) } return nil }, nil } // Snapshot implements migrate.Snapshoter. func (d *Driver) Snapshot(ctx context.Context) (migrate.RestoreFunc, error) { // Postgres will only then be considered bound to a schema if the `search_path` was given. // In all other cases, the connection is considered bound to the realm. 
if d.schema != "" { s, err := d.InspectSchema(ctx, d.schema, nil) if err != nil { return nil, err } if len(s.Tables) > 0 { return nil, migrate.NotCleanError{Reason: fmt.Sprintf("found table %q in connected schema", s.Tables[0].Name)} } return func(ctx context.Context) error { current, err := d.InspectSchema(ctx, s.Name, nil) if err != nil { return err } changes, err := d.SchemaDiff(current, s) if err != nil { return err } return d.ApplyChanges(ctx, changes) }, nil } // Not bound to a schema. realm, err := d.InspectRealm(ctx, nil) if err != nil { return nil, err } restore := func(ctx context.Context) error { current, err := d.InspectRealm(ctx, nil) if err != nil { return err } changes, err := d.RealmDiff(current, realm) if err != nil { return err } return d.ApplyChanges(ctx, changes) } // Postgres is considered clean, if there are no schemas or the public schema has no tables. if len(realm.Schemas) == 0 { return restore, nil } if s, ok := realm.Schema("public"); len(realm.Schemas) == 1 && ok { if len(s.Tables) > 0 { return nil, migrate.NotCleanError{Reason: fmt.Sprintf("found table %q in schema %q", s.Tables[0].Name, s.Name)} } return restore, nil } return nil, migrate.NotCleanError{Reason: fmt.Sprintf("found schema %q", realm.Schemas[0].Name)} } // CheckClean implements migrate.CleanChecker. 
func (d *Driver) CheckClean(ctx context.Context, revT *migrate.TableIdent) error { if d.schema != "" { switch s, err := d.InspectSchema(ctx, d.schema, nil); { case err != nil: return err case len(s.Tables) == 0, (revT != nil && revT.Schema == "" || s.Name == revT.Schema) && len(s.Tables) == 1 && s.Tables[0].Name == revT.Name: return nil default: return &migrate.NotCleanError{Reason: fmt.Sprintf("found table %q in schema %q", s.Tables[0].Name, s.Name)} } } r, err := d.InspectRealm(ctx, nil) if err != nil { return err } for _, s := range r.Schemas { switch { case len(s.Tables) == 0 && s.Name == "public": case len(s.Tables) == 0 || s.Name != revT.Schema: return &migrate.NotCleanError{Reason: fmt.Sprintf("found schema %q", s.Name)} case len(s.Tables) > 1: return &migrate.NotCleanError{Reason: fmt.Sprintf("found %d tables in schema %q", len(s.Tables), s.Name)} case len(s.Tables) == 1 && s.Tables[0].Name != revT.Name: return &migrate.NotCleanError{Reason: fmt.Sprintf("found table %q in schema %q", s.Tables[0].Name, s.Name)} } } return nil } func acquire(ctx context.Context, conn schema.ExecQuerier, id uint32, timeout time.Duration) error { switch { // With timeout (context-based). case timeout > 0: var cancel context.CancelFunc ctx, cancel = context.WithTimeout(ctx, timeout) defer cancel() fallthrough // Infinite timeout. case timeout < 0: rows, err := conn.QueryContext(ctx, "SELECT pg_advisory_lock($1)", id) if errors.Is(err, context.DeadlineExceeded) || errors.Is(err, context.Canceled) { err = schema.ErrLocked } if err != nil { return err } return rows.Close() // No timeout. default: rows, err := conn.QueryContext(ctx, "SELECT pg_try_advisory_lock($1)", id) if err != nil { return err } acquired, err := sqlx.ScanNullBool(rows) if err != nil { return err } if !acquired.Bool { return schema.ErrLocked } return nil } } // supportsIndexInclude reports if the server supports the INCLUDE clause. 
func (c *conn) supportsIndexInclude() bool { return c.version >= 11_00_00 } type parser struct{} // ParseURL implements the sqlclient.URLParser interface. func (parser) ParseURL(u *url.URL) *sqlclient.URL { return &sqlclient.URL{URL: u, DSN: u.String(), Schema: u.Query().Get("search_path")} } // ChangeSchema implements the sqlclient.SchemaChanger interface. func (parser) ChangeSchema(u *url.URL, s string) *url.URL { nu := *u q := nu.Query() q.Set("search_path", s) nu.RawQuery = q.Encode() return &nu } // Standard column types (and their aliases) as defined in // PostgreSQL codebase/website. const ( TypeBit = "bit" TypeBitVar = "bit varying" TypeBoolean = "boolean" TypeBool = "bool" // boolean. TypeBytea = "bytea" TypeCharacter = "character" TypeChar = "char" // character TypeCharVar = "character varying" TypeVarChar = "varchar" // character varying TypeText = "text" TypeSmallInt = "smallint" TypeInteger = "integer" TypeBigInt = "bigint" TypeInt = "int" // integer. TypeInt2 = "int2" // smallint. TypeInt4 = "int4" // integer. TypeInt8 = "int8" // bigint. TypeCIDR = "cidr" TypeInet = "inet" TypeMACAddr = "macaddr" TypeMACAddr8 = "macaddr8" TypeCircle = "circle" TypeLine = "line" TypeLseg = "lseg" TypeBox = "box" TypePath = "path" TypePolygon = "polygon" TypePoint = "point" TypeDate = "date" TypeTime = "time" // time without time zone TypeTimeTZ = "timetz" // time with time zone TypeTimeWTZ = "time with time zone" TypeTimeWOTZ = "time without time zone" TypeTimestamp = "timestamp" // timestamp without time zone TypeTimestampTZ = "timestamptz" TypeTimestampWTZ = "timestamp with time zone" TypeTimestampWOTZ = "timestamp without time zone" TypeDouble = "double precision" TypeReal = "real" TypeFloat8 = "float8" // double precision TypeFloat4 = "float4" // real TypeFloat = "float" // float(p). TypeNumeric = "numeric" TypeDecimal = "decimal" // numeric TypeSmallSerial = "smallserial" // smallint with auto_increment. TypeSerial = "serial" // integer with auto_increment. 
TypeBigSerial = "bigserial" // bigint with auto_increment. TypeSerial2 = "serial2" // smallserial TypeSerial4 = "serial4" // serial TypeSerial8 = "serial8" // bigserial TypeArray = "array" TypeXML = "xml" TypeJSON = "json" TypeJSONB = "jsonb" TypeUUID = "uuid" TypeMoney = "money" TypeInterval = "interval" TypeUserDefined = "user-defined" ) // List of supported index types. const ( IndexTypeBTree = "BTREE" IndexTypeHash = "HASH" IndexTypeGIN = "GIN" IndexTypeGiST = "GIST" IndexTypeBRIN = "BRIN" defaultPagePerRange = 128 ) // List of "GENERATED" types. const ( GeneratedTypeAlways = "ALWAYS" GeneratedTypeByDefault = "BY_DEFAULT" // BY DEFAULT. ) // List of PARTITION KEY types. const ( PartitionTypeRange = "RANGE" PartitionTypeList = "LIST" PartitionTypeHash = "HASH" ) atlas-0.7.2/sql/postgres/driver_test.go000066400000000000000000000124621431455511600201640ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package postgres import ( "context" "io" "testing" "time" "ariga.io/atlas/sql/internal/sqltest" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/DATA-DOG/go-sqlmock" "github.com/stretchr/testify/require" ) func TestDriver_LockAcquired(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) name, hash := "name", 797654004 m.ExpectQuery(sqltest.Escape("SELECT pg_try_advisory_lock($1)")). WithArgs(hash). WillReturnRows(sqlmock.NewRows([]string{"pg_advisory_lock"}).AddRow(1)). RowsWillBeClosed() m.ExpectQuery(sqltest.Escape("SELECT pg_advisory_unlock($1)")). WithArgs(hash). WillReturnRows(sqlmock.NewRows([]string{"pg_advisory_unlock"}).AddRow(1)). 
RowsWillBeClosed() d := &Driver{} d.ExecQuerier = db unlock, err := d.Lock(context.Background(), name, 0) require.NoError(t, err) require.NoError(t, unlock()) require.NoError(t, m.ExpectationsWereMet()) } func TestDriver_LockError(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) d := &Driver{} d.ExecQuerier = db name, hash := "migrate", 979249972 t.Run("Timeout", func(t *testing.T) { m.ExpectQuery(sqltest.Escape("SELECT pg_advisory_lock($1)")). WithArgs(hash). WillReturnError(context.DeadlineExceeded). RowsWillBeClosed() unlock, err := d.Lock(context.Background(), name, time.Minute) require.Equal(t, schema.ErrLocked, err) require.Nil(t, unlock) }) t.Run("Internal", func(t *testing.T) { m.ExpectQuery(sqltest.Escape("SELECT pg_advisory_lock($1)")). WithArgs(hash). WillReturnError(io.EOF). RowsWillBeClosed() unlock, err := d.Lock(context.Background(), name, time.Minute) require.Equal(t, io.EOF, err) require.Nil(t, unlock) }) } func TestDriver_UnlockError(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) d := &Driver{} d.ExecQuerier = db name, hash := "up", 1551306158 acquired := func() { m.ExpectQuery(sqltest.Escape("SELECT pg_try_advisory_lock($1)")). WithArgs(hash). WillReturnRows(sqlmock.NewRows([]string{"pg_try_advisory_lock"}).AddRow(1)). RowsWillBeClosed() } t.Run("NotHeld", func(t *testing.T) { acquired() unlock, err := d.Lock(context.Background(), name, 0) require.NoError(t, err) m.ExpectQuery(sqltest.Escape("SELECT pg_advisory_unlock($1)")). WithArgs(hash). WillReturnRows(sqlmock.NewRows([]string{"pg_advisory_unlock"}).AddRow(0)). RowsWillBeClosed() require.Error(t, unlock()) }) t.Run("Internal", func(t *testing.T) { acquired() unlock, err := d.Lock(context.Background(), name, 0) require.NoError(t, err) m.ExpectQuery(sqltest.Escape("SELECT pg_advisory_unlock($1)")). WithArgs(hash). WillReturnRows(sqlmock.NewRows([]string{"pg_advisory_unlock"}).AddRow(nil)). 
RowsWillBeClosed() require.Error(t, unlock()) }) } func TestDriver_CheckClean(t *testing.T) { s := schema.New("test") drv := &Driver{Inspector: &mockInspector{schema: s}, schema: "test"} // Empty schema. err := drv.CheckClean(context.Background(), nil) require.NoError(t, err) // Revisions table found. s.AddTables(schema.NewTable("revisions")) err = drv.CheckClean(context.Background(), &migrate.TableIdent{Name: "revisions", Schema: "test"}) require.NoError(t, err) // Multiple tables. s.Tables = []*schema.Table{schema.NewTable("a"), schema.NewTable("revisions")} err = drv.CheckClean(context.Background(), &migrate.TableIdent{Name: "revisions", Schema: "test"}) require.EqualError(t, err, `sql/migrate: connected database is not clean: found table "a" in schema "test"`) r := schema.NewRealm() drv.schema = "" drv.Inspector = &mockInspector{realm: r} // Empty realm. err = drv.CheckClean(context.Background(), nil) require.NoError(t, err) // Revisions table found. s.Tables = []*schema.Table{schema.NewTable("revisions").SetSchema(s)} r.AddSchemas(s) err = drv.CheckClean(context.Background(), &migrate.TableIdent{Name: "revisions", Schema: "test"}) require.NoError(t, err) // Unknown table. s.Tables[0].Name = "unknown" err = drv.CheckClean(context.Background(), &migrate.TableIdent{Schema: "test", Name: "revisions"}) require.EqualError(t, err, `sql/migrate: connected database is not clean: found table "unknown" in schema "test"`) // Multiple tables. s.Tables = []*schema.Table{schema.NewTable("a"), schema.NewTable("revisions")} err = drv.CheckClean(context.Background(), &migrate.TableIdent{Schema: "test", Name: "revisions"}) require.EqualError(t, err, `sql/migrate: connected database is not clean: found 2 tables in schema "test"`) // With auto created public schema. 
s.Tables = []*schema.Table{schema.NewTable("revisions")} r.AddSchemas(schema.New("public")) err = drv.CheckClean(context.Background(), &migrate.TableIdent{Schema: "test", Name: "revisions"}) require.NoError(t, err) } type mockInspector struct { schema.Inspector realm *schema.Realm schema *schema.Schema } func (m *mockInspector) InspectSchema(context.Context, string, *schema.InspectOptions) (*schema.Schema, error) { if m.schema == nil { return nil, &schema.NotExistError{} } return m.schema, nil } func (m *mockInspector) InspectRealm(context.Context, *schema.InspectRealmOption) (*schema.Realm, error) { return m.realm, nil } atlas-0.7.2/sql/postgres/inspect.go000066400000000000000000000774021431455511600173040ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package postgres import ( "context" "database/sql" "fmt" "regexp" "strconv" "strings" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" ) // A diff provides a PostgreSQL implementation for schema.Inspector. type inspect struct{ conn } var _ schema.Inspector = (*inspect)(nil) // InspectRealm returns schema descriptions of all resources in the given realm. 
func (i *inspect) InspectRealm(ctx context.Context, opts *schema.InspectRealmOption) (*schema.Realm, error) {
	// Resolve the schemas to inspect (all of them when opts carries no filter).
	schemas, err := i.schemas(ctx, opts)
	if err != nil {
		return nil, err
	}
	if opts == nil {
		opts = &schema.InspectRealmOption{}
	}
	r := schema.NewRealm(schemas...).SetCollation(i.collate)
	r.Attrs = append(r.Attrs, &CType{V: i.ctype})
	// Skip table inspection when there is nothing to scan
	// or tables were not requested by the inspection mode.
	if len(schemas) == 0 || !sqlx.ModeInspectRealm(opts).Is(schema.InspectTables) {
		return sqlx.ExcludeRealm(r, opts.Exclude)
	}
	if err := i.inspectTables(ctx, r, nil); err != nil {
		return nil, err
	}
	// Link table references across the inspected schemas.
	sqlx.LinkSchemaTables(schemas)
	return sqlx.ExcludeRealm(r, opts.Exclude)
}

// InspectSchema returns schema descriptions of the tables in the given schema.
// If the schema name is empty, the result will be the attached schema.
func (i *inspect) InspectSchema(ctx context.Context, name string, opts *schema.InspectOptions) (s *schema.Schema, err error) {
	schemas, err := i.schemas(ctx, &schema.InspectRealmOption{Schemas: []string{name}})
	if err != nil {
		return nil, err
	}
	// Exactly one schema must match the given name.
	switch n := len(schemas); {
	case n == 0:
		return nil, &schema.NotExistError{Err: fmt.Errorf("postgres: schema %q was not found", name)}
	case n > 1:
		return nil, fmt.Errorf("postgres: %d schemas were found for %q", n, name)
	}
	if opts == nil {
		opts = &schema.InspectOptions{}
	}
	r := schema.NewRealm(schemas...).SetCollation(i.collate)
	r.Attrs = append(r.Attrs, &CType{V: i.ctype})
	if sqlx.ModeInspectSchema(opts).Is(schema.InspectTables) {
		if err := i.inspectTables(ctx, r, opts); err != nil {
			return nil, err
		}
		sqlx.LinkSchemaTables(schemas)
	}
	return sqlx.ExcludeSchema(r.Schemas[0], opts.Exclude)
}

// inspectTables queries the tables of all schemas in the realm and fills in
// their columns, indexes, partitions, foreign keys and check constraints.
func (i *inspect) inspectTables(ctx context.Context, r *schema.Realm, opts *schema.InspectOptions) error {
	if err := i.tables(ctx, r, opts); err != nil {
		return err
	}
	for _, s := range r.Schemas {
		// Nothing more to resolve for an empty schema.
		if len(s.Tables) == 0 {
			continue
		}
		if err := i.columns(ctx, s); err != nil {
			return err
		}
		if err := i.indexes(ctx, s); err != nil {
			return err
		}
		if err := i.partitions(s); err != nil {
			return err
		}
		if err := i.fks(ctx, s); err != nil {
			return err
		}
		if err := i.checks(ctx, s); err != nil {
			return err
		}
	}
	return nil
}

// tables queries the tables of the given realm schemas and appends each one,
// with its comment and partition metadata, to the schema it belongs to.
// (Previous comment described a different, single-table lookup function.)
func (i *inspect) tables(ctx context.Context, realm *schema.Realm, opts *schema.InspectOptions) error {
	var (
		args  []any
		query = fmt.Sprintf(tablesQuery, nArgs(0, len(realm.Schemas)))
	)
	for _, s := range realm.Schemas {
		args = append(args, s.Name)
	}
	// Narrow the query to specific tables when requested.
	if opts != nil && len(opts.Tables) > 0 {
		for _, t := range opts.Tables {
			args = append(args, t)
		}
		query = fmt.Sprintf(tablesQueryArgs, nArgs(0, len(realm.Schemas)), nArgs(len(realm.Schemas), len(opts.Tables)))
	}
	rows, err := i.QueryContext(ctx, query, args...)
	if err != nil {
		return err
	}
	defer rows.Close()
	for rows.Next() {
		var tSchema, name, comment, partattrs, partstart, partexprs sql.NullString
		if err := rows.Scan(&tSchema, &name, &comment, &partattrs, &partstart, &partexprs); err != nil {
			return fmt.Errorf("scan table information: %w", err)
		}
		if !sqlx.ValidString(tSchema) || !sqlx.ValidString(name) {
			return fmt.Errorf("invalid schema or table name: %q.%q", tSchema.String, name.String)
		}
		s, ok := realm.Schema(tSchema.String)
		if !ok {
			return fmt.Errorf("schema %q was not found in realm", tSchema.String)
		}
		t := &schema.Table{Name: name.String}
		s.AddTables(t)
		if sqlx.ValidString(comment) {
			t.SetComment(comment.String)
		}
		// Raw pg_partitioned_table info; decoded later by partitions().
		if sqlx.ValidString(partattrs) {
			t.AddAttrs(&Partition{
				start: partstart.String,
				attrs: partattrs.String,
				exprs: partexprs.String,
			})
		}
	}
	// NOTE(review): rows.Close is also deferred above and rows.Err is not
	// consulted here — confirm iteration errors are intentionally surfaced
	// through the second Close call.
	return rows.Close()
}

// columns queries and appends the columns of the given table.
func (i *inspect) columns(ctx context.Context, s *schema.Schema) error {
	query := columnsQuery
	// CockroachDB exposes a different information-schema shape.
	if i.crdb {
		query = crdbColumnsQuery
	}
	rows, err := i.querySchema(ctx, query, s)
	if err != nil {
		return fmt.Errorf("postgres: querying schema %q columns: %w", s.Name, err)
	}
	defer rows.Close()
	for rows.Next() {
		if err := i.addColumn(s, rows); err != nil {
			return fmt.Errorf("postgres: %w", err)
		}
	}
	if err := rows.Close(); err != nil {
		return err
	}
	// Resolve the values of any enum-typed columns discovered above.
	if err := i.enumValues(ctx, s); err != nil {
		return err
	}
	return nil
}

// addColumn scans the current row and adds a new column from it to the table.
// The scan destinations must match the column order of columnsQuery (and its
// CockroachDB variant) exactly.
func (i *inspect) addColumn(s *schema.Schema, rows *sql.Rows) (err error) {
	var (
		typid, typelem, maxlen, precision, timeprecision, scale, seqstart, seqinc, seqlast sql.NullInt64
		table, name, typ, fmtype, nullable, defaults, identity, genidentity, genexpr,
		charset, collate, comment, typtype, elemtyp, interval sql.NullString
	)
	if err = rows.Scan(
		&table, &name, &typ, &fmtype, &nullable, &defaults, &maxlen, &precision, &timeprecision, &scale, &interval, &charset,
		&collate, &identity, &seqstart, &seqinc, &seqlast, &genidentity, &genexpr, &comment, &typtype, &typelem, &elemtyp, &typid,
	); err != nil {
		return err
	}
	t, ok := s.Table(table.String)
	if !ok {
		return fmt.Errorf("table %q was not found in schema", table.String)
	}
	c := &schema.Column{
		Name: name.String,
		Type: &schema.ColumnType{
			Raw:  typ.String,
			Null: nullable.String == "YES",
		},
	}
	// Decode the concrete column type from the raw catalog descriptors.
	// NOTE(review): the error returned by columnType is assigned to the named
	// return but not checked before use — confirm this is intended.
	c.Type.Type, err = columnType(&columnDesc{
		typ:           typ.String,
		fmtype:        fmtype.String,
		size:          maxlen.Int64,
		scale:         scale.Int64,
		typtype:       typtype.String,
		typelem:       typelem.Int64,
		elemtyp:       elemtyp.String,
		typid:         typid.Int64,
		interval:      interval.String,
		precision:     precision.Int64,
		timePrecision: &timeprecision.Int64,
	})
	if defaults.Valid {
		defaultExpr(c, defaults.String)
	}
	if identity.String == "YES" {
		c.Attrs = append(c.Attrs, &Identity{
			Generation: genidentity.String,
			Sequence: &Sequence{
				Last:      seqlast.Int64,
				Start:     seqstart.Int64,
				Increment: seqinc.Int64,
			},
		})
	}
	if sqlx.ValidString(genexpr) {
		c.Attrs = append(c.Attrs, &schema.GeneratedExpr{
			Expr: genexpr.String,
		})
	}
	if sqlx.ValidString(comment) {
		c.SetComment(comment.String)
	}
	if sqlx.ValidString(charset) {
		c.SetCharset(charset.String)
	}
	if sqlx.ValidString(collate) {
		c.SetCollation(collate.String)
	}
	t.Columns = append(t.Columns, c)
	return nil
}

// enumValues fills enum columns with their values from the database.
func (i *inspect) enumValues(ctx context.Context, s *schema.Schema) error {
	var (
		args  []any
		ids   = make(map[int64][]*schema.EnumType)
		query = "SELECT enumtypid, enumlabel FROM pg_enum WHERE enumtypid IN (%s)"
		// newE registers an intermediate enumType under its type id and
		// returns the public schema.EnumType that will receive the values.
		newE = func(e1 *enumType) *schema.EnumType {
			if _, ok := ids[e1.ID]; !ok {
				args = append(args, e1.ID)
			}
			// Convert the intermediate type to
			// the standard schema.EnumType.
			e2 := &schema.EnumType{T: e1.T, Schema: s}
			if e1.Schema != "" && e1.Schema != s.Name {
				e2.Schema = schema.New(e1.Schema)
			}
			ids[e1.ID] = append(ids[e1.ID], e2)
			return e2
		}
	)
	// Collect enum columns, including enums nested inside array types.
	for _, t := range s.Tables {
		for _, c := range t.Columns {
			switch t := c.Type.Type.(type) {
			case *enumType:
				e := newE(t)
				c.Type.Type = e
				c.Type.Raw = e.T
			case *ArrayType:
				if e, ok := t.Type.(*enumType); ok {
					t.Type = newE(e)
				}
			}
		}
	}
	// No enum columns in this schema.
	if len(ids) == 0 {
		return nil
	}
	rows, err := i.QueryContext(ctx, fmt.Sprintf(query, nArgs(0, len(args))), args...)
	if err != nil {
		return fmt.Errorf("postgres: querying enum values: %w", err)
	}
	defer rows.Close()
	for rows.Next() {
		var (
			id int64
			v  string
		)
		if err := rows.Scan(&id, &v); err != nil {
			return fmt.Errorf("postgres: scanning enum label: %w", err)
		}
		// The same enum type may back multiple columns.
		for _, enum := range ids[id] {
			enum.Values = append(enum.Values, v)
		}
	}
	return nil
}

// indexes queries and appends the indexes of the given table.
func (i *inspect) indexes(ctx context.Context, s *schema.Schema) error { query := indexesQuery switch { case i.conn.crdb: return i.crdbIndexes(ctx, s) case !i.conn.supportsIndexInclude(): query = indexesQueryNoInclude } rows, err := i.querySchema(ctx, query, s) if err != nil { return fmt.Errorf("postgres: querying schema %q indexes: %w", s.Name, err) } defer rows.Close() if err := i.addIndexes(s, rows); err != nil { return err } return rows.Err() } // addIndexes scans the rows and adds the indexes to the table. func (i *inspect) addIndexes(s *schema.Schema, rows *sql.Rows) error { names := make(map[string]*schema.Index) for rows.Next() { var ( uniq, primary, included bool table, name, typ string desc, nullsfirst, nullslast sql.NullBool column, contype, pred, expr, comment, options sql.NullString ) if err := rows.Scan(&table, &name, &typ, &column, &included, &primary, &uniq, &contype, &pred, &expr, &desc, &nullsfirst, &nullslast, &comment, &options); err != nil { return fmt.Errorf("postgres: scanning indexes for schema %q: %w", s.Name, err) } t, ok := s.Table(table) if !ok { return fmt.Errorf("table %q was not found in schema", table) } idx, ok := names[name] if !ok { idx = &schema.Index{ Name: name, Unique: uniq, Table: t, Attrs: []schema.Attr{ &IndexType{T: typ}, }, } if sqlx.ValidString(comment) { idx.Attrs = append(idx.Attrs, &schema.Comment{Text: comment.String}) } if sqlx.ValidString(contype) { idx.Attrs = append(idx.Attrs, &ConType{T: contype.String}) } if sqlx.ValidString(pred) { idx.Attrs = append(idx.Attrs, &IndexPredicate{P: pred.String}) } if sqlx.ValidString(options) { p, err := newIndexStorage(options.String) if err != nil { return err } idx.Attrs = append(idx.Attrs, p) } names[name] = idx if primary { t.PrimaryKey = idx } else { t.Indexes = append(t.Indexes, idx) } } part := &schema.IndexPart{SeqNo: len(idx.Parts) + 1, Desc: desc.Bool} if nullsfirst.Bool || nullslast.Bool { part.Attrs = append(part.Attrs, &IndexColumnProperty{ NullsFirst: 
nullsfirst.Bool, NullsLast: nullslast.Bool, }) } switch { case included: c, ok := t.Column(column.String) if !ok { return fmt.Errorf("postgres: INCLUDE column %q was not found for index %q", column.String, idx.Name) } var include IndexInclude sqlx.Has(idx.Attrs, &include) include.Columns = append(include.Columns, c) schema.ReplaceOrAppend(&idx.Attrs, &include) case sqlx.ValidString(column): part.C, ok = t.Column(column.String) if !ok { return fmt.Errorf("postgres: column %q was not found for index %q", column.String, idx.Name) } part.C.Indexes = append(part.C.Indexes, idx) idx.Parts = append(idx.Parts, part) case sqlx.ValidString(expr): part.X = &schema.RawExpr{ X: expr.String, } idx.Parts = append(idx.Parts, part) default: return fmt.Errorf("postgres: invalid part for index %q", idx.Name) } } return nil } // partitions builds the partition each table in the schema. func (i *inspect) partitions(s *schema.Schema) error { for _, t := range s.Tables { var d Partition if !sqlx.Has(t.Attrs, &d) { continue } switch s := strings.ToLower(d.start); s { case "r": d.T = PartitionTypeRange case "l": d.T = PartitionTypeList case "h": d.T = PartitionTypeHash default: return fmt.Errorf("postgres: unexpected partition strategy %q", s) } idxs := strings.Split(strings.TrimSpace(d.attrs), " ") if len(idxs) == 0 { return fmt.Errorf("postgres: no columns/expressions were found in partition key for column %q", t.Name) } for i := range idxs { switch idx, err := strconv.Atoi(idxs[i]); { case err != nil: return fmt.Errorf("postgres: faild parsing partition key index %q", idxs[i]) // An expression. case idx == 0: j := sqlx.ExprLastIndex(d.exprs) if j == -1 { return fmt.Errorf("postgres: no expression found in partition key: %q", d.exprs) } d.Parts = append(d.Parts, &PartitionPart{ X: &schema.RawExpr{X: d.exprs[:j+1]}, }) d.exprs = strings.TrimPrefix(d.exprs[j+1:], ", ") // A column at index idx-1. 
default: if idx > len(t.Columns) { return fmt.Errorf("postgres: unexpected column index %d", idx) } d.Parts = append(d.Parts, &PartitionPart{ C: t.Columns[idx-1], }) } } schema.ReplaceOrAppend(&t.Attrs, &d) } return nil } // fks queries and appends the foreign keys of the given table. func (i *inspect) fks(ctx context.Context, s *schema.Schema) error { rows, err := i.querySchema(ctx, fksQuery, s) if err != nil { return fmt.Errorf("postgres: querying schema %q foreign keys: %w", s.Name, err) } defer rows.Close() if err := sqlx.SchemaFKs(s, rows); err != nil { return fmt.Errorf("postgres: %w", err) } return rows.Err() } // checks queries and appends the check constraints of the given table. func (i *inspect) checks(ctx context.Context, s *schema.Schema) error { rows, err := i.querySchema(ctx, checksQuery, s) if err != nil { return fmt.Errorf("postgres: querying schema %q check constraints: %w", s.Name, err) } defer rows.Close() if err := i.addChecks(s, rows); err != nil { return err } return rows.Err() } // addChecks scans the rows and adds the checks to the table. 
func (i *inspect) addChecks(s *schema.Schema, rows *sql.Rows) error {
	// A check constraint covering multiple columns spans multiple rows;
	// track constraints by name.
	names := make(map[string]*schema.Check)
	for rows.Next() {
		var (
			noInherit                            bool
			table, name, column, clause, indexes string
		)
		if err := rows.Scan(&table, &name, &clause, &column, &indexes, &noInherit); err != nil {
			return fmt.Errorf("postgres: scanning check: %w", err)
		}
		t, ok := s.Table(table)
		if !ok {
			return fmt.Errorf("table %q was not found in schema", table)
		}
		if _, ok := t.Column(column); !ok {
			return fmt.Errorf("postgres: column %q was not found for check %q", column, name)
		}
		check, ok := names[name]
		if !ok {
			check = &schema.Check{Name: name, Expr: clause, Attrs: []schema.Attr{&CheckColumns{}}}
			if noInherit {
				check.Attrs = append(check.Attrs, &NoInherit{})
			}
			names[name] = check
			t.Attrs = append(t.Attrs, check)
		}
		// The CheckColumns attribute is always created first (index 0).
		c := check.Attrs[0].(*CheckColumns)
		c.Columns = append(c.Columns, column)
	}
	return nil
}

// schemas returns the list of the schemas in the database.
func (i *inspect) schemas(ctx context.Context, opts *schema.InspectRealmOption) ([]*schema.Schema, error) {
	var (
		args  []any
		query = schemasQuery
	)
	if opts != nil {
		switch n := len(opts.Schemas); {
		// An empty name means the connection's current schema.
		case n == 1 && opts.Schemas[0] == "":
			query = fmt.Sprintf(schemasQueryArgs, "= CURRENT_SCHEMA()")
		case n == 1 && opts.Schemas[0] != "":
			query = fmt.Sprintf(schemasQueryArgs, "= $1")
			args = append(args, opts.Schemas[0])
		case n > 0:
			query = fmt.Sprintf(schemasQueryArgs, "IN ("+nArgs(0, len(opts.Schemas))+")")
			for _, s := range opts.Schemas {
				args = append(args, s)
			}
		}
	}
	rows, err := i.QueryContext(ctx, query, args...)
	if err != nil {
		return nil, fmt.Errorf("postgres: querying schemas: %w", err)
	}
	defer rows.Close()
	var schemas []*schema.Schema
	for rows.Next() {
		var name string
		if err := rows.Scan(&name); err != nil {
			return nil, err
		}
		schemas = append(schemas, &schema.Schema{
			Name: name,
		})
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	return schemas, nil
}

// querySchema runs the given per-schema query with the schema name and its
// table names as positional arguments ($1 is the schema, $2..$n the tables).
func (i *inspect) querySchema(ctx context.Context, query string, s *schema.Schema) (*sql.Rows, error) {
	args := []any{s.Name}
	for _, t := range s.Tables {
		args = append(args, t.Name)
	}
	return i.QueryContext(ctx, fmt.Sprintf(query, nArgs(1, len(s.Tables))), args...)
}

// nArgs builds a comma-separated list of n positional placeholders
// starting after the given offset (e.g. nArgs(1, 2) == "$2, $3").
func nArgs(start, n int) string {
	var b strings.Builder
	for i := 1; i <= n; i++ {
		if i > 1 {
			b.WriteString(", ")
		}
		b.WriteByte('$')
		b.WriteString(strconv.Itoa(start + i))
	}
	return b.String()
}

// reNextval matches a nextval('<seq>') default and captures the sequence name.
var reNextval = regexp.MustCompile(`(?i) *nextval\('(?:[\w$]+\.)*([\w$]+_[\w$]+_seq)'(?:::regclass)*\) *$`)

// defaultExpr decodes a column default expression into the column's
// Default/Type fields, detecting serial columns by their sequence default.
func defaultExpr(c *schema.Column, s string) {
	switch m := reNextval.FindStringSubmatch(s); {
	// The definition of "<column> <serial type>" is equivalent to specifying:
	// "<column> <int type> NOT NULL DEFAULT nextval('<table>_<column>_seq')".
	// https://postgresql.org/docs/current/datatype-numeric.html#DATATYPE-SERIAL.
	case len(m) == 2:
		tt, ok := c.Type.Type.(*schema.IntegerType)
		if !ok {
			return
		}
		st := &SerialType{SequenceName: m[1]}
		st.SetType(tt)
		c.Type.Raw = st.T
		c.Type.Type = st
	case sqlx.IsLiteralBool(s), sqlx.IsLiteralNumber(s), sqlx.IsQuoted(s, '\''):
		c.Default = &schema.Literal{V: s}
	default:
		var x schema.Expr = &schema.RawExpr{X: s}
		// Try casting or fallback to raw expressions (e.g. column text[] has the default of '{}':text[]).
		if v, ok := canConvert(c.Type, s); ok {
			x = &schema.Literal{V: v}
		}
		c.Default = x
	}
}

// canConvert reports whether a quoted-and-cast default (e.g. 'v'::type) can be
// represented as a literal of the given column type, returning the literal.
func canConvert(t *schema.ColumnType, x string) (string, bool) {
	i := strings.LastIndex(x, "::")
	if i == -1 || !sqlx.IsQuoted(x[:i], '\'') {
		return "", false
	}
	q := x[0:i]
	x = x[1 : i-1]
	switch t.Type.(type) {
	case *enumType:
		return q, true
	case *schema.BoolType:
		if sqlx.IsLiteralBool(x) {
			return x, true
		}
	case *schema.DecimalType, *schema.IntegerType, *schema.FloatType:
		if sqlx.IsLiteralNumber(x) {
			return x, true
		}
	case *ArrayType, *schema.BinaryType, *schema.JSONType, *NetworkType, *schema.SpatialType, *schema.StringType, *schema.TimeType, *UUIDType, *XMLType:
		return q, true
	}
	return "", false
}

type (
	// CType describes the character classification setting (LC_CTYPE).
	CType struct {
		schema.Attr
		V string
	}

	// UserDefinedType defines a user-defined type attribute.
	UserDefinedType struct {
		schema.Type
		T string
	}

	// enumType represents an enum type. It serves as an intermediate representation of a Postgres enum type,
	// to temporarily save TypeID and TypeName of an enum column until the enum values can be extracted.
	enumType struct {
		schema.Type
		T      string // Type name.
		Schema string // Optional schema name.
		ID     int64  // Type id.
		Values []string
	}

	// ArrayType defines an array type.
	// https://postgresql.org/docs/current/arrays.html
	ArrayType struct {
		schema.Type        // Underlying items type (e.g. varchar(255)).
		T           string // Formatted type (e.g. int[]).
	}

	// BitType defines a bit type.
	// https://postgresql.org/docs/current/datatype-bit.html
	BitType struct {
		schema.Type
		T   string
		Len int64
	}

	// IntervalType defines an interval type.
	// https://postgresql.org/docs/current/datatype-datetime.html
	IntervalType struct {
		schema.Type
		T         string // Type name.
		F         string // Optional field. YEAR, MONTH, ..., MINUTE TO SECOND.
		Precision *int   // Optional precision.
	}

	// A NetworkType defines a network type.
	// https://postgresql.org/docs/current/datatype-net-types.html
	NetworkType struct {
		schema.Type
		T   string
		Len int64
	}

	// A CurrencyType defines a currency type.
	CurrencyType struct {
		schema.Type
		T string
	}

	// A SerialType defines a serial type.
	// https://postgresql.org/docs/current/datatype-numeric.html#DATATYPE-SERIAL
	SerialType struct {
		schema.Type
		T         string
		Precision int
		// SequenceName holds the inspected sequence name attached to the column.
		// It defaults to <table>_<column>_seq when the column is created, but may
		// be different in case the table or the column was renamed.
		SequenceName string
	}

	// A UUIDType defines a UUID type.
	UUIDType struct {
		schema.Type
		T string
	}

	// A XMLType defines an XML type.
	XMLType struct {
		schema.Type
		T string
	}

	// ConType describes constraint type.
	// https://postgresql.org/docs/current/catalog-pg-constraint.html
	ConType struct {
		schema.Attr
		T string // c, f, p, u, t, x.
	}

	// Sequence defines (the supported) sequence options.
	// https://postgresql.org/docs/current/sql-createsequence.html
	Sequence struct {
		Start, Increment int64
		// Last sequence value written to disk.
		// https://postgresql.org/docs/current/view-pg-sequences.html.
		Last int64
	}

	// Identity defines an identity column.
	Identity struct {
		schema.Attr
		Generation string // ALWAYS, BY DEFAULT.
		Sequence   *Sequence
	}

	// IndexType represents an index type.
	// https://postgresql.org/docs/current/indexes-types.html
	IndexType struct {
		schema.Attr
		T string // BTREE, BRIN, HASH, GiST, SP-GiST, GIN.
	}

	// IndexPredicate describes a partial index predicate.
	// https://postgresql.org/docs/current/catalog-pg-index.html
	IndexPredicate struct {
		schema.Attr
		P string
	}

	// IndexColumnProperty describes an index column property.
	// https://postgresql.org/docs/current/functions-info.html#FUNCTIONS-INFO-INDEX-COLUMN-PROPS
	IndexColumnProperty struct {
		schema.Attr
		// NullsFirst defaults to true for DESC indexes.
		NullsFirst bool
		// NullsLast defaults to true for ASC indexes.
		NullsLast bool
	}

	// IndexStorageParams describes index storage parameters added with the WITH clause.
	// https://postgresql.org/docs/current/sql-createindex.html#SQL-CREATEINDEX-STORAGE-PARAMETERS
	IndexStorageParams struct {
		schema.Attr
		// AutoSummarize defines the autosummarize storage parameter.
		AutoSummarize bool
		// PagesPerRange defines pages_per_range storage
		// parameter for BRIN indexes. Defaults to 128.
		PagesPerRange int64
	}

	// IndexInclude describes the INCLUDE clause, which allows specifying
	// a list of columns added to the index as non-key columns.
	// https://www.postgresql.org/docs/current/sql-createindex.html
	IndexInclude struct {
		schema.Attr
		Columns []*schema.Column
	}

	// Concurrently describes the CONCURRENTLY clause to instruct Postgres
	// to build or drop the index concurrently without blocking the current table.
	// https://www.postgresql.org/docs/current/sql-createindex.html#SQL-CREATEINDEX-CONCURRENTLY
	Concurrently struct {
		schema.Attr
	}

	// NoInherit attribute defines the NO INHERIT flag for CHECK constraint.
	// https://postgresql.org/docs/current/catalog-pg-constraint.html
	NoInherit struct {
		schema.Attr
	}

	// CheckColumns attribute holds the column names used by the CHECK constraints.
	// This attribute is added on inspection for internal usage and has no meaning
	// on migration.
	CheckColumns struct {
		schema.Attr
		Columns []string
	}

	// Partition defines the spec of a partitioned table.
	Partition struct {
		schema.Attr
		// T defines the type/strategy of the partition.
		// Can be one of: RANGE, LIST, HASH.
		T string
		// Partition parts. The additional attributes
		// on each part can be used to control collation.
		Parts []*PartitionPart

		// Internal info returned from pg_partitioned_table.
		start, attrs, exprs string
	}

	// A PartitionPart represents an index part that
	// can be either an expression or a column.
	PartitionPart struct {
		X     schema.Expr
		C     *schema.Column
		Attrs []schema.Attr
	}
)

// IsUnique reports if the type is unique constraint.
func (c ConType) IsUnique() bool { return strings.ToLower(c.T) == "u" }

// IntegerType returns the underlying integer type this serial type represents.
func (s *SerialType) IntegerType() *schema.IntegerType { t := &schema.IntegerType{T: TypeInteger} switch s.T { case TypeSerial2, TypeSmallSerial: t.T = TypeSmallInt case TypeSerial8, TypeBigSerial: t.T = TypeBigInt } return t } // SetType sets the serial type from the given integer type. func (s *SerialType) SetType(t *schema.IntegerType) { switch t.T { case TypeSmallInt, TypeInt2: s.T = TypeSmallSerial case TypeInteger, TypeInt4, TypeInt: s.T = TypeSerial case TypeBigInt, TypeInt8: s.T = TypeBigSerial } } // sequence returns the inspected name of the sequence // or the standard name defined by postgres. func (s *SerialType) sequence(t *schema.Table, c *schema.Column) string { if s.SequenceName != "" { return s.SequenceName } return fmt.Sprintf("%s_%s_seq", t.Name, c.Name) } // newIndexStorage parses and returns the index storage parameters. func newIndexStorage(opts string) (*IndexStorageParams, error) { params := &IndexStorageParams{} for _, p := range strings.Split(strings.Trim(opts, "{}"), ",") { kv := strings.Split(p, "=") if len(kv) != 2 { return nil, fmt.Errorf("invalid index storage parameter: %s", p) } switch kv[0] { case "autosummarize": b, err := strconv.ParseBool(kv[1]) if err != nil { return nil, fmt.Errorf("failed parsing autosummarize %q: %w", kv[1], err) } params.AutoSummarize = b case "pages_per_range": i, err := strconv.ParseInt(kv[1], 10, 64) if err != nil { return nil, fmt.Errorf("failed parsing pages_per_range %q: %w", kv[1], err) } params.PagesPerRange = i } } return params, nil } // reEnumType extracts the enum type and an option schema qualifier. 
var reEnumType = regexp.MustCompile(`^(?:(".+"|\w+)\.)?(".+"|\w+)$`)

// newEnumType returns the enumType representation of the given type name,
// splitting off an optional (possibly quoted) schema qualifier.
func newEnumType(t string, id int64) *enumType {
	var (
		e     = &enumType{T: t, ID: id}
		parts = reEnumType.FindStringSubmatch(e.T)
		// r unquotes a double-quoted identifier and unescapes embedded quotes.
		r = func(s string) string {
			s = strings.ReplaceAll(s, `""`, `"`)
			if len(s) > 1 && s[0] == '"' && s[len(s)-1] == '"' {
				s = s[1 : len(s)-1]
			}
			return s
		}
	)
	if len(parts) > 1 {
		e.Schema = r(parts[1])
	}
	if len(parts) > 2 {
		e.T = r(parts[2])
	}
	return e
}

const (
	// Query to list runtime parameters.
	paramsQuery = `SELECT setting FROM pg_settings WHERE name IN ('lc_collate', 'lc_ctype', 'server_version_num', 'crdb_version') ORDER BY name DESC`

	// Query to list database schemas.
	schemasQuery = "SELECT schema_name FROM information_schema.schemata WHERE schema_name NOT IN ('information_schema', 'pg_catalog', 'pg_toast', 'crdb_internal', 'pg_extension') AND schema_name NOT LIKE 'pg_%temp_%' ORDER BY schema_name"

	// Query to list specific database schemas.
	schemasQueryArgs = "SELECT schema_name FROM information_schema.schemata WHERE schema_name %s ORDER BY schema_name"

	// Query to list table information.
	// The single %s placeholder receives the schema-name bind arguments.
	tablesQuery = `
SELECT
	t1.table_schema,
	t1.table_name,
	pg_catalog.obj_description(t3.oid, 'pg_class') AS comment,
	t4.partattrs AS partition_attrs,
	t4.partstrat AS partition_strategy,
	pg_get_expr(t4.partexprs, t4.partrelid) AS partition_exprs
FROM
	INFORMATION_SCHEMA.TABLES AS t1
	JOIN pg_catalog.pg_namespace AS t2 ON t2.nspname = t1.table_schema
	JOIN pg_catalog.pg_class AS t3 ON t3.relnamespace = t2.oid AND t3.relname = t1.table_name
	LEFT JOIN pg_catalog.pg_partitioned_table AS t4 ON t4.partrelid = t3.oid
WHERE
	t1.table_type = 'BASE TABLE'
	AND NOT COALESCE(t3.relispartition, false)
	AND t1.table_schema IN (%s)
ORDER BY
	t1.table_schema, t1.table_name
`
	// Same as tablesQuery, but additionally filters by table names
	// (second %s placeholder).
	tablesQueryArgs = `
SELECT
	t1.table_schema,
	t1.table_name,
	pg_catalog.obj_description(t3.oid, 'pg_class') AS comment,
	t4.partattrs AS partition_attrs,
	t4.partstrat AS partition_strategy,
	pg_get_expr(t4.partexprs, t4.partrelid) AS partition_exprs
FROM
	INFORMATION_SCHEMA.TABLES AS t1
	JOIN pg_catalog.pg_namespace AS t2 ON t2.nspname = t1.table_schema
	JOIN pg_catalog.pg_class AS t3 ON t3.relnamespace = t2.oid AND t3.relname = t1.table_name
	LEFT JOIN pg_catalog.pg_partitioned_table AS t4 ON t4.partrelid = t3.oid
WHERE
	t1.table_type = 'BASE TABLE'
	AND NOT COALESCE(t3.relispartition, false)
	AND t1.table_schema IN (%s)
	AND t1.table_name IN (%s)
ORDER BY
	t1.table_schema, t1.table_name
`
	// Query to list table columns.
	columnsQuery = `
SELECT
	t1.table_name,
	t1.column_name,
	t1.data_type,
	pg_catalog.format_type(a.atttypid, a.atttypmod) AS format_type,
	t1.is_nullable,
	t1.column_default,
	t1.character_maximum_length,
	t1.numeric_precision,
	t1.datetime_precision,
	t1.numeric_scale,
	t1.interval_type,
	t1.character_set_name,
	t1.collation_name,
	t1.is_identity,
	t1.identity_start,
	t1.identity_increment,
	(CASE WHEN t1.is_identity = 'YES' THEN (SELECT last_value FROM pg_sequences WHERE quote_ident(schemaname) || '.' || quote_ident(sequencename) = pg_get_serial_sequence(quote_ident(t1.table_schema) || '.' || quote_ident(t1.table_name), t1.column_name)) END) AS identity_last,
	t1.identity_generation,
	t1.generation_expression,
	col_description(t3.oid, "ordinal_position") AS comment,
	t4.typtype,
	t4.typelem,
	(CASE WHEN t4.typcategory = 'A' AND t4.typelem <> 0 THEN (SELECT t.typtype FROM pg_catalog.pg_type t WHERE t.oid = t4.typelem) END) AS elemtyp,
	t4.oid
FROM
	"information_schema"."columns" AS t1
	JOIN pg_catalog.pg_namespace AS t2 ON t2.nspname = t1.table_schema
	JOIN pg_catalog.pg_class AS t3 ON t3.relnamespace = t2.oid AND t3.relname = t1.table_name
	JOIN pg_catalog.pg_attribute AS a ON a.attrelid = t3.oid AND a.attname = t1.column_name
	LEFT JOIN pg_catalog.pg_type AS t4 ON t1.udt_name = t4.typname AND t4.typnamespace = t2.oid
WHERE
	t1.table_schema = $1 AND t1.table_name IN (%s)
ORDER BY
	t1.table_name, t1.ordinal_position
`
	// Query to list table foreign keys, joined with the referenced
	// columns through the information_schema constraint views.
	fksQuery = `
SELECT
	t1.constraint_name,
	t1.table_name,
	t2.column_name,
	t1.table_schema,
	t3.table_name AS referenced_table_name,
	t3.column_name AS referenced_column_name,
	t3.table_schema AS referenced_schema_name,
	t4.update_rule,
	t4.delete_rule
FROM
	information_schema.table_constraints t1
	JOIN information_schema.key_column_usage t2
	ON t1.constraint_name = t2.constraint_name
	AND t1.table_schema = t2.constraint_schema
	JOIN information_schema.constraint_column_usage t3
	ON t1.constraint_name = t3.constraint_name
	AND t1.table_schema = t3.constraint_schema
	JOIN information_schema.referential_constraints t4
	ON t1.constraint_name = t4.constraint_name
	AND t1.table_schema = t4.constraint_schema
WHERE
	t1.constraint_type = 'FOREIGN KEY'
	AND t1.table_schema = $1
	AND t1.table_name IN (%s)
ORDER BY
	t1.constraint_name,
	t2.ordinal_position
`
	// Query to list table check constraints.
	checksQuery = `
SELECT
	rel.relname AS table_name,
	t1.conname AS constraint_name,
	pg_get_expr(t1.conbin, t1.conrelid) as expression,
	t2.attname as column_name,
	t1.conkey as column_indexes,
	t1.connoinherit as no_inherit
FROM
	pg_constraint t1
	JOIN pg_attribute t2
	ON t2.attrelid = t1.conrelid
	AND t2.attnum = ANY (t1.conkey)
	JOIN pg_class rel
	ON rel.oid = t1.conrelid
	JOIN pg_namespace nsp
	ON nsp.oid = t1.connamespace
WHERE
	t1.contype = 'c'
	AND nsp.nspname = $1
	AND rel.relname IN (%s)
ORDER BY
	t1.conname, array_position(t1.conkey, t2.attnum)
`
)

var (
	// indexesQuery reports INCLUDE-d (non-key) columns by checking
	// idx.ord > idx.indnkeyatts; indexesQueryNoInclude hardcodes the
	// "included" column to false for servers without that information.
	indexesQuery          = fmt.Sprintf(indexesQueryTmpl, "(a.attname <> '' AND idx.indnatts > idx.indnkeyatts AND idx.ord > idx.indnkeyatts)", "%s")
	indexesQueryNoInclude = fmt.Sprintf(indexesQueryTmpl, "false", "%s")
	indexesQueryTmpl      = `
SELECT
	t.relname AS table_name,
	i.relname AS index_name,
	am.amname AS index_type,
	a.attname AS column_name,
	%s AS included,
	idx.indisprimary AS primary,
	idx.indisunique AS unique,
	c.contype AS constraint_type,
	pg_get_expr(idx.indpred, idx.indrelid) AS predicate,
	pg_get_indexdef(idx.indexrelid, idx.ord, false) AS expression,
	pg_index_column_has_property(idx.indexrelid, idx.ord, 'desc') AS desc,
	pg_index_column_has_property(idx.indexrelid, idx.ord, 'nulls_first') AS nulls_first,
	pg_index_column_has_property(idx.indexrelid, idx.ord, 'nulls_last') AS nulls_last,
	obj_description(i.oid, 'pg_class') AS comment,
	i.reloptions AS options
FROM
	(
		select
			*,
			generate_series(1,array_length(i.indkey,1)) as ord,
			unnest(i.indkey) AS key
		from pg_index i
	) idx
	JOIN pg_class i ON i.oid = idx.indexrelid
	JOIN pg_class t ON t.oid = idx.indrelid
	JOIN pg_namespace n ON n.oid = t.relnamespace
	LEFT JOIN pg_constraint c ON idx.indexrelid = c.conindid
	LEFT JOIN pg_attribute a ON (a.attrelid, a.attnum) = (idx.indrelid, idx.key)
	JOIN pg_am am ON am.oid = i.relam
WHERE
	n.nspname = $1
	AND t.relname IN (%s)
	AND COALESCE(c.contype, '') <> 'f'
ORDER BY
	table_name, index_name, idx.ord
`
)
atlas-0.7.2/sql/postgres/inspect_test.go000066400000000000000000001654471431455511600203520ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package postgres import ( "context" "fmt" "testing" "ariga.io/atlas/sql/internal/sqltest" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/DATA-DOG/go-sqlmock" "github.com/stretchr/testify/require" ) // Single table queries used by the different tests. var ( queryFKs = sqltest.Escape(fmt.Sprintf(fksQuery, "$2")) queryTables = sqltest.Escape(fmt.Sprintf(tablesQuery, "$1")) queryChecks = sqltest.Escape(fmt.Sprintf(checksQuery, "$2")) queryColumns = sqltest.Escape(fmt.Sprintf(columnsQuery, "$2")) queryCrdbColumns = sqltest.Escape(fmt.Sprintf(crdbColumnsQuery, "$2")) queryIndexes = sqltest.Escape(fmt.Sprintf(indexesQuery, "$2")) queryCrdbIndexes = sqltest.Escape(fmt.Sprintf(crdbIndexesQuery, "$2")) ) func TestDriver_InspectTable(t *testing.T) { tests := []struct { name string before func(mock) expect func(*require.Assertions, *schema.Table, error) }{ { name: "column types", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` table_name | column_name | data_type | formatted | is_nullable | column_default | character_maximum_length | numeric_precision | datetime_precision | numeric_scale | interval_type | character_set_name | collation_name | is_identity | identity_start | identity_increment | identity_last | identity_generation | generation_expression | comment | typtype | typelem | elemtyp | oid -------------+--------------+-----------------------------+---------------------|-------------+----------------------------------------+--------------------------+-------------------+--------------------+---------------+---------------------+--------------------+----------------+-------------+----------------+--------------------+------------------+---------------------+-----------------------+---------+---------+---------+---------+------- users | id | bigint | int8 | NO | | | 64 | | 0 | | | | YES | 100 | 1 | 1 | BY DEFAULT | | | b | | | 20 users | rank | integer | int4 | YES | | | 32 | | 0 | | | | NO | | | | | | rank | b | | | 23 users | c1 | smallint | int2 | NO | 1000 | | 16 | | 0 | | | | NO | | | | | | | b | | | 21 users | c2 | bit | bit | NO | | 1 | | | | | | | NO | | | | | | | b | | | 1560 users | c3 | bit varying | varbit | NO | | 10 | | | | | | | NO | | | | | | | b | | | 1562 users | c4 | boolean | bool | NO | | | | | | | | | NO | | | | | | | b | | | 16 users | c5 | bytea | bytea | NO | | | | | | | | | NO | | | | | | | b | | | 17 users | c6 | character | bpchar | NO | | 100 | | | | | | | NO | | | | | | | b | | | 1042 users | c7 | character varying | varchar | NO | 'logged_in'::character varying | | | | | | | | NO | | | | | | | b | | | 1043 users | c8 | cidr | cidr | NO | | | | | | | | | NO | | | | | | | b | | | 650 users | c9 | circle | circle | NO | | | | | | | | | NO | | | | | | | b | | | 718 users | c10 | date | date | NO | | | | | | | | | NO | | | | | | | b | | | 1082 users | c11 | time with time zone | timetz | NO | | | | | | | | | NO | | | | | | | b 
| | | 1266 users | c12 | double precision | float8 | NO | | | 53 | | | | | | NO | | | | | | | b | | | 701 users | c13 | real | float4 | NO | random() | | 24 | | | | | | NO | | | | | | | b | | | 700 users | c14 | json | json | NO | '{}'::json | | | | | | | | NO | | | | | | | b | | | 114 users | c15 | jsonb | jsonb | NO | '{}'::jsonb | | | | | | | | NO | | | | | | | b | | | 3802 users | c16 | money | money | NO | | | | | | | | | NO | | | | | | | b | | | 790 users | c17 | numeric | numeric | NO | | | | | | | | | NO | | | | | | | b | | | 1700 users | c18 | numeric | numeric | NO | | | 4 | | 4 | | | | NO | | | | | | | b | | | 1700 users | c19 | integer | int4 | NO | nextval('t1_c19_seq'::regclass) | | 32 | | 0 | | | | NO | | | | | | | b | | | 23 users | c20 | uuid | uuid | NO | | | | | | | | | NO | | | | | | | b | | | 2950 users | c21 | xml | xml | NO | | | | | | | | | NO | | | | | | | b | | | 142 users | c22 | ARRAY | integer[] | YES | | | | | | | | | NO | | | | | | | b | | | 1007 users | c23 | USER-DEFINED | ltree | YES | | | | | | | | | NO | | | | | | | b | | | 16535 users | c24 | USER-DEFINED | state | NO | | | | | | | | | NO | | | | | | | e | | | 16774 users | c25 | timestamp without time zone | timestamp | NO | now() | | | 4 | | | | | NO | | | | | | | b | | | 1114 users | c26 | timestamp with time zone | timestamptz | NO | | | | 6 | | | | | NO | | | | | | | b | | | 1184 users | c27 | time without time zone | time | NO | | | | 6 | | | | | NO | | | | | | | b | | | 1266 users | c28 | int | int8 | NO | | | | 6 | | | | | NO | | | | | (c1 + c2) | | b | | | 1267 users | c29 | interval | interval | NO | | | | 6 | | | | | NO | | | | | | | b | | | 1268 users | c30 | interval | interval | NO | | | | 6 | | MONTH | | | NO | | | | | | | b | | | 1269 users | c31 | interval | interval | NO | | | | 6 | | MINUTE TO SECOND(6) | | | NO | | | | | | | b | | | 1233 users | c32 | bigint | int4 | NO | nextval('public.t1_c32_seq'::regclass) | | 32 | | 0 | | | | NO | | | | | | | b | | | 23 
users | c33 | USER-DEFINED | test."status""." | NO | 'unknown'::test."status""." | | | | | | | | NO | | | | | | | e | | | 16775 users | c34 | ARRAY | state[] | NO | | | | | | | | | NO | | | | | | | b | 16774 | e | 16779 `)) m.ExpectQuery(sqltest.Escape(`SELECT enumtypid, enumlabel FROM pg_enum WHERE enumtypid IN ($1, $2)`)). WithArgs(16774, 16775). WillReturnRows(sqltest.Rows(` enumtypid | enumlabel -----------+----------- 16774 | on 16774 | off 16775 | unknown `)) m.noIndexes() m.noFKs() m.noChecks() }, expect: func(require *require.Assertions, t *schema.Table, err error) { p := func(i int) *int { return &i } require.NoError(err) require.Equal("users", t.Name) require.EqualValues([]*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&Identity{Generation: "BY DEFAULT", Sequence: &Sequence{Start: 100, Increment: 1, Last: 1}}}}, {Name: "rank", Type: &schema.ColumnType{Raw: "integer", Null: true, Type: &schema.IntegerType{T: "integer"}}, Attrs: []schema.Attr{&schema.Comment{Text: "rank"}}}, {Name: "c1", Type: &schema.ColumnType{Raw: "smallint", Type: &schema.IntegerType{T: "smallint"}}, Default: &schema.Literal{V: "1000"}}, {Name: "c2", Type: &schema.ColumnType{Raw: "bit", Type: &BitType{T: "bit", Len: 1}}}, {Name: "c3", Type: &schema.ColumnType{Raw: "bit varying", Type: &BitType{T: "bit varying", Len: 10}}}, {Name: "c4", Type: &schema.ColumnType{Raw: "boolean", Type: &schema.BoolType{T: "boolean"}}}, {Name: "c5", Type: &schema.ColumnType{Raw: "bytea", Type: &schema.BinaryType{T: "bytea"}}}, {Name: "c6", Type: &schema.ColumnType{Raw: "character", Type: &schema.StringType{T: "character", Size: 100}}}, {Name: "c7", Type: &schema.ColumnType{Raw: "character varying", Type: &schema.StringType{T: "character varying"}}, Default: &schema.Literal{V: "'logged_in'"}}, {Name: "c8", Type: &schema.ColumnType{Raw: "cidr", Type: &NetworkType{T: "cidr"}}}, {Name: "c9", Type: &schema.ColumnType{Raw: 
"circle", Type: &schema.SpatialType{T: "circle"}}}, {Name: "c10", Type: &schema.ColumnType{Raw: "date", Type: &schema.TimeType{T: "date"}}}, {Name: "c11", Type: &schema.ColumnType{Raw: "time with time zone", Type: &schema.TimeType{T: "time with time zone", Precision: p(0)}}}, {Name: "c12", Type: &schema.ColumnType{Raw: "double precision", Type: &schema.FloatType{T: "double precision", Precision: 53}}}, {Name: "c13", Type: &schema.ColumnType{Raw: "real", Type: &schema.FloatType{T: "real", Precision: 24}}, Default: &schema.RawExpr{X: "random()"}}, {Name: "c14", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}}, Default: &schema.Literal{V: "'{}'"}}, {Name: "c15", Type: &schema.ColumnType{Raw: "jsonb", Type: &schema.JSONType{T: "jsonb"}}, Default: &schema.Literal{V: "'{}'"}}, {Name: "c16", Type: &schema.ColumnType{Raw: "money", Type: &CurrencyType{T: "money"}}}, {Name: "c17", Type: &schema.ColumnType{Raw: "numeric", Type: &schema.DecimalType{T: "numeric"}}}, {Name: "c18", Type: &schema.ColumnType{Raw: "numeric", Type: &schema.DecimalType{T: "numeric", Precision: 4, Scale: 4}}}, {Name: "c19", Type: &schema.ColumnType{Raw: "serial", Type: &SerialType{T: "serial", SequenceName: "t1_c19_seq"}}}, {Name: "c20", Type: &schema.ColumnType{Raw: "uuid", Type: &UUIDType{T: "uuid"}}}, {Name: "c21", Type: &schema.ColumnType{Raw: "xml", Type: &XMLType{T: "xml"}}}, {Name: "c22", Type: &schema.ColumnType{Raw: "ARRAY", Null: true, Type: &ArrayType{Type: &schema.IntegerType{T: "integer"}, T: "integer[]"}}}, {Name: "c23", Type: &schema.ColumnType{Raw: "USER-DEFINED", Null: true, Type: &UserDefinedType{T: "ltree"}}}, {Name: "c24", Type: &schema.ColumnType{Raw: "state", Type: &schema.EnumType{T: "state", Values: []string{"on", "off"}, Schema: t.Schema}}}, {Name: "c25", Type: &schema.ColumnType{Raw: "timestamp without time zone", Type: &schema.TimeType{T: "timestamp without time zone", Precision: p(4)}}, Default: &schema.RawExpr{X: "now()"}}, {Name: "c26", Type: 
&schema.ColumnType{Raw: "timestamp with time zone", Type: &schema.TimeType{T: "timestamp with time zone", Precision: p(6)}}}, {Name: "c27", Type: &schema.ColumnType{Raw: "time without time zone", Type: &schema.TimeType{T: "time without time zone", Precision: p(6)}}}, {Name: "c28", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}, Attrs: []schema.Attr{&schema.GeneratedExpr{Expr: "(c1 + c2)"}}}, {Name: "c29", Type: &schema.ColumnType{Raw: "interval", Type: &IntervalType{T: "interval", Precision: p(6)}}}, {Name: "c30", Type: &schema.ColumnType{Raw: "interval", Type: &IntervalType{T: "interval", F: "MONTH", Precision: p(6)}}}, {Name: "c31", Type: &schema.ColumnType{Raw: "interval", Type: &IntervalType{T: "interval", F: "MINUTE TO SECOND", Precision: p(6)}}}, {Name: "c32", Type: &schema.ColumnType{Raw: "bigserial", Type: &SerialType{T: "bigserial", SequenceName: "t1_c32_seq"}}}, {Name: "c33", Type: &schema.ColumnType{Raw: `status".`, Type: &schema.EnumType{T: `status".`, Values: []string{"unknown"}, Schema: schema.New("test")}}, Default: &schema.Literal{V: "'unknown'"}}, {Name: "c34", Type: &schema.ColumnType{Raw: "ARRAY", Type: &ArrayType{T: "state[]", Type: &schema.EnumType{T: "state", Values: []string{"on", "off"}, Schema: t.Schema}}}}, }, t.Columns) }, }, { name: "table indexes", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` table_name | column_name | data_type | formatted | is_nullable | column_default | character_maximum_length | numeric_precision | datetime_precision | numeric_scale | interval_type | character_set_name | collation_name | is_identity | identity_start | identity_increment | identity_last | identity_generation | generation_expression | comment | typtype | typelem | elemtyp | oid -----------+-------------+---------------------+-----------+--------------+---------------------------------+--------------------------+-------------------+--------------------+---------------+---------------+--------------------+----------------+-------------+----------------+--------------------+------------------+---------------------+-----------------------+---------+---------+---------+---------+------- users | id | bigint | int8 | NO | | | 64 | | 0 | | | | NO | | | | | | | b | | | 20 users | c1 | smallint | int2 | NO | | | 16 | | 0 | | | | NO | | | | | | | b | | | 21 users | parent_id | bigint | int8 | YES | | | 64 | | 0 | | | | NO | | | | | | | b | | | 22 `)) m.ExpectQuery(queryIndexes). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` table_name | index_name | index_type | column_name | included | primary | unique | constraint_type | predicate | expression | desc | nulls_first | nulls_last | comment | options ----------------+-----------------+-------------+-------------+----------+---------+--------+-----------------+-----------------------+---------------------------+------+-------------+------------+-----------+----------- users | idx | hash | | f | f | f | | | "left"((c11)::text, 100) | t | t | f | boring | users | idx1 | btree | | f | f | f | | (id <> NULL::integer) | "left"((c11)::text, 100) | t | t | f | | users | t1_c1_key | btree | c1 | f | f | t | u | | c1 | t | t | f | | users | t1_pkey | btree | id | f | t | t | p | | id | t | f | f | | users | idx4 | btree | c1 | f | f | t | | | c1 | f | f | f | | users | idx4 | btree | id | f | f | t | | | id | f | f | t | | users | idx5 | btree | c1 | f | f | t | | | c1 | f | f | f | | users | idx5 | btree | | f | f | t | | | coalesce(parent_id, 0) | f | f | f | | users | idx6 | brin | c1 | f | f | t | | | | f | f | f | | {autosummarize=true,pages_per_range=2} users | idx2 | btree | | f | f | f | | | ((c * 2)) | f | f | t | | users | idx2 | btree | c1 | f | f | f | | | c | f | f | t | | users | idx2 | btree | id | f | f | f | | | d | f | f | t | | users | idx2 | btree | c1 | t | f | f | | | c | | | | | users | idx2 | btree | parent_id | t | f | f | | | d | | | | | `)) m.noFKs() m.noChecks() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) columns := []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "c1", Type: &schema.ColumnType{Raw: "smallint", Type: &schema.IntegerType{T: "smallint"}}}, {Name: "parent_id", Type: &schema.ColumnType{Raw: "bigint", Null: true, Type: &schema.IntegerType{T: "bigint"}}}, } indexes := []*schema.Index{ {Name: "idx", Table: t, Attrs: 
[]schema.Attr{&IndexType{T: "hash"}, &schema.Comment{Text: "boring"}}, Parts: []*schema.IndexPart{{SeqNo: 1, X: &schema.RawExpr{X: `"left"((c11)::text, 100)`}, Desc: true, Attrs: []schema.Attr{&IndexColumnProperty{NullsFirst: true}}}}}, {Name: "idx1", Table: t, Attrs: []schema.Attr{&IndexType{T: "btree"}, &IndexPredicate{P: `(id <> NULL::integer)`}}, Parts: []*schema.IndexPart{{SeqNo: 1, X: &schema.RawExpr{X: `"left"((c11)::text, 100)`}, Desc: true, Attrs: []schema.Attr{&IndexColumnProperty{NullsFirst: true}}}}}, {Name: "t1_c1_key", Unique: true, Table: t, Attrs: []schema.Attr{&IndexType{T: "btree"}, &ConType{T: "u"}}, Parts: []*schema.IndexPart{{SeqNo: 1, C: columns[1], Desc: true, Attrs: []schema.Attr{&IndexColumnProperty{NullsFirst: true}}}}}, {Name: "idx4", Unique: true, Table: t, Attrs: []schema.Attr{&IndexType{T: "btree"}}, Parts: []*schema.IndexPart{{SeqNo: 1, C: columns[1]}, {SeqNo: 2, C: columns[0], Attrs: []schema.Attr{&IndexColumnProperty{NullsLast: true}}}}}, {Name: "idx5", Unique: true, Table: t, Attrs: []schema.Attr{&IndexType{T: "btree"}}, Parts: []*schema.IndexPart{{SeqNo: 1, C: columns[1]}, {SeqNo: 2, X: &schema.RawExpr{X: `coalesce(parent_id, 0)`}}}}, {Name: "idx6", Unique: true, Table: t, Attrs: []schema.Attr{&IndexType{T: "brin"}, &IndexStorageParams{AutoSummarize: true, PagesPerRange: 2}}, Parts: []*schema.IndexPart{{SeqNo: 1, C: columns[1]}}}, {Name: "idx2", Unique: false, Table: t, Attrs: []schema.Attr{&IndexType{T: "btree"}, &IndexInclude{Columns: columns[1:]}}, Parts: []*schema.IndexPart{{SeqNo: 1, X: &schema.RawExpr{X: `((c * 2))`}, Attrs: []schema.Attr{&IndexColumnProperty{NullsLast: true}}}, {SeqNo: 2, C: columns[1], Attrs: []schema.Attr{&IndexColumnProperty{NullsLast: true}}}, {SeqNo: 3, C: columns[0], Attrs: []schema.Attr{&IndexColumnProperty{NullsLast: true}}}}}, } pk := &schema.Index{ Name: "t1_pkey", Unique: true, Table: t, Attrs: []schema.Attr{&IndexType{T: "btree"}, &ConType{T: "p"}}, Parts: []*schema.IndexPart{{SeqNo: 1, C: 
columns[0], Desc: true}}, } columns[0].Indexes = append(columns[0].Indexes, pk, indexes[3], indexes[6]) columns[1].Indexes = indexes[2:] require.EqualValues(columns, t.Columns) require.EqualValues(indexes, t.Indexes) require.EqualValues(pk, t.PrimaryKey) }, }, { name: "fks", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). WillReturnRows(sqltest.Rows(` table_name | column_name | data_type | formatted | is_nullable | column_default | character_maximum_length | numeric_precision | datetime_precision | numeric_scale | interval_type | character_set_name | collation_name | is_identity | identity_start | identity_increment | identity_last | identity_generation | generation_expression | comment | typtype | typelem | elemtyp | oid -----------+-------------+---------------------+-----------+-------------+---------------------------------+--------------------------+-------------------+--------------------+---------------+---------------+--------------------+----------------+-------------+----------------+--------------------+------------------+---------------------+-----------------------+---------+---------+---------+---------+------- users | id | integer | int | NO | | | 32 | | 0 | | | | NO | | | | | | | b | | | 20 users | oid | integer | int | NO | | | 32 | | 0 | | | | NO | | | | | | | b | | | 21 users | uid | integer | int | NO | | | 32 | | 0 | | | | NO | | | | | | | b | | | 21 `)) m.noIndexes() m.ExpectQuery(queryFKs). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` constraint_name | table_name | column_name | table_schema | referenced_table_name | referenced_column_name | referenced_schema_name | update_rule | delete_rule -----------------+------------+-------------+--------------+-----------------------+------------------------+------------------------+-------------+------------- multi_column | users | id | public | t1 | gid | public | NO ACTION | CASCADE multi_column | users | id | public | t1 | xid | public | NO ACTION | CASCADE multi_column | users | oid | public | t1 | gid | public | NO ACTION | CASCADE multi_column | users | oid | public | t1 | xid | public | NO ACTION | CASCADE self_reference | users | uid | public | users | id | public | NO ACTION | CASCADE `)) m.noChecks() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.Equal("public", t.Schema.Name) fks := []*schema.ForeignKey{ {Symbol: "multi_column", Table: t, OnUpdate: schema.NoAction, OnDelete: schema.Cascade, RefTable: &schema.Table{Name: "t1", Schema: t.Schema}, RefColumns: []*schema.Column{{Name: "gid"}, {Name: "xid"}}}, {Symbol: "self_reference", Table: t, OnUpdate: schema.NoAction, OnDelete: schema.Cascade, RefTable: t}, } columns := []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Raw: "integer", Type: &schema.IntegerType{T: "integer"}}, ForeignKeys: fks[0:1]}, {Name: "oid", Type: &schema.ColumnType{Raw: "integer", Type: &schema.IntegerType{T: "integer"}}, ForeignKeys: fks[0:1]}, {Name: "uid", Type: &schema.ColumnType{Raw: "integer", Type: &schema.IntegerType{T: "integer"}}, ForeignKeys: fks[1:2]}, } fks[0].Columns = columns[:2] fks[1].Columns = columns[2:] fks[1].RefColumns = columns[:1] require.EqualValues(columns, t.Columns) require.EqualValues(fks, t.ForeignKeys) }, }, { name: "check", before: func(m mock) { m.tableExists("public", "users", true) m.ExpectQuery(queryColumns). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` table_name |column_name | data_type | formatted | is_nullable | column_default | character_maximum_length | numeric_precision | datetime_precision | numeric_scale | interval_type | character_set_name | collation_name | is_identity | identity_start | identity_increment | identity_last | identity_generation | generation_expression | comment | typtype | typelem | elemtyp | oid -----------+------------+-----------+-----------+-------------+----------------+--------------------------+-------------------+--------------------+---------------+---------------+--------------------+----------------+-------------+----------------+--------------------+------------------+---------------------+-----------------------+---------+---------+---------+---------+----- users | c1 | integer | int4 | NO | | | 32 | | 0 | | | | NO | | | | | | | b | | | 23 users | c2 | integer | int4 | NO | | | 32 | | 0 | | | | NO | | | | | | | b | | | 23 users | c3 | integer | int4 | NO | | | 32 | | 0 | | | | NO | | | | | | | b | | | 23 `)) m.noIndexes() m.noFKs() m.ExpectQuery(queryChecks). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` table_name | constraint_name | expression | column_name | column_indexes | no_inherit -------------+--------------------+-------------------------+-------------+----------------+---------------- users | boring | (c1 > 1) | c1 | {1} | t users | users_c2_check | (c2 > 0) | c2 | {2} | f users | users_c2_check1 | (c2 > 0) | c2 | {2} | f users | users_check | ((c2 + c1) > 2) | c2 | {2,1} | f users | users_check | ((c2 + c1) > 2) | c1 | {2,1} | f users | users_check1 | (((c2 + c1) + c3) > 10) | c2 | {2,1,3} | f users | users_check1 | (((c2 + c1) + c3) > 10) | c1 | {2,1,3} | f users | users_check1 | (((c2 + c1) + c3) > 10) | c3 | {2,1,3} | f `)) m.noChecks() }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) require.Equal("users", t.Name) require.Equal("public", t.Schema.Name) require.EqualValues([]*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Raw: "integer", Type: &schema.IntegerType{T: "integer"}}}, {Name: "c2", Type: &schema.ColumnType{Raw: "integer", Type: &schema.IntegerType{T: "integer"}}}, {Name: "c3", Type: &schema.ColumnType{Raw: "integer", Type: &schema.IntegerType{T: "integer"}}}, }, t.Columns) require.EqualValues([]schema.Attr{ &schema.Check{Name: "boring", Expr: "(c1 > 1)", Attrs: []schema.Attr{&CheckColumns{Columns: []string{"c1"}}, &NoInherit{}}}, &schema.Check{Name: "users_c2_check", Expr: "(c2 > 0)", Attrs: []schema.Attr{&CheckColumns{Columns: []string{"c2"}}}}, &schema.Check{Name: "users_c2_check1", Expr: "(c2 > 0)", Attrs: []schema.Attr{&CheckColumns{Columns: []string{"c2"}}}}, &schema.Check{Name: "users_check", Expr: "((c2 + c1) > 2)", Attrs: []schema.Attr{&CheckColumns{Columns: []string{"c2", "c1"}}}}, &schema.Check{Name: "users_check1", Expr: "(((c2 + c1) + c3) > 10)", Attrs: []schema.Attr{&CheckColumns{Columns: []string{"c2", "c1", "c3"}}}}, }, t.Attrs) }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { db, m, err := sqlmock.New() 
require.NoError(t, err) mk := mock{m} mk.version("130000") var drv migrate.Driver drv, err = Open(db) require.NoError(t, err) mk.ExpectQuery(sqltest.Escape(fmt.Sprintf(schemasQueryArgs, "= $1"))). WithArgs("public"). WillReturnRows(sqltest.Rows(` schema_name -------------------- public `)) tt.before(mk) s, err := drv.InspectSchema(context.Background(), "public", nil) require.NoError(t, err) tt.expect(require.New(t), s.Tables[0], err) }) } } func TestDriver_InspectPartitionedTable(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) mk := mock{m} mk.version("130000") drv, err := Open(db) require.NoError(t, err) mk.ExpectQuery(sqltest.Escape(fmt.Sprintf(schemasQueryArgs, "= CURRENT_SCHEMA()"))). WillReturnRows(sqltest.Rows(` schema_name -------------------- public `)) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(tablesQuery, "$1"))). WithArgs("public"). WillReturnRows(sqltest.Rows(` table_schema | table_name | comment | partition_attrs | partition_strategy | partition_exprs --------------+-------------+---------+-----------------+--------------------+---------------------------------------------------- public | logs1 | | | | public | logs2 | | 1 | r | public | logs3 | | 2 0 0 | l | (a + b), (a + (b * 2)) `)) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(columnsQuery, "$2, $3, $4"))). WithArgs("public", "logs1", "logs2", "logs3"). 
WillReturnRows(sqltest.Rows(` table_name |column_name | data_type | formatted | is_nullable | column_default | character_maximum_length | numeric_precision | datetime_precision | numeric_scale | interval_type | character_set_name | collation_name | is_identity | identity_start | identity_increment | identity_last | identity_generation | generation_expression | comment | typtype | typelem | elemtyp | oid -----------+------------+-----------+-----------+-------------+----------------+--------------------------+-------------------+--------------------+---------------+---------------+--------------------+----------------+-------------+----------------+--------------------+------------------+---------------------+-----------------------+---------+---------+---------+---------+----- logs1 | c1 | integer | integer | NO | | | 32 | | 0 | | | | NO | | | | | | | b | | | 23 logs2 | c2 | integer | integer | NO | | | 32 | | 0 | | | | NO | | | | | | | b | | | 23 logs2 | c3 | integer | integer | NO | | | 32 | | 0 | | | | NO | | | | | | | b | | | 23 logs3 | c4 | integer | integer | NO | | | 32 | | 0 | | | | NO | | | | | | | b | | | 23 logs3 | c5 | integer | integer | NO | | | 32 | | 0 | | | | NO | | | | | | | b | | | 23 `)) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(indexesQuery, "$2, $3, $4"))). WillReturnRows(sqlmock.NewRows([]string{"table_name", "index_name", "column_name", "primary", "unique", "constraint_type", "predicate", "expression"})) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(fksQuery, "$2, $3, $4"))). WillReturnRows(sqlmock.NewRows([]string{"constraint_name", "table_name", "column_name", "referenced_table_name", "referenced_column_name", "referenced_table_schema", "update_rule", "delete_rule"})) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(checksQuery, "$2, $3, $4"))). 
WillReturnRows(sqlmock.NewRows([]string{"table_name", "constraint_name", "expression", "column_name", "column_indexes"})) s, err := drv.InspectSchema(context.Background(), "", &schema.InspectOptions{}) require.NoError(t, err) t1, ok := s.Table("logs1") require.True(t, ok) require.Empty(t, t1.Attrs) t2, ok := s.Table("logs2") require.True(t, ok) require.Len(t, t2.Attrs, 1) key := t2.Attrs[0].(*Partition) require.Equal(t, PartitionTypeRange, key.T) require.Equal(t, []*PartitionPart{ {C: &schema.Column{Name: "c2", Type: &schema.ColumnType{Raw: "integer", Type: &schema.IntegerType{T: "integer"}}}}, }, key.Parts) t3, ok := s.Table("logs3") require.True(t, ok) require.Len(t, t3.Attrs, 1) key = t3.Attrs[0].(*Partition) require.Equal(t, PartitionTypeList, key.T) require.Equal(t, []*PartitionPart{ {C: &schema.Column{Name: "c5", Type: &schema.ColumnType{Raw: "integer", Type: &schema.IntegerType{T: "integer"}}}}, {X: &schema.RawExpr{X: "(a + b)"}}, {X: &schema.RawExpr{X: "(a + (b * 2))"}}, }, key.Parts) } func TestDriver_InspectCRDBSchema(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) mk := mock{m} mk.ExpectQuery(sqltest.Escape(paramsQuery)). WillReturnRows(sqltest.Rows(` setting ------------ 130000 en_US.utf8 en_US.utf8 cockroach `)) drv, err := Open(db) require.NoError(t, err) mk.ExpectQuery(sqltest.Escape(fmt.Sprintf(schemasQueryArgs, "= $1"))). WithArgs("public"). WillReturnRows(sqltest.Rows(` schema_name -------------------- public `)) mk.tableExists("public", "users", true) mk.ExpectQuery(queryCrdbColumns). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` table_name | column_name | data_type | formatted | is_nullable | column_default | character_maximum_length | numeric_precision | datetime_precision | numeric_scale | interval_type | character_set_name | collation_name | is_identity | identity_start | identity_increment | identity_last | identity_generation | generation_expression | comment | typtype | typelem | elemtyp | oid ------------+-------------+-----------+-----------+-------------+-------------------------------------------+--------------------------+-------------------+--------------------+---------------+---------------+--------------------+----------------|-------------+----------------+--------------------+------------------+-----------------------+-----------------------+---------+---------+---------+---------+----- users | a | bigint | bigint | NO | | | 64 | | 0 | | | | NO | | | | | | | b | | | 20 users | b | bigint | bigint | NO | | | 64 | | 0 | | | | NO | | | | | | | b | | | 20 users | c | bigint | bigint | NO | | | 64 | | 0 | | | | NO | | | | | | | b | | | 20 users | d | bigint | bigint | NO | | | 64 | | 0 | | | | NO | | | | | | | b | | | 20 `)) mk.ExpectQuery(queryCrdbIndexes). WithArgs("public", "users"). 
WillReturnRows(sqltest.Rows(` table_name | index_name | column_name | primary | unique | constraint_type | create_stmt | predicate | expression | comment ------------+------------+-------------+---------+--------+-----------------+---------------------------------------------------------------------------------+-----------+------------+--------- users | idx1 | a | false | false | | CREATE INDEX idx1 ON defaultdb.public.serial USING btree (a ASC) | | a | users | idx2 | b | false | true | u | CREATE UNIQUE INDEX idx2 ON defaultdb.public.serial USING btree (b ASC) | | b | users | idx3 | c | false | false | | CREATE INDEX idx3 ON defaultdb.public.serial USING btree (c DESC) | | c | boring users | idx4 | d | false | false | | CREATE INDEX idx5 ON defaultdb.public.serial USING btree (d ASC) WHERE (d < 10) | d < 10 | d | users | idx5 | a | false | false | | CREATE INDEX idx5 ON defaultdb.public.serial USING btree (a ASC, b ASC, c ASC) | | a | users | idx5 | b | false | false | | CREATE INDEX idx5 ON defaultdb.public.serial USING btree (a ASC, b ASC, c ASC) | | b | users | idx5 | c | false | false | | CREATE INDEX idx5 ON defaultdb.public.serial USING btree (a ASC, b ASC, c ASC) | | c | `)) mk.noFKs() mk.noChecks() s, err := drv.InspectSchema(context.Background(), "public", nil) require.NoError(t, err) tbl := s.Tables[0] require.Equal(t, "users", tbl.Name) columns := []*schema.Column{ {Name: "a", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "b", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "c", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}}, {Name: "d", Type: &schema.ColumnType{Raw: "bigint", Type: &schema.IntegerType{T: "bigint"}}}, } indexes := []*schema.Index{ {Name: "idx1", Table: tbl, Attrs: []schema.Attr{&IndexType{T: "btree"}}, Parts: []*schema.IndexPart{{SeqNo: 1, C: columns[0]}}}, {Name: "idx2", Unique: true, Table: tbl, Attrs: 
[]schema.Attr{&IndexType{T: "btree"}, &ConType{T: "u"}}, Parts: []*schema.IndexPart{{SeqNo: 1, C: columns[1]}}}, {Name: "idx3", Table: tbl, Attrs: []schema.Attr{&IndexType{T: "btree"}, &schema.Comment{Text: "boring"}}, Parts: []*schema.IndexPart{{SeqNo: 1, C: columns[2], Desc: true}}}, {Name: "idx4", Table: tbl, Attrs: []schema.Attr{&IndexType{T: "btree"}, &IndexPredicate{P: `d < 10`}}, Parts: []*schema.IndexPart{{SeqNo: 1, C: columns[3]}}}, {Name: "idx5", Table: tbl, Attrs: []schema.Attr{&IndexType{T: "btree"}}, Parts: []*schema.IndexPart{{SeqNo: 1, C: columns[0]}, {SeqNo: 2, C: columns[1]}, {SeqNo: 3, C: columns[2]}}}, } columns[0].Indexes = []*schema.Index{indexes[0], indexes[4]} columns[1].Indexes = []*schema.Index{indexes[1], indexes[4]} columns[2].Indexes = []*schema.Index{indexes[2], indexes[4]} columns[3].Indexes = []*schema.Index{indexes[3]} require.EqualValues(t, columns, tbl.Columns) require.EqualValues(t, indexes, tbl.Indexes) } func TestDriver_InspectSchema(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) mk := mock{m} mk.version("130000") drv, err := Open(db) require.NoError(t, err) mk.ExpectQuery(sqltest.Escape(fmt.Sprintf(schemasQueryArgs, "= CURRENT_SCHEMA()"))). WillReturnRows(sqltest.Rows(` schema_name -------------------- test `)) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(tablesQuery, "$1"))). WithArgs("test"). WillReturnRows(sqlmock.NewRows([]string{"table_schema", "table_name", "comment", "partition_attrs", "partition_strategy", "partition_exprs"})) s, err := drv.InspectSchema(context.Background(), "", &schema.InspectOptions{}) require.NoError(t, err) require.EqualValues(t, func() *schema.Schema { r := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "test", }, }, // Server default configuration. 
Attrs: []schema.Attr{ &schema.Collation{ V: "en_US.utf8", }, &CType{ V: "en_US.utf8", }, }, } r.Schemas[0].Realm = r return r.Schemas[0] }(), s) } func TestDriver_Realm(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) mk := mock{m} mk.version("130000") drv, err := Open(db) require.NoError(t, err) mk.ExpectQuery(sqltest.Escape(schemasQuery)). WillReturnRows(sqltest.Rows(` schema_name -------------------- test public `)) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(tablesQuery, "$1, $2"))). WithArgs("test", "public"). WillReturnRows(sqlmock.NewRows([]string{"table_schema", "table_name", "comment", "partition_attrs", "partition_strategy", "partition_exprs"})) realm, err := drv.InspectRealm(context.Background(), &schema.InspectRealmOption{}) require.NoError(t, err) require.EqualValues(t, func() *schema.Realm { r := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "test", }, { Name: "public", }, }, // Server default configuration. Attrs: []schema.Attr{ &schema.Collation{ V: "en_US.utf8", }, &CType{ V: "en_US.utf8", }, }, } r.Schemas[0].Realm = r r.Schemas[1].Realm = r return r }(), realm) mk.ExpectQuery(sqltest.Escape(fmt.Sprintf(schemasQueryArgs, "IN ($1, $2)"))). WithArgs("test", "public"). WillReturnRows(sqltest.Rows(` schema_name -------------------- test public `)) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(tablesQuery, "$1, $2"))). WithArgs("test", "public"). WillReturnRows(sqlmock.NewRows([]string{"table_schema", "table_name", "comment", "partition_attrs", "partition_strategy", "partition_exprs"})) realm, err = drv.InspectRealm(context.Background(), &schema.InspectRealmOption{Schemas: []string{"test", "public"}}) require.NoError(t, err) require.EqualValues(t, func() *schema.Realm { r := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "test", }, { Name: "public", }, }, // Server default configuration. 
Attrs: []schema.Attr{ &schema.Collation{ V: "en_US.utf8", }, &CType{ V: "en_US.utf8", }, }, } r.Schemas[0].Realm = r r.Schemas[1].Realm = r return r }(), realm) mk.ExpectQuery(sqltest.Escape(fmt.Sprintf(schemasQueryArgs, "= $1"))). WithArgs("test"). WillReturnRows(sqltest.Rows(` schema_name -------------------- test `)) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(tablesQuery, "$1"))). WithArgs("test"). WillReturnRows(sqlmock.NewRows([]string{"table_schema", "table_name", "comment", "partition_attrs", "partition_strategy", "partition_exprs"})) realm, err = drv.InspectRealm(context.Background(), &schema.InspectRealmOption{Schemas: []string{"test"}}) require.NoError(t, err) require.EqualValues(t, func() *schema.Realm { r := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "test", }, }, // Server default configuration. Attrs: []schema.Attr{ &schema.Collation{ V: "en_US.utf8", }, &CType{ V: "en_US.utf8", }, }, } r.Schemas[0].Realm = r return r }(), realm) } func TestInspectMode_InspectRealm(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) mk := mock{m} mk.version("130000") mk.ExpectQuery(sqltest.Escape(schemasQuery)). WillReturnRows(sqltest.Rows(` schema_name -------------------- test public `)) drv, err := Open(db) realm, err := drv.InspectRealm(context.Background(), &schema.InspectRealmOption{Mode: schema.InspectSchemas}) require.NoError(t, err) require.EqualValues(t, func() *schema.Realm { r := &schema.Realm{ Schemas: []*schema.Schema{ { Name: "test", }, { Name: "public", }, }, // Server default configuration. Attrs: []schema.Attr{ &schema.Collation{ V: "en_US.utf8", }, &CType{ V: "en_US.utf8", }, }, } r.Schemas[0].Realm = r r.Schemas[1].Realm = r return r }(), realm) } type mock struct { sqlmock.Sqlmock } func (m mock) version(version string) { m.ExpectQuery(sqltest.Escape(paramsQuery)). 
WillReturnRows(sqltest.Rows(` setting ------------ ` + version + ` en_US.utf8 en_US.utf8 `)) } func (m mock) tableExists(schema, table string, exists bool) { rows := sqlmock.NewRows([]string{"table_schema", "table_name", "table_comment", "partition_attrs", "partition_strategy", "partition_exprs"}) if exists { rows.AddRow(schema, table, nil, nil, nil, nil) } m.ExpectQuery(queryTables). WithArgs(schema). WillReturnRows(rows) } func (m mock) noIndexes() { m.ExpectQuery(queryIndexes). WillReturnRows(sqlmock.NewRows([]string{"table_name", "index_name", "column_name", "primary", "unique", "constraint_type", "predicate", "expression", "options"})) } func (m mock) noFKs() { m.ExpectQuery(queryFKs). WillReturnRows(sqlmock.NewRows([]string{"constraint_name", "table_name", "column_name", "referenced_table_name", "referenced_column_name", "referenced_table_schema", "update_rule", "delete_rule"})) } func (m mock) noChecks() { m.ExpectQuery(queryChecks). WillReturnRows(sqlmock.NewRows([]string{"table_name", "constraint_name", "expression", "column_name", "column_indexes"})) } atlas-0.7.2/sql/postgres/migrate.go000066400000000000000000001066001431455511600172600ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package postgres import ( "context" "errors" "fmt" "strconv" "strings" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" ) // A planApply provides migration capabilities for schema elements. type planApply struct{ conn } // PlanChanges returns a migration plan for the given schema changes. 
func (p *planApply) PlanChanges(ctx context.Context, name string, changes []schema.Change, opts ...migrate.PlanOption) (*migrate.Plan, error) { s := &state{ conn: p.conn, Plan: migrate.Plan{ Name: name, Reversible: true, Transactional: true, }, created: make(map[string]*schema.EnumType), altered: make(map[string]*schema.EnumType), dropped: make(map[string]*schema.EnumType), } for _, o := range opts { o(&s.PlanOptions) } if err := s.plan(ctx, changes); err != nil { return nil, err } for _, c := range s.Changes { if c.Reverse == "" { s.Reversible = false } } return &s.Plan, nil } // ApplyChanges applies the changes on the database. An error is returned // if the driver is unable to produce a plan to do so, or one of the statements // is failed or unsupported. func (p *planApply) ApplyChanges(ctx context.Context, changes []schema.Change, opts ...migrate.PlanOption) error { return sqlx.ApplyChanges(ctx, changes, p, opts...) } // state represents the state of a planning. It is not part of // planApply so that multiple planning/applying can be called // in parallel. type state struct { conn migrate.Plan migrate.PlanOptions // Track the enums that were created, altered and // dropped, in this phase to avoid duplicate updates. created, altered, dropped map[string]*schema.EnumType } // Exec executes the changes on the database. An error is returned // if one of the operations fail, or a change is not supported. 
func (s *state) plan(ctx context.Context, changes []schema.Change) error { if s.SchemaQualifier != nil { if err := sqlx.CheckChangesScope(changes); err != nil { return err } } planned := s.topLevel(changes) planned, err := sqlx.DetachCycles(planned) if err != nil { return err } for _, c := range planned { switch c := c.(type) { case *schema.AddTable: err = s.addTable(ctx, c) case *schema.DropTable: s.dropTable(c) case *schema.ModifyTable: err = s.modifyTable(ctx, c) case *schema.RenameTable: s.renameTable(c) default: err = fmt.Errorf("unsupported change %T", c) } if err != nil { return err } } return nil } // topLevel executes first the changes for creating or dropping schemas (top-level schema elements). func (s *state) topLevel(changes []schema.Change) []schema.Change { planned := make([]schema.Change, 0, len(changes)) for _, c := range changes { switch c := c.(type) { case *schema.AddSchema: b := s.Build("CREATE SCHEMA") if sqlx.Has(c.Extra, &schema.IfNotExists{}) { b.P("IF NOT EXISTS") } b.Ident(c.S.Name) s.append(&migrate.Change{ Cmd: b.String(), Source: c, Reverse: s.Build("DROP SCHEMA").Ident(c.S.Name).P("CASCADE").String(), Comment: fmt.Sprintf("Add new schema named %q", c.S.Name), }) case *schema.DropSchema: b := s.Build("DROP SCHEMA") if sqlx.Has(c.Extra, &schema.IfExists{}) { b.P("IF EXISTS") } b.Ident(c.S.Name).P("CASCADE") s.append(&migrate.Change{ Cmd: b.String(), Source: c, Comment: fmt.Sprintf("Drop schema named %q", c.S.Name), }) default: planned = append(planned, c) } } return planned } // addTable builds and executes the query for creating a table in a schema. func (s *state) addTable(ctx context.Context, add *schema.AddTable) error { // Create enum types before using them in the `CREATE TABLE` statement. 
if err := s.mayAddEnums(ctx, add.T, add.T.Columns...); err != nil { return err } var ( errs []string b = s.Build("CREATE TABLE") ) if sqlx.Has(add.Extra, &schema.IfNotExists{}) { b.P("IF NOT EXISTS") } b.Table(add.T) b.Wrap(func(b *sqlx.Builder) { b.MapComma(add.T.Columns, func(i int, b *sqlx.Builder) { if err := s.column(b, add.T, add.T.Columns[i]); err != nil { errs = append(errs, err.Error()) } }) if pk := add.T.PrimaryKey; pk != nil { b.Comma().P("PRIMARY KEY") s.indexParts(b, pk.Parts) } if len(add.T.ForeignKeys) > 0 { b.Comma() s.fks(b, add.T.ForeignKeys...) } for _, attr := range add.T.Attrs { if c, ok := attr.(*schema.Check); ok { b.Comma() check(b, c) } } }) if p := (Partition{}); sqlx.Has(add.T.Attrs, &p) { s, err := formatPartition(p) if err != nil { errs = append(errs, err.Error()) } b.P(s) } if len(errs) > 0 { return fmt.Errorf("create table %q: %s", add.T.Name, strings.Join(errs, ", ")) } s.append(&migrate.Change{ Cmd: b.String(), Source: add, Comment: fmt.Sprintf("create %q table", add.T.Name), Reverse: s.Build("DROP TABLE").Table(add.T).String(), }) s.addIndexes(add.T, add.T.Indexes...) s.addComments(add.T) return nil } // dropTable builds and executes the query for dropping a table from a schema. func (s *state) dropTable(drop *schema.DropTable) { b := s.Build("DROP TABLE") if sqlx.Has(drop.Extra, &schema.IfExists{}) { b.P("IF EXISTS") } b.Table(drop.T) s.append(&migrate.Change{ Cmd: b.String(), Source: drop, Comment: fmt.Sprintf("drop %q table", drop.T.Name), }) } // modifyTable builds the statements that bring the table into its modified state. 
func (s *state) modifyTable(ctx context.Context, modify *schema.ModifyTable) error { var ( alter []schema.Change addI, dropI []*schema.Index changes []*migrate.Change ) for _, change := range skipAutoChanges(modify.Changes) { switch change := change.(type) { case *schema.AddAttr, *schema.ModifyAttr: from, to, err := commentChange(change) if err != nil { return err } changes = append(changes, s.tableComment(modify.T, to, from)) case *schema.DropAttr: return fmt.Errorf("unsupported change type: %T", change) case *schema.AddIndex: if c := (schema.Comment{}); sqlx.Has(change.I.Attrs, &c) { changes = append(changes, s.indexComment(modify.T, change.I, c.Text, "")) } addI = append(addI, change.I) case *schema.DropIndex: // Unlike DROP INDEX statements that are executed separately, // DROP CONSTRAINT are added to the ALTER TABLE statement below. if isUniqueConstraint(change.I) { alter = append(alter, change) } else { dropI = append(dropI, change.I) } case *schema.ModifyIndex: k := change.Change if change.Change.Is(schema.ChangeComment) { from, to, err := commentChange(sqlx.CommentDiff(change.From.Attrs, change.To.Attrs)) if err != nil { return err } changes = append(changes, s.indexComment(modify.T, change.To, to, from)) // If only the comment of the index was changed. if k &= ^schema.ChangeComment; k.Is(schema.NoChange) { continue } } // Index modification requires rebuilding the index. addI = append(addI, change.To) dropI = append(dropI, change.From) case *schema.RenameIndex: changes = append(changes, &migrate.Change{ Source: change, Comment: fmt.Sprintf("rename an index from %q to %q", change.From.Name, change.To.Name), Cmd: s.Build("ALTER INDEX").Ident(change.From.Name).P("RENAME TO").Ident(change.To.Name).String(), Reverse: s.Build("ALTER INDEX").Ident(change.To.Name).P("RENAME TO").Ident(change.From.Name).String(), }) case *schema.ModifyForeignKey: // Foreign-key modification is translated into 2 steps. // Dropping the current foreign key and creating a new one. 
alter = append(alter, &schema.DropForeignKey{ F: change.From, }, &schema.AddForeignKey{ F: change.To, }) case *schema.AddColumn: if err := s.mayAddEnums(ctx, modify.T, change.C); err != nil { return err } if c := (schema.Comment{}); sqlx.Has(change.C.Attrs, &c) { changes = append(changes, s.columnComment(modify.T, change.C, c.Text, "")) } alter = append(alter, change) case *schema.ModifyColumn: k := change.Change if change.Change.Is(schema.ChangeComment) { from, to, err := commentChange(sqlx.CommentDiff(change.From.Attrs, change.To.Attrs)) if err != nil { return err } changes = append(changes, s.columnComment(modify.T, change.To, to, from)) // If only the comment of the column was changed. if k &= ^schema.ChangeComment; k.Is(schema.NoChange) { continue } } from, ok1 := hasEnumType(change.From) to, ok2 := hasEnumType(change.To) switch { // Enum was changed (underlying values). case ok1 && ok2 && s.enumIdent(modify.T.Schema, from) == s.enumIdent(modify.T.Schema, to): if err := s.alterEnum(modify.T, from, to); err != nil { return err } // If only the enum values were changed, // there is no need to ALTER the table. if k == schema.ChangeType { continue } // Enum was added or changed. case !ok1 && ok2 || ok1 && ok2 && s.enumIdent(modify.T.Schema, from) != s.enumIdent(modify.T.Schema, to): if err := s.mayAddEnums(ctx, modify.T, change.To); err != nil { return err } } alter = append(alter, &schema.ModifyColumn{To: change.To, From: change.From, Change: k}) case *schema.RenameColumn: // "RENAME COLUMN" cannot be combined with other alterations. 
b := s.Build("ALTER TABLE").Table(modify.T).P("RENAME COLUMN") r := b.Clone() changes = append(changes, &migrate.Change{ Source: change, Comment: fmt.Sprintf("rename a column from %q to %q", change.From.Name, change.To.Name), Cmd: b.Ident(change.From.Name).P("TO").Ident(change.To.Name).String(), Reverse: r.Ident(change.To.Name).P("TO").Ident(change.From.Name).String(), }) default: alter = append(alter, change) } } s.dropIndexes(modify.T, dropI...) if len(alter) > 0 { if err := s.alterTable(modify.T, alter); err != nil { return err } } s.addIndexes(modify.T, addI...) s.append(changes...) return nil } // alterTable modifies the given table by executing on it a list of changes in one SQL statement. func (s *state) alterTable(t *schema.Table, changes []schema.Change) error { var ( reverse []schema.Change reversible = true ) build := func(alter *alterChange, changes []schema.Change) (string, error) { b := s.Build("ALTER TABLE").Table(t) err := b.MapCommaErr(changes, func(i int, b *sqlx.Builder) error { switch change := changes[i].(type) { case *schema.AddColumn: b.P("ADD COLUMN") if err := s.column(b, t, change.C); err != nil { return err } reverse = append(reverse, &schema.DropColumn{C: change.C}) case *schema.ModifyColumn: if err := s.alterColumn(b, alter, t, change); err != nil { return err } if change.Change.Is(schema.ChangeGenerated) { reversible = false } reverse = append(reverse, &schema.ModifyColumn{ From: change.To, To: change.From, Change: change.Change & ^schema.ChangeGenerated, }) toE, toHas := hasEnumType(change.To) fromE, fromHas := hasEnumType(change.From) // In case the enum was dropped or replaced with a different one. 
if fromHas && !toHas || fromHas && toHas && s.enumIdent(t.Schema, fromE) != s.enumIdent(t.Schema, toE) { if err := s.mayDropEnum(alter, t.Schema, fromE); err != nil { return err } } case *schema.DropColumn: b.P("DROP COLUMN").Ident(change.C.Name) reverse = append(reverse, &schema.AddColumn{C: change.C}) if e, ok := hasEnumType(change.C); ok { if err := s.mayDropEnum(alter, t.Schema, e); err != nil { return err } } case *schema.AddIndex: b.P("ADD CONSTRAINT").Ident(change.I.Name).P("UNIQUE") s.indexParts(b, change.I.Parts) // Skip reversing this operation as it is the inverse of // the operation below and should not be used besides this. case *schema.DropIndex: b.P("DROP CONSTRAINT").Ident(change.I.Name) reverse = append(reverse, &schema.AddIndex{I: change.I}) case *schema.AddForeignKey: b.P("ADD") s.fks(b, change.F) reverse = append(reverse, &schema.DropForeignKey{F: change.F}) case *schema.DropForeignKey: b.P("DROP CONSTRAINT").Ident(change.F.Symbol) reverse = append(reverse, &schema.AddForeignKey{F: change.F}) case *schema.AddCheck: check(b.P("ADD"), change.C) // Reverse operation is supported if // the constraint name is not generated. 
if reversible = reversible && change.C.Name != ""; reversible { reverse = append(reverse, &schema.DropCheck{C: change.C}) } case *schema.DropCheck: b.P("DROP CONSTRAINT").Ident(change.C.Name) reverse = append(reverse, &schema.AddCheck{C: change.C}) case *schema.ModifyCheck: switch { case change.From.Name == "": return errors.New("cannot modify unnamed check constraint") case change.From.Name != change.To.Name: return fmt.Errorf("mismatch check constraint names: %q != %q", change.From.Name, change.To.Name) case change.From.Expr != change.To.Expr, sqlx.Has(change.From.Attrs, &NoInherit{}) && !sqlx.Has(change.To.Attrs, &NoInherit{}), !sqlx.Has(change.From.Attrs, &NoInherit{}) && sqlx.Has(change.To.Attrs, &NoInherit{}): b.P("DROP CONSTRAINT").Ident(change.From.Name).Comma().P("ADD") check(b, change.To) default: return errors.New("unknown check constraint change") } reverse = append(reverse, &schema.ModifyCheck{ From: change.To, To: change.From, }) } return nil }) if err != nil { return "", err } return b.String(), nil } cmd := &alterChange{} stmt, err := build(cmd, changes) if err != nil { return fmt.Errorf("alter table %q: %v", t.Name, err) } cmd.main = &migrate.Change{ Cmd: stmt, Source: &schema.ModifyTable{ T: t, Changes: changes, }, Comment: fmt.Sprintf("modify %q table", t.Name), } if reversible { // Changes should be reverted in // a reversed order they were created. sqlx.ReverseChanges(reverse) if cmd.main.Reverse, err = build(&alterChange{}, reverse); err != nil { return fmt.Errorf("reverse alter table %q: %v", t.Name, err) } } cmd.append(s) return nil } // alterChange describes an alter table migrate.Change where its main command // can be supported by additional statements before and after it is executed. type alterChange struct { main *migrate.Change before, after []*migrate.Change } func (a *alterChange) append(s *state) { s.append(a.before...) s.append(a.main) s.append(a.after...) 
} func (s *state) alterColumn(b *sqlx.Builder, alter *alterChange, t *schema.Table, c *schema.ModifyColumn) error { for k := c.Change; !k.Is(schema.NoChange); { b.P("ALTER COLUMN").Ident(c.To.Name) switch { case k.Is(schema.ChangeType): if err := s.alterType(b, alter, t, c); err != nil { return err } k &= ^schema.ChangeType case k.Is(schema.ChangeNull) && c.To.Type.Null: if t, ok := c.To.Type.Type.(*SerialType); ok { return fmt.Errorf("NOT NULL constraint is required for %s column %q", t.T, c.To.Name) } b.P("DROP NOT NULL") k &= ^schema.ChangeNull case k.Is(schema.ChangeNull) && !c.To.Type.Null: b.P("SET NOT NULL") k &= ^schema.ChangeNull case k.Is(schema.ChangeDefault) && c.To.Default == nil: b.P("DROP DEFAULT") k &= ^schema.ChangeDefault case k.Is(schema.ChangeDefault) && c.To.Default != nil: s.columnDefault(b.P("SET"), c.To) k &= ^schema.ChangeDefault case k.Is(schema.ChangeAttr): toI, ok := identity(c.To.Attrs) if !ok { return fmt.Errorf("unexpected attribute change (expect IDENTITY): %v", c.To.Attrs) } // The syntax for altering identity columns is identical to sequence_options. // https://www.postgresql.org/docs/current/sql-altersequence.html b.P("SET GENERATED", toI.Generation, "SET START WITH", strconv.FormatInt(toI.Sequence.Start, 10), "SET INCREMENT BY", strconv.FormatInt(toI.Sequence.Increment, 10)) // Skip SEQUENCE RESTART in case the "start value" is less than the "current value" in one // of the states (inspected and desired), because this function is used for both UP and DOWN. if fromI, ok := identity(c.From.Attrs); (!ok || fromI.Sequence.Last < toI.Sequence.Start) && toI.Sequence.Last < toI.Sequence.Start { b.P("RESTART") } k &= ^schema.ChangeAttr case k.Is(schema.ChangeGenerated): if sqlx.Has(c.To.Attrs, &schema.GeneratedExpr{}) { return fmt.Errorf("unexpected generation expression change (expect DROP EXPRESSION): %v", c.To.Attrs) } b.P("DROP EXPRESSION") k &= ^schema.ChangeGenerated default: // e.g. schema.ChangeComment. 
return fmt.Errorf("unexpected column change: %d", k) } if !k.Is(schema.NoChange) { b.Comma() } } return nil } // alterType appends the clause(s) to alter the column type and assuming the // "ALTER COLUMN " was called before by the alterColumn function. func (s *state) alterType(b *sqlx.Builder, alter *alterChange, t *schema.Table, c *schema.ModifyColumn) error { // Commands for creating and dropping serial sequences. createDropSeq := func(st *SerialType) (string, string, string) { seq := fmt.Sprintf(`%s%q`, s.schemaPrefix(t.Schema), st.sequence(t, c.To)) drop := s.Build("DROP SEQUENCE IF EXISTS").P(seq).String() create := s.Build("CREATE SEQUENCE IF NOT EXISTS").P(seq, "OWNED BY"). P(fmt.Sprintf(`%s%q.%q`, s.schemaPrefix(t.Schema), t.Name, c.To.Name)). String() return create, drop, seq } toS, toHas := c.To.Type.Type.(*SerialType) fromS, fromHas := c.From.Type.Type.(*SerialType) switch { // Sequence was dropped. case fromHas && !toHas: b.P("DROP DEFAULT") create, drop, _ := createDropSeq(fromS) // Sequence should be deleted after it was dropped // from the DEFAULT value. alter.after = append(alter.after, &migrate.Change{ Source: c, Comment: fmt.Sprintf("drop sequence used by serial column %q", c.From.Name), Cmd: drop, Reverse: create, }) toT, err := FormatType(c.To.Type.Type) if err != nil { return err } fromT, err := FormatType(fromS.IntegerType()) if err != nil { return err } // Underlying type was changed. e.g. serial to bigint. if toT != fromT { b.Comma().P("ALTER COLUMN").Ident(c.To.Name).P("TYPE", toT) } // Sequence was added. case !fromHas && toHas: create, drop, seq := createDropSeq(toS) // Sequence should be created before it is used by the // column DEFAULT value. 
alter.before = append(alter.before, &migrate.Change{ Source: c, Comment: fmt.Sprintf("create sequence for serial column %q", c.To.Name), Cmd: create, Reverse: drop, }) b.P("SET DEFAULT", fmt.Sprintf("nextval('%s')", seq)) toT, err := FormatType(toS.IntegerType()) if err != nil { return err } fromT, err := FormatType(c.From.Type.Type) if err != nil { return err } // Underlying type was changed. e.g. integer to bigserial (bigint). if toT != fromT { b.Comma().P("ALTER COLUMN").Ident(c.To.Name).P("TYPE", toT) } // Serial type was changed. e.g. serial to bigserial. case fromHas && toHas: f, err := FormatType(toS.IntegerType()) if err != nil { return err } b.P("TYPE", f) default: var ( f string err error ) if e, ok := c.To.Type.Type.(*schema.EnumType); ok { f = s.enumIdent(t.Schema, e) } else if f, err = FormatType(c.To.Type.Type); err != nil { return err } b.P("TYPE", f) } if collate := (schema.Collation{}); sqlx.Has(c.To.Attrs, &collate) { b.P("COLLATE", collate.V) } return nil } func (s *state) renameTable(c *schema.RenameTable) { s.append(&migrate.Change{ Source: c, Comment: fmt.Sprintf("rename a table from %q to %q", c.From.Name, c.To.Name), Cmd: s.Build("ALTER TABLE").Table(c.From).P("RENAME TO").Table(c.To).String(), Reverse: s.Build("ALTER TABLE").Table(c.To).P("RENAME TO").Table(c.From).String(), }) } func (s *state) addComments(t *schema.Table) { var c schema.Comment if sqlx.Has(t.Attrs, &c) && c.Text != "" { s.append(s.tableComment(t, c.Text, "")) } for i := range t.Columns { if sqlx.Has(t.Columns[i].Attrs, &c) && c.Text != "" { s.append(s.columnComment(t, t.Columns[i], c.Text, "")) } } for i := range t.Indexes { if sqlx.Has(t.Indexes[i].Attrs, &c) && c.Text != "" { s.append(s.indexComment(t, t.Indexes[i], c.Text, "")) } } } func (s *state) tableComment(t *schema.Table, to, from string) *migrate.Change { b := s.Build("COMMENT ON TABLE").Table(t).P("IS") return &migrate.Change{ Cmd: b.Clone().P(quote(to)).String(), Comment: fmt.Sprintf("set comment to table: 
%q", t.Name), Reverse: b.Clone().P(quote(from)).String(), } } func (s *state) columnComment(t *schema.Table, c *schema.Column, to, from string) *migrate.Change { b := s.Build("COMMENT ON COLUMN").Table(t) b.WriteByte('.') b.Ident(c.Name).P("IS") return &migrate.Change{ Cmd: b.Clone().P(quote(to)).String(), Comment: fmt.Sprintf("set comment to column: %q on table: %q", c.Name, t.Name), Reverse: b.Clone().P(quote(from)).String(), } } func (s *state) indexComment(t *schema.Table, idx *schema.Index, to, from string) *migrate.Change { b := s.Build("COMMENT ON INDEX").Ident(idx.Name).P("IS") return &migrate.Change{ Cmd: b.Clone().P(quote(to)).String(), Comment: fmt.Sprintf("set comment to index: %q on table: %q", idx.Name, t.Name), Reverse: b.Clone().P(quote(from)).String(), } } func (s *state) dropIndexes(t *schema.Table, indexes ...*schema.Index) { rs := &state{conn: s.conn} rs.addIndexes(t, indexes...) for i, idx := range indexes { s.append(&migrate.Change{ Cmd: rs.Changes[i].Reverse, Comment: fmt.Sprintf("drop index %q from table: %q", idx.Name, t.Name), Reverse: rs.Changes[i].Cmd, }) } } func (s *state) mayAddEnums(ctx context.Context, t *schema.Table, columns ...*schema.Column) error { for _, c := range columns { e, ok := hasEnumType(c) if !ok { continue } if e.T == "" { return fmt.Errorf("missing enum name for column %q", c.Name) } if exists, err := s.enumExists(ctx, t.Schema, e); err != nil { return err } else if exists { // Enum exists and was not created // on this migration phase. 
continue } name := s.enumIdent(t.Schema, e) if prev, ok := s.created[name]; ok { if !sqlx.ValuesEqual(prev.Values, e.Values) { return fmt.Errorf("enum type %s has inconsistent desired state: %q != %q", name, prev.Values, e.Values) } continue } s.created[name] = e create, drop := s.createDropEnum(t.Schema, e) s.append(&migrate.Change{ Cmd: create, Reverse: drop, Comment: fmt.Sprintf("create enum type %q", e.T), }) } return nil } func (s *state) alterEnum(t *schema.Table, from, to *schema.EnumType) error { if len(from.Values) > len(to.Values) { return fmt.Errorf("dropping enum (%q) value is not supported", from.T) } for i := range from.Values { if from.Values[i] != to.Values[i] { return fmt.Errorf("replacing or reordering enum (%q) value is not supported: %q != %q", to.T, to.Values, from.Values) } } name := s.enumIdent(t.Schema, from) if prev, ok := s.altered[name]; ok { if !sqlx.ValuesEqual(prev.Values, to.Values) { return fmt.Errorf("enum type %s has inconsistent desired state: %q != %q", name, prev.Values, to.Values) } return nil } s.altered[name] = to for _, v := range to.Values[len(from.Values):] { s.append(&migrate.Change{ Cmd: s.Build("ALTER TYPE").P(name, "ADD VALUE", quote(v)).String(), Comment: fmt.Sprintf("add value to enum type: %q", from.T), }) } return nil } func (s *state) enumExists(ctx context.Context, ns *schema.Schema, e *schema.EnumType) (bool, error) { query, args := `SELECT * FROM pg_type t JOIN pg_namespace n on t.typnamespace = n.oid WHERE t.typname = $1 AND t.typtype = 'e'`, []any{e.T} if es := s.enumSchema(ns, e); es != "" { query += " AND n.nspname = $2" args = append(args, es) } rows, err := s.QueryContext(ctx, query, args...) if err != nil { return false, fmt.Errorf("check enum existence: %w", err) } defer rows.Close() return rows.Next(), rows.Err() } // mayDropEnum drops dangling enum types form the schema. 
func (s *state) mayDropEnum(alter *alterChange, ns *schema.Schema, e *schema.EnumType) error { name := s.enumIdent(ns, e) if _, ok := s.dropped[name]; ok { return nil } schemas := []*schema.Schema{ns} // In case there is a realm attached, traverse the entire tree. if ns.Realm != nil && len(ns.Realm.Schemas) > 0 { schemas = ns.Realm.Schemas } for i := range schemas { for _, t := range schemas[i].Tables { for _, c := range t.Columns { e1, ok := hasEnumType(c) // Although we search in siblings schemas, use the // table's one for building the enum identifier. if ok && s.enumIdent(ns, e1) == name { return nil } } } } s.dropped[name] = e create, drop := s.createDropEnum(ns, e) alter.after = append(alter.after, &migrate.Change{ Cmd: drop, Reverse: create, Comment: fmt.Sprintf("drop enum type %q", e.T), }) return nil } func (s *state) addIndexes(t *schema.Table, indexes ...*schema.Index) { for _, idx := range indexes { b := s.Build("CREATE") if idx.Unique { b.P("UNIQUE") } b.P("INDEX") if c := (Concurrently{}); sqlx.Has(idx.Attrs, &c) { b.P("CONCURRENTLY") } if idx.Name != "" { b.Ident(idx.Name) } b.P("ON").Table(t) s.index(b, idx) s.append(&migrate.Change{ Cmd: b.String(), Comment: fmt.Sprintf("create index %q to table: %q", idx.Name, t.Name), Reverse: func() string { b := s.Build("DROP INDEX") if c := (Concurrently{}); sqlx.Has(idx.Attrs, &c) { b.P("CONCURRENTLY") } // Unlike MySQL, the DROP command is not attached to ALTER TABLE. // Therefore, we print indexes with their qualified name, because // the connection that executes the statements may not be attached // to this schema. 
if t.Schema != nil { b.WriteString(s.schemaPrefix(t.Schema)) } b.Ident(idx.Name) return b.String() }(), }) } } func (s *state) column(b *sqlx.Builder, t *schema.Table, c *schema.Column) error { f, err := s.formatType(t, c) if err != nil { return err } b.Ident(c.Name).P(f) if !c.Type.Null { b.P("NOT") } else if t, ok := c.Type.Type.(*SerialType); ok { return fmt.Errorf("NOT NULL constraint is required for %s column %q", t.T, c.Name) } b.P("NULL") s.columnDefault(b, c) for _, attr := range c.Attrs { switch a := attr.(type) { case *schema.Comment: case *schema.Collation: b.P("COLLATE").Ident(a.V) case *Identity, *schema.GeneratedExpr: // Handled below. default: return fmt.Errorf("unexpected column attribute: %T", attr) } } switch hasI, hasX := sqlx.Has(c.Attrs, &Identity{}), sqlx.Has(c.Attrs, &schema.GeneratedExpr{}); { case hasI && hasX: return fmt.Errorf("both identity and generation expression specified for column %q", c.Name) case hasI: id, _ := identity(c.Attrs) b.P("GENERATED", id.Generation, "AS IDENTITY") if id.Sequence.Start != defaultSeqStart || id.Sequence.Increment != defaultSeqIncrement { b.Wrap(func(b *sqlx.Builder) { if id.Sequence.Start != defaultSeqStart { b.P("START WITH", strconv.FormatInt(id.Sequence.Start, 10)) } if id.Sequence.Increment != defaultSeqIncrement { b.P("INCREMENT BY", strconv.FormatInt(id.Sequence.Increment, 10)) } }) } case hasX: x := &schema.GeneratedExpr{} sqlx.Has(c.Attrs, x) b.P("GENERATED ALWAYS AS", sqlx.MayWrap(x.Expr), "STORED") } return nil } // columnDefault writes the default value of column to the builder. func (s *state) columnDefault(b *sqlx.Builder, c *schema.Column) { switch x := c.Default.(type) { case *schema.Literal: v := x.V switch c.Type.Type.(type) { case *schema.BoolType, *schema.DecimalType, *schema.IntegerType, *schema.FloatType: default: v = quote(v) } b.P("DEFAULT", v) case *schema.RawExpr: // Ignore identity functions added by the differ. 
if _, ok := c.Type.Type.(*SerialType); !ok { b.P("DEFAULT", x.X) } } } func (s *state) indexParts(b *sqlx.Builder, parts []*schema.IndexPart) { b.Wrap(func(b *sqlx.Builder) { b.MapComma(parts, func(i int, b *sqlx.Builder) { switch part := parts[i]; { case part.C != nil: b.Ident(part.C.Name) case part.X != nil: b.WriteString(sqlx.MayWrap(part.X.(*schema.RawExpr).X)) } s.partAttrs(b, parts[i]) }) }) } func (s *state) partAttrs(b *sqlx.Builder, p *schema.IndexPart) { if p.Desc { b.P("DESC") } for _, attr := range p.Attrs { switch attr := attr.(type) { case *IndexColumnProperty: switch { // Defaults when DESC is specified. case p.Desc && attr.NullsFirst: case p.Desc && attr.NullsLast: b.P("NULL LAST") // Defaults when DESC is not specified. case !p.Desc && attr.NullsLast: case !p.Desc && attr.NullsFirst: b.P("NULL FIRST") } case *schema.Collation: b.P("COLLATE").Ident(attr.V) default: panic(fmt.Sprintf("unexpected index part attribute: %T", attr)) } } } func (s *state) index(b *sqlx.Builder, idx *schema.Index) { // Avoid appending the default method. 
if t := (IndexType{}); sqlx.Has(idx.Attrs, &t) && strings.ToUpper(t.T) != IndexTypeBTree { b.P("USING", t.T) } s.indexParts(b, idx.Parts) if c := (IndexInclude{}); sqlx.Has(idx.Attrs, &c) { b.P("INCLUDE") b.Wrap(func(b *sqlx.Builder) { b.MapComma(c.Columns, func(i int, b *sqlx.Builder) { b.Ident(c.Columns[i].Name) }) }) } if p, ok := indexStorageParams(idx.Attrs); ok { b.P("WITH") b.Wrap(func(b *sqlx.Builder) { var parts []string if p.AutoSummarize { parts = append(parts, "autosummarize = true") } if p.PagesPerRange != 0 && p.PagesPerRange != defaultPagePerRange { parts = append(parts, fmt.Sprintf("pages_per_range = %d", p.PagesPerRange)) } b.WriteString(strings.Join(parts, ", ")) }) } if p := (IndexPredicate{}); sqlx.Has(idx.Attrs, &p) { b.P("WHERE").P(p.P) } for _, attr := range idx.Attrs { switch attr.(type) { case *schema.Comment, *ConType, *IndexType, *IndexInclude, *Concurrently, *IndexPredicate, *IndexStorageParams: default: panic(fmt.Sprintf("unexpected index attribute: %T", attr)) } } } func (s *state) fks(b *sqlx.Builder, fks ...*schema.ForeignKey) { b.MapComma(fks, func(i int, b *sqlx.Builder) { fk := fks[i] if fk.Symbol != "" { b.P("CONSTRAINT").Ident(fk.Symbol) } b.P("FOREIGN KEY") b.Wrap(func(b *sqlx.Builder) { b.MapComma(fk.Columns, func(i int, b *sqlx.Builder) { b.Ident(fk.Columns[i].Name) }) }) b.P("REFERENCES").Table(fk.RefTable) b.Wrap(func(b *sqlx.Builder) { b.MapComma(fk.RefColumns, func(i int, b *sqlx.Builder) { b.Ident(fk.RefColumns[i].Name) }) }) if fk.OnUpdate != "" { b.P("ON UPDATE", string(fk.OnUpdate)) } if fk.OnDelete != "" { b.P("ON DELETE", string(fk.OnDelete)) } }) } func (s *state) append(c ...*migrate.Change) { s.Changes = append(s.Changes, c...) } // Build instantiates a new builder and writes the given phrase to it. func (s *state) Build(phrases ...string) *sqlx.Builder { b := &sqlx.Builder{QuoteChar: '"', Schema: s.SchemaQualifier} return b.P(phrases...) 
} // skipAutoChanges filters unnecessary changes that are automatically // happened by the database when ALTER TABLE is executed. func skipAutoChanges(changes []schema.Change) []schema.Change { var ( dropC = make(map[string]bool) planned = make([]schema.Change, 0, len(changes)) ) for _, c := range changes { if c, ok := c.(*schema.DropColumn); ok { dropC[c.C.Name] = true } } search: for _, c := range changes { switch c := c.(type) { // Indexes involving the column are automatically dropped // with it. This is true for multi-columns indexes as well. // See https://www.postgresql.org/docs/current/sql-altertable.html case *schema.DropIndex: for _, p := range c.I.Parts { if p.C != nil && dropC[p.C.Name] { continue search } } // Simple case for skipping constraint dropping, // if the child table columns were dropped. case *schema.DropForeignKey: for _, c := range c.F.Columns { if dropC[c.Name] { continue search } } } planned = append(planned, c) } return planned } // commentChange extracts the information for modifying a comment from the given change. func commentChange(c schema.Change) (from, to string, err error) { switch c := c.(type) { case *schema.AddAttr: toC, ok := c.A.(*schema.Comment) if ok { to = toC.Text return } err = fmt.Errorf("unexpected AddAttr.(%T) for comment change", c.A) case *schema.ModifyAttr: fromC, ok1 := c.From.(*schema.Comment) toC, ok2 := c.To.(*schema.Comment) if ok1 && ok2 { from, to = fromC.Text, toC.Text return } err = fmt.Errorf("unsupported ModifyAttr(%T, %T) change", c.From, c.To) default: err = fmt.Errorf("unexpected change %T", c) } return } // checks writes the CHECK constraint to the builder. func check(b *sqlx.Builder, c *schema.Check) { if c.Name != "" { b.P("CONSTRAINT").Ident(c.Name) } b.P("CHECK", sqlx.MayWrap(c.Expr)) if sqlx.Has(c.Attrs, &NoInherit{}) { b.P("NO INHERIT") } } // isUniqueConstraint reports if the index is a valid UNIQUE constraint. 
func isUniqueConstraint(i *schema.Index) bool { if c := (ConType{}); !sqlx.Has(i.Attrs, &c) || !c.IsUnique() || !i.Unique { return false } // UNIQUE constraint cannot use functional indexes, // and all its parts must have the default sort ordering. for _, p := range i.Parts { if p.X != nil || p.Desc { return false } } for _, a := range i.Attrs { switch a := a.(type) { // UNIQUE constraints must have BTREE type indexes. case *IndexType: if strings.ToUpper(a.T) != IndexTypeBTree { return false } // Partial indexes are not allowed. case *IndexPredicate: return false } } return true } func quote(s string) string { if sqlx.IsQuoted(s, '\'') { return s } return "'" + strings.ReplaceAll(s, "'", "''") + "'" } func (s *state) createDropEnum(sc *schema.Schema, e *schema.EnumType) (string, string) { name := s.enumIdent(sc, e) return s.Build("CREATE TYPE"). P(name, "AS ENUM"). Wrap(func(b *sqlx.Builder) { b.MapComma(e.Values, func(i int, b *sqlx.Builder) { b.WriteString(quote(e.Values[i])) }) }). String(), s.Build("DROP TYPE").P(name).String() } func (s *state) enumIdent(ns *schema.Schema, e *schema.EnumType) string { es := s.enumSchema(ns, e) if es != "" { return fmt.Sprintf("%q.%q", es, e.T) } return strconv.Quote(e.T) } func (s *state) enumSchema(ns *schema.Schema, e *schema.EnumType) (es string) { switch { // In case the plan uses a specific schema qualifier. case s.SchemaQualifier != nil: es = *s.SchemaQualifier // Enum schema has higher precedence. case e.Schema != nil: es = e.Schema.Name // Fallback to table schema if exists. case ns != nil: es = ns.Name } return } // schemaPrefix returns the schema prefix based on the planner config. func (s *state) schemaPrefix(ns *schema.Schema) string { switch { case s.SchemaQualifier != nil: // In case the qualifier is empty, ignore. 
if *s.SchemaQualifier != "" { return fmt.Sprintf("%q.", *s.SchemaQualifier) } case ns != nil && ns.Name != "": return fmt.Sprintf("%q.", ns.Name) } return "" } // formatType formats the type but takes into account the qualifier. func (s *state) formatType(t *schema.Table, c *schema.Column) (string, error) { switch tt := c.Type.Type.(type) { case *schema.EnumType: return s.enumIdent(t.Schema, tt), nil case *ArrayType: if e, ok := tt.Type.(*schema.EnumType); ok { return s.enumIdent(t.Schema, e) + "[]", nil } } return FormatType(c.Type.Type) } func hasEnumType(c *schema.Column) (*schema.EnumType, bool) { switch t := c.Type.Type.(type) { case *schema.EnumType: return t, true case *ArrayType: if e, ok := t.Type.(*schema.EnumType); ok { return e, true } } return nil, false } atlas-0.7.2/sql/postgres/migrate_test.go000066400000000000000000001156711431455511600203270ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package postgres import ( "context" "strconv" "testing" "ariga.io/atlas/sql/internal/sqltest" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/DATA-DOG/go-sqlmock" "github.com/stretchr/testify/require" ) func TestPlanChanges(t *testing.T) { tests := []struct { changes []schema.Change options []migrate.PlanOption mock func(mock) wantPlan *migrate.Plan wantErr bool }{ { changes: []schema.Change{ &schema.AddSchema{S: schema.New("test"), Extra: []schema.Clause{&schema.IfNotExists{}}}, &schema.DropSchema{S: schema.New("test"), Extra: []schema.Clause{&schema.IfExists{}}}, &schema.DropSchema{S: schema.New("test"), Extra: []schema.Clause{}}, }, wantPlan: &migrate.Plan{ Reversible: false, Transactional: true, Changes: []*migrate.Change{ { Cmd: `CREATE SCHEMA IF NOT EXISTS "test"`, Reverse: `DROP SCHEMA "test" CASCADE`, }, { Cmd: `DROP SCHEMA IF EXISTS "test" CASCADE`, }, { Cmd: `DROP SCHEMA "test" CASCADE`, }, }, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, } pets := &schema.Table{ Name: "pets", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, {Name: "user_id", Type: &schema.ColumnType{ Type: &schema.IntegerType{T: "bigint"}, }, }, }, } fk := &schema.ForeignKey{ Symbol: "pets_user_id_fkey", Table: pets, OnUpdate: schema.NoAction, OnDelete: schema.Cascade, RefTable: users, Columns: []*schema.Column{pets.Columns[1]}, RefColumns: []*schema.Column{users.Columns[0]}, } pets.ForeignKeys = []*schema.ForeignKey{fk} return &schema.ModifyTable{ T: pets, Changes: []schema.Change{ &schema.DropForeignKey{ F: fk, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `ALTER TABLE "pets" DROP CONSTRAINT "pets_user_id_fkey"`, Reverse: `ALTER TABLE "pets" ADD CONSTRAINT "pets_user_id_fkey" FOREIGN 
KEY ("user_id") REFERENCES "users" ("id") ON UPDATE NO ACTION ON DELETE CASCADE`, }, }, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, {Name: "name", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(255)"}}}}, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.DropIndex{ I: schema.NewIndex("name_index"). AddParts(schema.NewColumnPart(schema.NewColumn("name"))), }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `DROP INDEX "name_index"`, Reverse: `CREATE INDEX "name_index" ON "users" ("name")`, }, }, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, {Name: "nickname", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(255)"}}}}, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.DropIndex{ I: schema.NewUniqueIndex("unique_nickname"). AddColumns(schema.NewColumn("nickname")). 
AddAttrs(&ConType{T: "u"}), }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `ALTER TABLE "users" DROP CONSTRAINT "unique_nickname"`, Reverse: `ALTER TABLE "users" ADD CONSTRAINT "unique_nickname" UNIQUE ("nickname")`, }, }, }, }, { changes: []schema.Change{ &schema.AddSchema{S: &schema.Schema{Name: "test"}}, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{{Cmd: `CREATE SCHEMA "test"`, Reverse: `DROP SCHEMA "test" CASCADE`}}}, }, { changes: []schema.Change{ &schema.DropSchema{S: &schema.Schema{Name: "atlas"}}, }, wantPlan: &migrate.Plan{ Transactional: true, Changes: []*migrate.Change{{Cmd: `DROP SCHEMA "atlas" CASCADE`}}, }, }, { changes: []schema.Change{ &schema.AddTable{ T: &schema.Table{ Name: "posts", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}, Attrs: []schema.Attr{&Identity{}, &schema.Comment{}}}, {Name: "text", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}, Null: true}}, {Name: "directions", Type: &schema.ColumnType{Type: &ArrayType{T: "direction[]", Type: &schema.EnumType{T: "direction", Values: []string{"NORTH", "SOUTH"}, Schema: schema.New("public")}}}}, {Name: "states", Type: &schema.ColumnType{Type: &ArrayType{T: "state[]", Type: &schema.EnumType{T: "state", Values: []string{"ON", "OFF"}}}}}, }, Attrs: []schema.Attr{ &schema.Comment{}, &schema.Check{Name: "id_nonzero", Expr: `("id" > 0)`}, &schema.Check{Name: "text_len", Expr: `(length("text") > 0)`, Attrs: []schema.Attr{&NoInherit{}}}, &schema.Check{Name: "a_in_b", Expr: `(a) in (b)`}, &Partition{T: "HASH", Parts: []*PartitionPart{{C: schema.NewColumn("text")}}}, }, }, }, }, mock: func(m mock) { m.ExpectQuery(sqltest.Escape("SELECT * FROM pg_type t JOIN pg_namespace n on t.typnamespace = n.oid WHERE t.typname = $1 AND t.typtype = 'e' AND n.nspname = $2")). WithArgs("direction", "public"). 
WillReturnRows(sqlmock.NewRows([]string{"name"})) m.ExpectQuery(sqltest.Escape("SELECT * FROM pg_type t JOIN pg_namespace n on t.typnamespace = n.oid WHERE t.typname = $1 AND t.typtype = 'e'")). WithArgs("state"). WillReturnRows(sqlmock.NewRows([]string{"name"}).AddRow("state")) }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ {Cmd: `CREATE TYPE "public"."direction" AS ENUM ('NORTH', 'SOUTH')`, Reverse: `DROP TYPE "public"."direction"`}, {Cmd: `CREATE TABLE "posts" ("id" integer NOT NULL GENERATED BY DEFAULT AS IDENTITY, "text" text NULL, "directions" "public"."direction"[] NOT NULL, "states" "state"[] NOT NULL, CONSTRAINT "id_nonzero" CHECK ("id" > 0), CONSTRAINT "text_len" CHECK (length("text") > 0) NO INHERIT, CONSTRAINT "a_in_b" CHECK ((a) in (b))) PARTITION BY HASH ("text")`, Reverse: `DROP TABLE "posts"`}, }, }, }, { changes: []schema.Change{ &schema.AddTable{ T: &schema.Table{ Name: "posts", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}, Attrs: []schema.Attr{&Identity{Sequence: &Sequence{Start: 1024}}}}, }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{{Cmd: `CREATE TABLE "posts" ("id" integer NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1024))`, Reverse: `DROP TABLE "posts"`}}, }, }, { changes: []schema.Change{ &schema.AddTable{ T: &schema.Table{ Name: "posts", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}}, {Name: "nid", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}, Attrs: []schema.Attr{&schema.GeneratedExpr{Expr: "id+1"}}}, }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `CREATE TABLE "posts" ("id" integer NOT NULL, "nid" integer NOT NULL GENERATED ALWAYS AS (id+1) STORED)`, Reverse: `DROP TABLE "posts"`, }, }, }, }, { changes: []schema.Change{ 
&schema.ModifyTable{ T: schema.NewTable("posts"). AddColumns( schema.NewIntColumn("c1", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "id+1"}), ), Changes: []schema.Change{ &schema.ModifyColumn{ Change: schema.ChangeGenerated, From: schema.NewIntColumn("c1", "int"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "id+1"}), To: schema.NewIntColumn("c1", "int"), }, }, }, }, wantPlan: &migrate.Plan{ Reversible: false, Transactional: true, Changes: []*migrate.Change{ { Cmd: `ALTER TABLE "posts" ALTER COLUMN "c1" DROP EXPRESSION`, }, }, }, }, { changes: []schema.Change{ &schema.AddTable{ T: &schema.Table{ Name: "posts", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}, Attrs: []schema.Attr{&Identity{Sequence: &Sequence{Increment: 2}}}}, }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{{Cmd: `CREATE TABLE "posts" ("id" integer NOT NULL GENERATED BY DEFAULT AS IDENTITY (INCREMENT BY 2))`, Reverse: `DROP TABLE "posts"`}}, }, }, { changes: []schema.Change{ &schema.AddTable{ T: &schema.Table{ Name: "posts", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}, Attrs: []schema.Attr{&Identity{Sequence: &Sequence{Start: 100, Increment: 2}}}}, }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{{Cmd: `CREATE TABLE "posts" ("id" integer NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 100 INCREMENT BY 2))`, Reverse: `DROP TABLE "posts"`}}, }, }, { changes: []schema.Change{ &schema.DropTable{T: &schema.Table{Name: "posts"}}, }, wantPlan: &migrate.Plan{ Reversible: false, Transactional: true, Changes: []*migrate.Change{ {Cmd: `DROP TABLE "posts"`}, }, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, } return 
&schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.AddColumn{ C: &schema.Column{Name: "name", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar", Size: 255}}, Attrs: []schema.Attr{&schema.Comment{Text: "foo"}}, Default: &schema.Literal{V: "'logged_in'"}}, }, &schema.AddColumn{ C: &schema.Column{Name: "last", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar", Size: 255}}, Attrs: []schema.Attr{&schema.Comment{Text: "bar"}}, Default: &schema.RawExpr{X: "'logged_in'"}}, }, &schema.AddIndex{ I: &schema.Index{ Name: "id_key", Parts: []*schema.IndexPart{ {C: users.Columns[0], Desc: true}, }, Attrs: []schema.Attr{ &schema.Comment{Text: "comment"}, &IndexPredicate{P: "success"}, }, }, }, &schema.AddIndex{ I: &schema.Index{ Name: "id_brin", Parts: []*schema.IndexPart{ {C: users.Columns[0], Desc: true}, }, Attrs: []schema.Attr{ &IndexType{T: IndexTypeBRIN}, &IndexStorageParams{PagesPerRange: 2}, }, }, }, &schema.AddCheck{ C: &schema.Check{Name: "name_not_empty", Expr: `("name" <> '')`}, }, &schema.DropCheck{ C: &schema.Check{Name: "id_nonzero", Expr: `("id" <> 0)`}, }, &schema.ModifyCheck{ From: &schema.Check{Name: "id_iseven", Expr: `("id" % 2 = 0)`}, To: &schema.Check{Name: "id_iseven", Expr: `(("id") % 2 = 0)`}, }, &schema.AddIndex{ I: &schema.Index{ Name: "include_key", Parts: []*schema.IndexPart{ {C: users.Columns[0]}, }, Attrs: []schema.Attr{ &IndexInclude{Columns: []*schema.Column{schema.NewColumn("a"), schema.NewColumn("b")}}, }, }, }, &schema.AddIndex{ I: &schema.Index{ Name: "concurrently", Parts: []*schema.IndexPart{ {C: users.Columns[0]}, }, Attrs: []schema.Attr{ &Concurrently{}, }, }, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `ALTER TABLE "users" ADD COLUMN "name" character varying(255) NOT NULL DEFAULT 'logged_in', ADD COLUMN "last" character varying(255) NOT NULL DEFAULT 'logged_in', ADD CONSTRAINT "name_not_empty" CHECK ("name" <> ''), DROP 
CONSTRAINT "id_nonzero", DROP CONSTRAINT "id_iseven", ADD CONSTRAINT "id_iseven" CHECK (("id") % 2 = 0)`, Reverse: `ALTER TABLE "users" DROP CONSTRAINT "id_iseven", ADD CONSTRAINT "id_iseven" CHECK ("id" % 2 = 0), ADD CONSTRAINT "id_nonzero" CHECK ("id" <> 0), DROP CONSTRAINT "name_not_empty", DROP COLUMN "last", DROP COLUMN "name"`, }, { Cmd: `CREATE INDEX "id_key" ON "users" ("id" DESC) WHERE success`, Reverse: `DROP INDEX "id_key"`, }, { Cmd: `CREATE INDEX "id_brin" ON "users" USING BRIN ("id" DESC) WITH (pages_per_range = 2)`, Reverse: `DROP INDEX "id_brin"`, }, { Cmd: `CREATE INDEX "include_key" ON "users" ("id") INCLUDE ("a", "b")`, Reverse: `DROP INDEX "include_key"`, }, { Cmd: `CREATE INDEX CONCURRENTLY "concurrently" ON "users" ("id")`, Reverse: `DROP INDEX CONCURRENTLY "concurrently"`, }, { Cmd: `COMMENT ON COLUMN "users" ."name" IS 'foo'`, Reverse: `COMMENT ON COLUMN "users" ."name" IS ''`, }, { Cmd: `COMMENT ON COLUMN "users" ."last" IS 'bar'`, Reverse: `COMMENT ON COLUMN "users" ."last" IS ''`, }, { Cmd: `COMMENT ON INDEX "id_key" IS 'comment'`, Reverse: `COMMENT ON INDEX "id_key" IS ''`, }, }, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.DropColumn{ C: &schema.Column{Name: "name", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar"}}}, }, &schema.ModifyColumn{ From: &schema.Column{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&Identity{}, &schema.Comment{Text: "comment"}}}, To: &schema.Column{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&Identity{Sequence: &Sequence{Start: 1024}}}}, Change: schema.ChangeAttr | schema.ChangeComment, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: 
true, Changes: []*migrate.Change{ { Cmd: `ALTER TABLE "users" DROP COLUMN "name", ALTER COLUMN "id" SET GENERATED BY DEFAULT SET START WITH 1024 SET INCREMENT BY 1 RESTART`, Reverse: `ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED BY DEFAULT SET START WITH 1 SET INCREMENT BY 1 RESTART, ADD COLUMN "name" character varying NOT NULL`, }, { Cmd: `COMMENT ON COLUMN "users" ."id" IS ''`, Reverse: `COMMENT ON COLUMN "users" ."id" IS 'comment'`, }, }, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.DropColumn{ C: &schema.Column{Name: "name", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar"}}}, }, &schema.ModifyColumn{ From: &schema.Column{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&Identity{Sequence: &Sequence{Start: 0, Last: 1025}}}}, To: &schema.Column{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&Identity{Sequence: &Sequence{Start: 1024}}}}, Change: schema.ChangeAttr, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `ALTER TABLE "users" DROP COLUMN "name", ALTER COLUMN "id" SET GENERATED BY DEFAULT SET START WITH 1024 SET INCREMENT BY 1`, Reverse: `ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED BY DEFAULT SET START WITH 1 SET INCREMENT BY 1, ADD COLUMN "name" character varying NOT NULL`, }, }, }, }, { changes: []schema.Change{ &schema.ModifyTable{ T: &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}}, Changes: []schema.Change{ &schema.AddAttr{ A: &schema.Comment{Text: "foo"}, }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ {Cmd: `COMMENT ON TABLE "public"."users" IS 'foo'`, 
Reverse: `COMMENT ON TABLE "public"."users" IS ''`}, }, }, }, { changes: []schema.Change{ &schema.ModifyTable{ T: &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}}, Changes: []schema.Change{ &schema.ModifyAttr{ To: &schema.Comment{Text: "foo"}, From: &schema.Comment{Text: "bar"}, }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ {Cmd: `COMMENT ON TABLE "public"."users" IS 'foo'`, Reverse: `COMMENT ON TABLE "public"."users" IS 'bar'`}, }, }, }, { changes: []schema.Change{ func() schema.Change { users := schema.NewTable("users"). SetSchema(schema.New("public")). AddColumns( schema.NewEnumColumn("state", schema.EnumName("state"), schema.EnumValues("on", "off")), schema.NewEnumColumn("status", schema.EnumName("status"), schema.EnumValues("a", "b"), schema.EnumSchema(schema.New("test"))), ) return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.ModifyColumn{ From: &schema.Column{Name: "state", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}}, To: users.Columns[0], Change: schema.ChangeType, }, &schema.ModifyColumn{ From: &schema.Column{Name: "status", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}}, To: users.Columns[1], Change: schema.ChangeType, }, &schema.DropColumn{ C: schema.NewEnumColumn("dc1", schema.EnumName("de"), schema.EnumValues("on")), }, &schema.DropColumn{ C: schema.NewEnumColumn("dc2", schema.EnumName("de"), schema.EnumValues("on")), }, }, } }(), }, mock: func(m mock) { m.ExpectQuery(sqltest.Escape("SELECT * FROM pg_type t JOIN pg_namespace n on t.typnamespace = n.oid WHERE t.typname = $1 AND t.typtype = 'e' AND n.nspname = $2 ")). WithArgs("state", "public"). WillReturnRows(sqlmock.NewRows([]string{"name"})) m.ExpectQuery(sqltest.Escape("SELECT * FROM pg_type t JOIN pg_namespace n on t.typnamespace = n.oid WHERE t.typname = $1 AND t.typtype = 'e' AND n.nspname = $2 ")). WithArgs("status", "test"). 
WillReturnRows(sqlmock.NewRows([]string{"name"})) }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ {Cmd: `CREATE TYPE "public"."state" AS ENUM ('on', 'off')`, Reverse: `DROP TYPE "public"."state"`}, {Cmd: `CREATE TYPE "test"."status" AS ENUM ('a', 'b')`, Reverse: `DROP TYPE "test"."status"`}, {Cmd: `ALTER TABLE "public"."users" ALTER COLUMN "state" TYPE "public"."state", ALTER COLUMN "status" TYPE "test"."status", DROP COLUMN "dc1", DROP COLUMN "dc2"`, Reverse: `ALTER TABLE "public"."users" ADD COLUMN "dc2" "public"."de" NOT NULL, ADD COLUMN "dc1" "public"."de" NOT NULL, ALTER COLUMN "status" TYPE text, ALTER COLUMN "state" TYPE text`}, {Cmd: `DROP TYPE "public"."de"`, Reverse: `CREATE TYPE "public"."de" AS ENUM ('on')`}, }, }, }, { changes: []schema.Change{ func() schema.Change { users := schema.NewTable("users"). SetSchema(schema.New("public")). AddColumns( schema.NewEnumColumn("state", schema.EnumName("state"), schema.EnumValues("on", "off")), ) return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.ModifyColumn{ From: users.Columns[0], To: &schema.Column{Name: "state", Type: &schema.ColumnType{Type: &schema.EnumType{T: "state", Values: []string{"on", "off", "unknown"}}}}, Change: schema.ChangeType, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: false, Transactional: true, Changes: []*migrate.Change{ {Cmd: `ALTER TYPE "public"."state" ADD VALUE 'unknown'`}, }, }, }, // Modify column type and drop comment. { changes: []schema.Change{ func() schema.Change { users := schema.NewTable("users"). SetSchema(schema.New("public")). 
AddColumns( schema.NewEnumColumn("state", schema.EnumName("state"), schema.EnumValues("on", "off")), ) return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.ModifyColumn{ From: &schema.Column{Name: "state", Type: &schema.ColumnType{Type: &schema.EnumType{T: "state", Values: []string{"on", "off"}}}, Attrs: []schema.Attr{&schema.Comment{Text: "foo"}}}, To: &schema.Column{Name: "state", Type: &schema.ColumnType{Type: &schema.EnumType{T: "state", Values: []string{"on", "off", "unknown"}}}}, Change: schema.ChangeType | schema.ChangeComment, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: false, Transactional: true, Changes: []*migrate.Change{ {Cmd: `ALTER TYPE "public"."state" ADD VALUE 'unknown'`}, {Cmd: `COMMENT ON COLUMN "public"."users" ."state" IS ''`, Reverse: `COMMENT ON COLUMN "public"."users" ."state" IS 'foo'`}, }, }, }, // Modify column type and add comment. { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "state", Type: &schema.ColumnType{Type: &schema.EnumType{T: "state", Values: []string{"on", "off"}}}}, }, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.ModifyColumn{ From: &schema.Column{Name: "state", Type: &schema.ColumnType{Type: &schema.EnumType{T: "state", Values: []string{"on", "off"}}}}, To: &schema.Column{Name: "state", Type: &schema.ColumnType{Type: &schema.EnumType{T: "state", Values: []string{"on", "off", "unknown"}}}, Attrs: []schema.Attr{&schema.Comment{Text: "foo"}}}, Change: schema.ChangeType | schema.ChangeComment, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: false, Transactional: true, Changes: []*migrate.Change{ {Cmd: `ALTER TYPE "state" ADD VALUE 'unknown'`}, {Cmd: `COMMENT ON COLUMN "users" ."state" IS 'foo'`, Reverse: `COMMENT ON COLUMN "users" ."state" IS ''`}, }, }, }, // Modify column comment. 
{ changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "state", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}}, }, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.ModifyColumn{ From: &schema.Column{Name: "state", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}, Attrs: []schema.Attr{&schema.Comment{Text: "bar"}}}, To: &schema.Column{Name: "state", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}}, Attrs: []schema.Attr{&schema.Comment{Text: "foo"}}}, Change: schema.ChangeComment, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ {Cmd: `COMMENT ON COLUMN "users" ."state" IS 'foo'`, Reverse: `COMMENT ON COLUMN "users" ."state" IS 'bar'`}, }, }, }, // Modify index comment. { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.ModifyIndex{ From: schema.NewIndex("id_key"). AddColumns(users.Columns[0]). SetComment("foo"), To: schema.NewIndex("id_key"). AddColumns(users.Columns[0]). SetComment("bar"), Change: schema.ChangeComment, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ {Cmd: `COMMENT ON INDEX "id_key" IS 'bar'`, Reverse: `COMMENT ON INDEX "id_key" IS 'foo'`}, }, }, }, // Modify default values. 
{ changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, {Name: "one", Default: &schema.Literal{V: "'one'"}, Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, {Name: "two", Default: &schema.Literal{V: "'two'"}, Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.ModifyColumn{ From: &schema.Column{Name: "one", Default: &schema.Literal{V: "'one'"}, Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, To: &schema.Column{Name: "one", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, Change: schema.ChangeDefault, }, &schema.ModifyColumn{ From: &schema.Column{Name: "two", Default: &schema.Literal{V: "'two'"}, Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, To: &schema.Column{Name: "two", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, Change: schema.ChangeDefault, }, &schema.ModifyColumn{ From: &schema.Column{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&Identity{}}}, To: &schema.Column{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}, Attrs: []schema.Attr{&Identity{Sequence: &Sequence{Start: 1024}}}}, Change: schema.ChangeAttr, }, }, } }(), }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `ALTER TABLE "users" ALTER COLUMN "one" DROP DEFAULT, ALTER COLUMN "two" DROP DEFAULT, ALTER COLUMN "id" SET GENERATED BY DEFAULT SET START WITH 1024 SET INCREMENT BY 1 RESTART`, Reverse: `ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED BY DEFAULT SET START WITH 1 SET INCREMENT BY 1 RESTART, ALTER COLUMN "two" SET DEFAULT 'two', ALTER COLUMN "one" SET DEFAULT 'one'`, }, }, }, }, { changes: []schema.Change{ &schema.RenameTable{ From: 
schema.NewTable("t1").SetSchema(schema.New("s1")), To: schema.NewTable("t2").SetSchema(schema.New("s2")), }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `ALTER TABLE "s1"."t1" RENAME TO "s2"."t2"`, Reverse: `ALTER TABLE "s2"."t2" RENAME TO "s1"."t1"`, }, }, }, }, { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("t1").SetSchema(schema.New("s1")), Changes: []schema.Change{ &schema.RenameColumn{ From: schema.NewColumn("a"), To: schema.NewColumn("b"), }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `ALTER TABLE "s1"."t1" RENAME COLUMN "a" TO "b"`, Reverse: `ALTER TABLE "s1"."t1" RENAME COLUMN "b" TO "a"`, }, }, }, }, { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("t1").SetSchema(schema.New("s1")), Changes: []schema.Change{ &schema.RenameColumn{ From: schema.NewColumn("a"), To: schema.NewColumn("b"), }, &schema.AddColumn{ C: schema.NewIntColumn("c", "int"), }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `ALTER TABLE "s1"."t1" ADD COLUMN "c" integer NOT NULL`, Reverse: `ALTER TABLE "s1"."t1" DROP COLUMN "c"`, }, { Cmd: `ALTER TABLE "s1"."t1" RENAME COLUMN "a" TO "b"`, Reverse: `ALTER TABLE "s1"."t1" RENAME COLUMN "b" TO "a"`, }, }, }, }, { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("t1").SetSchema(schema.New("s1")), Changes: []schema.Change{ &schema.RenameIndex{ From: schema.NewIndex("a"), To: schema.NewIndex("b"), }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `ALTER INDEX "a" RENAME TO "b"`, Reverse: `ALTER INDEX "b" RENAME TO "a"`, }, }, }, }, // Invalid serial type. 
{ changes: []schema.Change{ &schema.AddTable{ T: &schema.Table{ Name: "posts", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &SerialType{T: "serial"}, Null: true}}, }, }, }, }, wantErr: true, }, // Drop serial sequence. { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("posts"). SetSchema(schema.New("public")). AddColumns( schema.NewIntColumn("c1", "integer"), schema.NewIntColumn("c2", "integer"), ), Changes: schema.Changes{ &schema.ModifyColumn{ From: schema.NewColumn("c1").SetType(&SerialType{T: "smallserial"}), To: schema.NewIntColumn("c1", "integer"), Change: schema.ChangeType, }, &schema.ModifyColumn{ From: schema.NewColumn("c2").SetType(&SerialType{T: "serial", SequenceName: "previous_name"}), To: schema.NewIntColumn("c2", "integer"), Change: schema.ChangeType, }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `ALTER TABLE "public"."posts" ALTER COLUMN "c1" DROP DEFAULT, ALTER COLUMN "c1" TYPE integer, ALTER COLUMN "c2" DROP DEFAULT`, Reverse: `ALTER TABLE "public"."posts" ALTER COLUMN "c2" SET DEFAULT nextval('"public"."previous_name"'), ALTER COLUMN "c1" SET DEFAULT nextval('"public"."posts_c1_seq"'), ALTER COLUMN "c1" TYPE smallint`, }, { Cmd: `DROP SEQUENCE IF EXISTS "public"."posts_c1_seq"`, Reverse: `CREATE SEQUENCE IF NOT EXISTS "public"."posts_c1_seq" OWNED BY "public"."posts"."c1"`, }, { Cmd: `DROP SEQUENCE IF EXISTS "public"."previous_name"`, Reverse: `CREATE SEQUENCE IF NOT EXISTS "public"."previous_name" OWNED BY "public"."posts"."c2"`, }, }, }, }, // Add serial sequence. { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("posts"). SetSchema(schema.New("public")). 
AddColumns( schema.NewColumn("c1").SetType(&SerialType{T: "serial"}), schema.NewColumn("c2").SetType(&SerialType{T: "bigserial"}), ), Changes: schema.Changes{ &schema.ModifyColumn{ From: schema.NewIntColumn("c1", "integer"), To: schema.NewColumn("c1").SetType(&SerialType{T: "serial"}), Change: schema.ChangeType, }, &schema.ModifyColumn{ From: schema.NewIntColumn("c2", "integer"), To: schema.NewColumn("c2").SetType(&SerialType{T: "bigserial"}), Change: schema.ChangeType, }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `CREATE SEQUENCE IF NOT EXISTS "public"."posts_c1_seq" OWNED BY "public"."posts"."c1"`, Reverse: `DROP SEQUENCE IF EXISTS "public"."posts_c1_seq"`, }, { Cmd: `CREATE SEQUENCE IF NOT EXISTS "public"."posts_c2_seq" OWNED BY "public"."posts"."c2"`, Reverse: `DROP SEQUENCE IF EXISTS "public"."posts_c2_seq"`, }, { Cmd: `ALTER TABLE "public"."posts" ALTER COLUMN "c1" SET DEFAULT nextval('"public"."posts_c1_seq"'), ALTER COLUMN "c2" SET DEFAULT nextval('"public"."posts_c2_seq"'), ALTER COLUMN "c2" TYPE bigint`, Reverse: `ALTER TABLE "public"."posts" ALTER COLUMN "c2" DROP DEFAULT, ALTER COLUMN "c2" TYPE integer, ALTER COLUMN "c1" DROP DEFAULT`, }, }, }, }, // Change underlying sequence type. { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("posts"). SetSchema(schema.New("public")). 
AddColumns( schema.NewColumn("c1").SetType(&SerialType{T: "serial"}), schema.NewColumn("c2").SetType(&SerialType{T: "bigserial"}), ), Changes: schema.Changes{ &schema.ModifyColumn{ From: schema.NewColumn("c1").SetType(&SerialType{T: "smallserial"}), To: schema.NewColumn("c1").SetType(&SerialType{T: "serial"}), Change: schema.ChangeType, }, &schema.ModifyColumn{ From: schema.NewColumn("c2").SetType(&SerialType{T: "serial"}), To: schema.NewColumn("c2").SetType(&SerialType{T: "bigserial"}), Change: schema.ChangeType, }, }, }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `ALTER TABLE "public"."posts" ALTER COLUMN "c1" TYPE integer, ALTER COLUMN "c2" TYPE bigint`, Reverse: `ALTER TABLE "public"."posts" ALTER COLUMN "c2" TYPE integer, ALTER COLUMN "c1" TYPE smallint`, }, }, }, }, // Empty qualifier. { changes: []schema.Change{ &schema.AddTable{ T: schema.NewTable("posts"). SetSchema(schema.New("test1")). AddColumns( schema.NewEnumColumn("c1", schema.EnumName("enum"), schema.EnumValues("a"), schema.EnumSchema(schema.New("test2"))), ), }, }, options: []migrate.PlanOption{ func(o *migrate.PlanOptions) { o.SchemaQualifier = new(string) }, }, mock: func(m mock) { m.ExpectQuery(sqltest.Escape("SELECT * FROM pg_type t JOIN pg_namespace n on t.typnamespace = n.oid WHERE t.typname = $1 AND t.typtype = 'e'")). WithArgs("enum"). WillReturnRows(sqlmock.NewRows([]string{"name"})) }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `CREATE TYPE "enum" AS ENUM ('a')`, Reverse: `DROP TYPE "enum"`, }, { Cmd: `CREATE TABLE "posts" ("c1" "enum" NOT NULL)`, Reverse: `DROP TABLE "posts"`, }, }, }, }, // Empty sequence qualifier. { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("posts"). SetSchema(schema.New("public")). 
AddColumns( schema.NewColumn("c1").SetType(&SerialType{T: "serial"}), ), Changes: schema.Changes{ &schema.ModifyColumn{ From: schema.NewIntColumn("c1", "integer"), To: schema.NewColumn("c1").SetType(&SerialType{T: "serial"}), Change: schema.ChangeType, }, }, }, }, options: []migrate.PlanOption{ func(o *migrate.PlanOptions) { o.SchemaQualifier = new(string) }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `CREATE SEQUENCE IF NOT EXISTS "posts_c1_seq" OWNED BY "posts"."c1"`, Reverse: `DROP SEQUENCE IF EXISTS "posts_c1_seq"`, }, { Cmd: `ALTER TABLE "posts" ALTER COLUMN "c1" SET DEFAULT nextval('"posts_c1_seq"')`, Reverse: `ALTER TABLE "posts" ALTER COLUMN "c1" DROP DEFAULT`, }, }, }, }, // Empty index qualifier. { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("posts"). SetSchema(schema.New("public")). AddColumns( schema.NewIntColumn("c", "int"), ), Changes: schema.Changes{ &schema.AddIndex{ I: schema.NewIndex("i").AddColumns(schema.NewIntColumn("c", "int")), }, }, }, }, options: []migrate.PlanOption{ func(o *migrate.PlanOptions) { o.SchemaQualifier = new(string) }, }, wantPlan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: `CREATE INDEX "i" ON "posts" ("c")`, Reverse: `DROP INDEX "i"`, }, }, }, }, // Empty qualifier in multi-schema mode should fail. 
{ changes: []schema.Change{ &schema.AddTable{T: schema.NewTable("t1").SetSchema(schema.New("s1")).AddColumns(schema.NewIntColumn("a", "int"))}, &schema.AddTable{T: schema.NewTable("t2").SetSchema(schema.New("s2")).AddColumns(schema.NewIntColumn("a", "int"))}, }, options: []migrate.PlanOption{ func(o *migrate.PlanOptions) { o.SchemaQualifier = new(string) }, }, wantErr: true, }, } for i, tt := range tests { t.Run(strconv.Itoa(i), func(t *testing.T) { db, mk, err := sqlmock.New() require.NoError(t, err) m := mock{mk} m.version("130000") if tt.mock != nil { tt.mock(m) } drv, err := Open(db) require.NoError(t, err) plan, err := drv.PlanChanges(context.Background(), "wantPlan", tt.changes, tt.options...) if tt.wantErr { require.Error(t, err, "expect plan to fail") return } require.NoError(t, err) require.Equal(t, tt.wantPlan.Reversible, plan.Reversible) require.Equal(t, tt.wantPlan.Transactional, plan.Transactional) for i, c := range plan.Changes { require.Equal(t, tt.wantPlan.Changes[i].Cmd, c.Cmd) require.Equal(t, tt.wantPlan.Changes[i].Reverse, c.Reverse) } }) } } atlas-0.7.2/sql/postgres/postgrescheck/000077500000000000000000000000001431455511600201425ustar00rootroot00000000000000atlas-0.7.2/sql/postgres/postgrescheck/postgrescheck.go000066400000000000000000000021571431455511600233420ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package postgrescheck import ( "fmt" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/postgres" "ariga.io/atlas/sql/sqlcheck" "ariga.io/atlas/sql/sqlcheck/datadepend" "ariga.io/atlas/sql/sqlcheck/destructive" ) func addNotNull(p *datadepend.ColumnPass) (diags []sqlcheck.Diagnostic, err error) { tt, err := postgres.FormatType(p.Column.Type.Type) if err != nil { return nil, err } return []sqlcheck.Diagnostic{ { Pos: p.Change.Stmt.Pos, Text: fmt.Sprintf( "Adding a non-nullable %q column %q will fail in case table %q is not empty", tt, p.Column.Name, p.Table.Name, ), }, }, nil } func init() { sqlcheck.Register(postgres.DriverName, func(r *schemahcl.Resource) ([]sqlcheck.Analyzer, error) { ds, err := destructive.New(r) if err != nil { return nil, err } dd, err := datadepend.New(r, datadepend.Handler{ AddNotNull: addNotNull, }) return []sqlcheck.Analyzer{ds, dd}, nil }) } atlas-0.7.2/sql/postgres/postgrescheck/postgrescheck_test.go000066400000000000000000000032261431455511600243770ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package postgrescheck_test import ( "context" "testing" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/postgres" _ "ariga.io/atlas/sql/postgres/postgrescheck" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlcheck" "github.com/stretchr/testify/require" ) func TestDataDepend_MightFail(t *testing.T) { var ( report *sqlcheck.Report pass = &sqlcheck.Pass{ File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ { Stmt: &migrate.Stmt{ Text: "ALTER TABLE users", }, Changes: schema.Changes{ &schema.ModifyTable{ T: schema.NewTable("users"). SetSchema(schema.New("test")). 
AddColumns( schema.NewIntColumn("a", postgres.TypeInt), schema.NewIntColumn("b", postgres.TypeInt), ), Changes: []schema.Change{ &schema.AddColumn{C: schema.NewTimeColumn("b", postgres.TypeInt)}, }, }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) azs, err := sqlcheck.AnalyzerFor(postgres.DriverName, nil) require.NoError(t, err) require.NoError(t, sqlcheck.Analyzers(azs).Analyze(context.Background(), pass)) require.Equal(t, report.Diagnostics[0].Text, `Adding a non-nullable "int" column "b" will fail in case table "users" is not empty`) } type testFile struct { name string migrate.File } func (t testFile) Name() string { return t.name } atlas-0.7.2/sql/postgres/sqlspec.go000066400000000000000000000546211431455511600173070ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package postgres import ( "fmt" "reflect" "strconv" "strings" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/internal/specutil" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlspec" "github.com/hashicorp/hcl/v2/hclparse" ) type ( doc struct { Tables []*sqlspec.Table `spec:"table"` Enums []*Enum `spec:"enum"` Schemas []*sqlspec.Schema `spec:"schema"` } // Enum holds a specification for an enum, that can be referenced as a column type. Enum struct { Name string `spec:",name"` Schema *schemahcl.Ref `spec:"schema"` Values []string `spec:"values"` schemahcl.DefaultExtension } ) func init() { schemahcl.Register("enum", &Enum{}) } // evalSpec evaluates an Atlas DDL document into v using the input. 
func evalSpec(p *hclparse.Parser, v any, input map[string]string) error { var d doc if err := hclState.Eval(p, &d, input); err != nil { return err } switch v := v.(type) { case *schema.Realm: if err := specutil.Scan(v, d.Schemas, d.Tables, convertTable); err != nil { return fmt.Errorf("specutil: failed converting to *schema.Realm: %w", err) } if len(d.Enums) > 0 { if err := convertEnums(d.Tables, d.Enums, v); err != nil { return err } } case *schema.Schema: if len(d.Schemas) != 1 { return fmt.Errorf("specutil: expecting document to contain a single schema, got %d", len(d.Schemas)) } r := &schema.Realm{} if err := specutil.Scan(r, d.Schemas, d.Tables, convertTable); err != nil { return err } if err := convertEnums(d.Tables, d.Enums, r); err != nil { return err } *v = *r.Schemas[0] default: return fmt.Errorf("specutil: failed unmarshaling spec. %T is not supported", v) } return nil } // MarshalSpec marshals v into an Atlas DDL document using a schemahcl.Marshaler. func MarshalSpec(v any, marshaler schemahcl.Marshaler) ([]byte, error) { var d doc switch s := v.(type) { case *schema.Schema: var err error doc, err := schemaSpec(s) if err != nil { return nil, fmt.Errorf("specutil: failed converting schema to spec: %w", err) } d.Tables = doc.Tables d.Schemas = doc.Schemas d.Enums = doc.Enums case *schema.Realm: for _, s := range s.Schemas { doc, err := schemaSpec(s) if err != nil { return nil, fmt.Errorf("specutil: failed converting schema to spec: %w", err) } d.Tables = append(d.Tables, doc.Tables...) d.Schemas = append(d.Schemas, doc.Schemas...) d.Enums = append(d.Enums, doc.Enums...) } if err := specutil.QualifyDuplicates(d.Tables); err != nil { return nil, err } if err := specutil.QualifyReferences(d.Tables, s); err != nil { return nil, err } default: return nil, fmt.Errorf("specutil: failed marshaling spec. 
%T is not supported", v) } return marshaler.MarshalSpec(&d) } var ( hclState = schemahcl.New( schemahcl.WithTypes(TypeRegistry.Specs()), schemahcl.WithScopedEnums("table.index.type", IndexTypeBTree, IndexTypeHash, IndexTypeGIN, IndexTypeGiST, IndexTypeBRIN), schemahcl.WithScopedEnums("table.partition.type", PartitionTypeRange, PartitionTypeList, PartitionTypeHash), schemahcl.WithScopedEnums("table.column.identity.generated", GeneratedTypeAlways, GeneratedTypeByDefault), schemahcl.WithScopedEnums("table.column.as.type", "STORED"), schemahcl.WithScopedEnums("table.foreign_key.on_update", specutil.ReferenceVars...), schemahcl.WithScopedEnums("table.foreign_key.on_delete", specutil.ReferenceVars...), ) // MarshalHCL marshals v into an Atlas HCL DDL document. MarshalHCL = schemahcl.MarshalerFunc(func(v any) ([]byte, error) { return MarshalSpec(v, hclState) }) // EvalHCL implements the schemahcl.Evaluator interface. EvalHCL = schemahcl.EvalFunc(evalSpec) // EvalHCLBytes is a helper that evaluates an HCL document from a byte slice instead // of from an hclparse.Parser instance. EvalHCLBytes = specutil.HCLBytesFunc(EvalHCL) ) // convertTable converts a sqlspec.Table to a schema.Table. Table conversion is done without converting // ForeignKeySpecs into ForeignKeys, as the target tables do not necessarily exist in the schema // at this point. Instead, the linking is done by the convertSchema function. func convertTable(spec *sqlspec.Table, parent *schema.Schema) (*schema.Table, error) { t, err := specutil.Table(spec, parent, convertColumn, specutil.PrimaryKey, convertIndex, specutil.Check) if err != nil { return nil, err } if err := convertPartition(spec.Extra, t); err != nil { return nil, err } return t, nil } // convertPartition converts and appends the partition block into the table attributes if exists. 
func convertPartition(spec schemahcl.Resource, table *schema.Table) error { r, ok := spec.Resource("partition") if !ok { return nil } var p struct { Type string `spec:"type"` Columns []*schemahcl.Ref `spec:"columns"` Parts []*struct { Expr string `spec:"expr"` Column *schemahcl.Ref `spec:"column"` } `spec:"by"` } if err := r.As(&p); err != nil { return fmt.Errorf("parsing %s.partition: %w", table.Name, err) } if p.Type == "" { return fmt.Errorf("missing attribute %s.partition.type", table.Name) } key := &Partition{T: p.Type} switch n, m := len(p.Columns), len(p.Parts); { case n == 0 && m == 0: return fmt.Errorf("missing columns or expressions for %s.partition", table.Name) case n > 0 && m > 0: return fmt.Errorf(`multiple definitions for %s.partition, use "columns" or "by"`, table.Name) case n > 0: for _, r := range p.Columns { c, err := specutil.ColumnByRef(table, r) if err != nil { return err } key.Parts = append(key.Parts, &PartitionPart{C: c}) } case m > 0: for i, p := range p.Parts { switch { case p.Column == nil && p.Expr == "": return fmt.Errorf("missing column or expression for %s.partition.by at position %d", table.Name, i) case p.Column != nil && p.Expr != "": return fmt.Errorf("multiple definitions for %s.partition.by at position %d", table.Name, i) case p.Column != nil: c, err := specutil.ColumnByRef(table, p.Column) if err != nil { return err } key.Parts = append(key.Parts, &PartitionPart{C: c}) case p.Expr != "": key.Parts = append(key.Parts, &PartitionPart{X: &schema.RawExpr{X: p.Expr}}) } } } table.AddAttrs(key) return nil } // fromPartition returns the resource spec for representing the partition block. 
func fromPartition(p Partition) *schemahcl.Resource { key := &schemahcl.Resource{ Type: "partition", Attrs: []*schemahcl.Attr{ specutil.VarAttr("type", strings.ToUpper(specutil.Var(p.T))), }, } columns, ok := func() (*schemahcl.ListValue, bool) { parts := make([]schemahcl.Value, 0, len(p.Parts)) for _, p := range p.Parts { if p.C == nil { return nil, false } parts = append(parts, specutil.ColumnRef(p.C.Name)) } return &schemahcl.ListValue{V: parts}, true }() if ok { key.Attrs = append(key.Attrs, &schemahcl.Attr{K: "columns", V: columns}) return key } for _, p := range p.Parts { part := &schemahcl.Resource{Type: "by"} switch { case p.C != nil: part.Attrs = append(part.Attrs, specutil.RefAttr("column", specutil.ColumnRef(p.C.Name))) case p.X != nil: part.Attrs = append(part.Attrs, specutil.StrAttr("expr", p.X.(*schema.RawExpr).X)) } key.Children = append(key.Children, part) } return key } // convertColumn converts a sqlspec.Column into a schema.Column. func convertColumn(spec *sqlspec.Column, _ *schema.Table) (*schema.Column, error) { if err := fixDefaultQuotes(spec.Default); err != nil { return nil, err } c, err := specutil.Column(spec, convertColumnType) if err != nil { return nil, err } if r, ok := spec.Extra.Resource("identity"); ok { id, err := convertIdentity(r) if err != nil { return nil, err } c.Attrs = append(c.Attrs, id) } if err := specutil.ConvertGenExpr(spec.Remain(), c, generatedType); err != nil { return nil, err } return c, nil } func convertIdentity(r *schemahcl.Resource) (*Identity, error) { var spec struct { Generation string `spec:"generated"` Start int64 `spec:"start"` Increment int64 `spec:"increment"` } if err := r.As(&spec); err != nil { return nil, err } id := &Identity{Generation: specutil.FromVar(spec.Generation), Sequence: &Sequence{}} if spec.Start != 0 { id.Sequence.Start = spec.Start } if spec.Increment != 0 { id.Sequence.Increment = spec.Increment } return id, nil } // fixDefaultQuotes fixes the quotes on the Default field to be single 
quotes // instead of double quotes. func fixDefaultQuotes(value schemahcl.Value) error { lv, ok := value.(*schemahcl.LiteralValue) if !ok { return nil } if sqlx.IsQuoted(lv.V, '"') { uq, err := strconv.Unquote(lv.V) if err != nil { return err } lv.V = "'" + uq + "'" } return nil } // convertIndex converts a sqlspec.Index into a schema.Index. func convertIndex(spec *sqlspec.Index, t *schema.Table) (*schema.Index, error) { idx, err := specutil.Index(spec, t) if err != nil { return nil, err } if attr, ok := spec.Attr("type"); ok { t, err := attr.String() if err != nil { return nil, err } idx.Attrs = append(idx.Attrs, &IndexType{T: t}) } if attr, ok := spec.Attr("where"); ok { p, err := attr.String() if err != nil { return nil, err } idx.Attrs = append(idx.Attrs, &IndexPredicate{P: p}) } if attr, ok := spec.Attr("page_per_range"); ok { p, err := attr.Int64() if err != nil { return nil, err } idx.Attrs = append(idx.Attrs, &IndexStorageParams{PagesPerRange: p}) } if attr, ok := spec.Attr("include"); ok { refs, err := attr.Refs() if err != nil { return nil, err } if len(refs) == 0 { return nil, fmt.Errorf("unexpected empty INCLUDE in index %q definition", spec.Name) } include := make([]*schema.Column, len(refs)) for i, r := range refs { if include[i], err = specutil.ColumnByRef(t, r); err != nil { return nil, err } } idx.Attrs = append(idx.Attrs, &IndexInclude{Columns: include}) } return idx, nil } const defaultTimePrecision = 6 // convertColumnType converts a sqlspec.Column into a concrete Postgres schema.Type. func convertColumnType(spec *sqlspec.Column) (schema.Type, error) { typ, err := TypeRegistry.Type(spec.Type, spec.Extra.Attrs) if err != nil { return nil, err } // Handle default values for time precision types. 
if t, ok := typ.(*schema.TimeType); ok && strings.HasPrefix(t.T, "time") { if _, ok := attr(spec.Type, "precision"); !ok { p := defaultTimePrecision t.Precision = &p } } return typ, nil } // convertEnums converts possibly referenced column types (like enums) to // an actual schema.Type and sets it on the correct schema.Column. func convertEnums(tables []*sqlspec.Table, enums []*Enum, r *schema.Realm) error { var ( used = make(map[*Enum]struct{}) byName = make(map[string]*Enum) ) for _, e := range enums { byName[e.Name] = e } for _, t := range tables { for _, c := range t.Columns { var enum *Enum switch { case c.Type.IsRef: n, err := enumName(c.Type) if err != nil { return err } e, ok := byName[n] if !ok { return fmt.Errorf("enum %q was not found", n) } enum = e default: n, ok := arrayType(c.Type.T) if !ok || byName[n] == nil { continue } enum = byName[n] } used[enum] = struct{}{} schemaE, err := specutil.SchemaName(enum.Schema) if err != nil { return fmt.Errorf("extract schema name from enum refrence: %w", err) } es, ok := r.Schema(schemaE) if !ok { return fmt.Errorf("schema %q not found in realm for table %q", schemaE, t.Name) } schemaT, err := specutil.SchemaName(t.Schema) if err != nil { return fmt.Errorf("extract schema name from table refrence: %w", err) } ts, ok := r.Schema(schemaT) if !ok { return fmt.Errorf("schema %q not found in realm for table %q", schemaT, t.Name) } tt, ok := ts.Table(t.Name) if !ok { return fmt.Errorf("table %q not found in schema %q", t.Name, ts.Name) } cc, ok := tt.Column(c.Name) if !ok { return fmt.Errorf("column %q not found in table %q", c.Name, t.Name) } e := &schema.EnumType{T: enum.Name, Schema: es, Values: enum.Values} switch t := cc.Type.Type.(type) { case *ArrayType: t.Type = e default: cc.Type.Type = e } } } for _, e := range enums { if _, ok := used[e]; !ok { return fmt.Errorf("enum %q declared but not used", e.Name) } } return nil } // enumName extracts the name of the referenced Enum from the reference string. 
func enumName(ref *schemahcl.Type) (string, error) { s := strings.Split(ref.T, "$enum.") if len(s) != 2 { return "", fmt.Errorf("postgres: failed to extract enum name from %q", ref.T) } return s[1], nil } // enumRef returns a reference string to the given enum name. func enumRef(n string) *schemahcl.Ref { return &schemahcl.Ref{ V: "$enum." + n, } } // schemaSpec converts from a concrete Postgres schema to Atlas specification. func schemaSpec(schem *schema.Schema) (*doc, error) { s, tbls, err := specutil.FromSchema(schem, tableSpec) if err != nil { return nil, err } d := &doc{ Tables: tbls, Schemas: []*sqlspec.Schema{s}, } enums := make(map[string]bool) for _, t := range schem.Tables { for _, c := range t.Columns { if e, ok := hasEnumType(c); ok && !enums[e.T] { d.Enums = append(d.Enums, &Enum{ Name: e.T, Schema: specutil.SchemaRef(s.Name), Values: e.Values, }) enums[e.T] = true } } } return d, nil } // tableSpec converts from a concrete Postgres sqlspec.Table to a schema.Table. func tableSpec(table *schema.Table) (*sqlspec.Table, error) { spec, err := specutil.FromTable( table, columnSpec, specutil.FromPrimaryKey, indexSpec, specutil.FromForeignKey, specutil.FromCheck, ) if err != nil { return nil, err } if p := (Partition{}); sqlx.Has(table.Attrs, &p) { spec.Extra.Children = append(spec.Extra.Children, fromPartition(p)) } return spec, nil } func indexSpec(idx *schema.Index) (*sqlspec.Index, error) { spec, err := specutil.FromIndex(idx) if err != nil { return nil, err } // Avoid printing the index type if it is the default. 
if i := (IndexType{}); sqlx.Has(idx.Attrs, &i) && i.T != IndexTypeBTree { spec.Extra.Attrs = append(spec.Extra.Attrs, specutil.VarAttr("type", strings.ToUpper(i.T))) } if i := (IndexInclude{}); sqlx.Has(idx.Attrs, &i) && len(i.Columns) > 0 { attr := &schemahcl.ListValue{} for _, c := range i.Columns { attr.V = append(attr.V, specutil.ColumnRef(c.Name)) } spec.Extra.Attrs = append(spec.Extra.Attrs, &schemahcl.Attr{ K: "include", V: attr, }) } if i := (IndexPredicate{}); sqlx.Has(idx.Attrs, &i) && i.P != "" { spec.Extra.Attrs = append(spec.Extra.Attrs, specutil.VarAttr("where", strconv.Quote(i.P))) } if p, ok := indexStorageParams(idx.Attrs); ok { spec.Extra.Attrs = append(spec.Extra.Attrs, specutil.Int64Attr("page_per_range", p.PagesPerRange)) } return spec, nil } // columnSpec converts from a concrete Postgres schema.Column into a sqlspec.Column. func columnSpec(c *schema.Column, _ *schema.Table) (*sqlspec.Column, error) { s, err := specutil.FromColumn(c, columnTypeSpec) if err != nil { return nil, err } if i := (&Identity{}); sqlx.Has(c.Attrs, i) { s.Extra.Children = append(s.Extra.Children, fromIdentity(i)) } if x := (schema.GeneratedExpr{}); sqlx.Has(c.Attrs, &x) { s.Extra.Children = append(s.Extra.Children, specutil.FromGenExpr(x, generatedType)) } return s, nil } // fromIdentity returns the resource spec for representing the identity attributes. func fromIdentity(i *Identity) *schemahcl.Resource { id := &schemahcl.Resource{ Type: "identity", Attrs: []*schemahcl.Attr{ specutil.VarAttr("generated", strings.ToUpper(specutil.Var(i.Generation))), }, } if s := i.Sequence; s != nil { if s.Start != 1 { id.Attrs = append(id.Attrs, specutil.Int64Attr("start", s.Start)) } if s.Increment != 1 { id.Attrs = append(id.Attrs, specutil.Int64Attr("increment", s.Increment)) } } return id } // columnTypeSpec converts from a concrete Postgres schema.Type into sqlspec.Column Type. func columnTypeSpec(t schema.Type) (*sqlspec.Column, error) { // Handle postgres enum types. 
They cannot be put into the TypeRegistry since their name is dynamic. if e, ok := t.(*schema.EnumType); ok { return &sqlspec.Column{Type: &schemahcl.Type{ T: enumRef(e.T).V, IsRef: true, }}, nil } st, err := TypeRegistry.Convert(t) if err != nil { return nil, err } return &sqlspec.Column{Type: st}, nil } // TypeRegistry contains the supported TypeSpecs for the Postgres driver. var TypeRegistry = schemahcl.NewRegistry( schemahcl.WithSpecFunc(typeSpec), schemahcl.WithParser(ParseType), schemahcl.WithSpecs( schemahcl.NewTypeSpec(TypeBit, schemahcl.WithAttributes(&schemahcl.TypeAttr{Name: "len", Kind: reflect.Int64})), schemahcl.AliasTypeSpec("bit_varying", TypeBitVar, schemahcl.WithAttributes(&schemahcl.TypeAttr{Name: "len", Kind: reflect.Int64})), schemahcl.NewTypeSpec(TypeVarChar, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.AliasTypeSpec("character_varying", TypeCharVar, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeChar, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeCharacter, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeInt2), schemahcl.NewTypeSpec(TypeInt4), schemahcl.NewTypeSpec(TypeInt8), schemahcl.NewTypeSpec(TypeInt), schemahcl.NewTypeSpec(TypeInteger), schemahcl.NewTypeSpec(TypeSmallInt), schemahcl.NewTypeSpec(TypeBigInt), schemahcl.NewTypeSpec(TypeText), schemahcl.NewTypeSpec(TypeBoolean), schemahcl.NewTypeSpec(TypeBool), schemahcl.NewTypeSpec(TypeBytea), schemahcl.NewTypeSpec(TypeCIDR), schemahcl.NewTypeSpec(TypeInet), schemahcl.NewTypeSpec(TypeMACAddr), schemahcl.NewTypeSpec(TypeMACAddr8), schemahcl.NewTypeSpec(TypeCircle), schemahcl.NewTypeSpec(TypeLine), schemahcl.NewTypeSpec(TypeLseg), schemahcl.NewTypeSpec(TypeBox), schemahcl.NewTypeSpec(TypePath), schemahcl.NewTypeSpec(TypePoint), schemahcl.NewTypeSpec(TypePolygon), schemahcl.NewTypeSpec(TypeDate), schemahcl.NewTypeSpec(TypeTime, 
schemahcl.WithAttributes(precisionTypeAttr()), formatTime()), schemahcl.NewTypeSpec(TypeTimeTZ, schemahcl.WithAttributes(precisionTypeAttr()), formatTime()), schemahcl.NewTypeSpec(TypeTimestampTZ, schemahcl.WithAttributes(precisionTypeAttr()), formatTime()), schemahcl.NewTypeSpec(TypeTimestamp, schemahcl.WithAttributes(precisionTypeAttr()), formatTime()), schemahcl.AliasTypeSpec("double_precision", TypeDouble), schemahcl.NewTypeSpec(TypeReal), schemahcl.NewTypeSpec(TypeFloat, schemahcl.WithAttributes(precisionTypeAttr())), schemahcl.NewTypeSpec(TypeFloat8), schemahcl.NewTypeSpec(TypeFloat4), schemahcl.NewTypeSpec(TypeNumeric, schemahcl.WithAttributes(precisionTypeAttr(), &schemahcl.TypeAttr{Name: "scale", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec(TypeDecimal, schemahcl.WithAttributes(precisionTypeAttr(), &schemahcl.TypeAttr{Name: "scale", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec(TypeSmallSerial), schemahcl.NewTypeSpec(TypeSerial), schemahcl.NewTypeSpec(TypeBigSerial), schemahcl.NewTypeSpec(TypeSerial2), schemahcl.NewTypeSpec(TypeSerial4), schemahcl.NewTypeSpec(TypeSerial8), schemahcl.NewTypeSpec(TypeXML), schemahcl.NewTypeSpec(TypeJSON), schemahcl.NewTypeSpec(TypeJSONB), schemahcl.NewTypeSpec(TypeUUID), schemahcl.NewTypeSpec(TypeMoney), schemahcl.NewTypeSpec("hstore"), schemahcl.NewTypeSpec("sql", schemahcl.WithAttributes(&schemahcl.TypeAttr{Name: "def", Required: true, Kind: reflect.String})), ), schemahcl.WithSpecs(func() (specs []*schemahcl.TypeSpec) { opts := []schemahcl.TypeSpecOption{ schemahcl.WithToSpec(func(t schema.Type) (*schemahcl.Type, error) { i, ok := t.(*IntervalType) if !ok { return nil, fmt.Errorf("postgres: unexpected interval type %T", t) } spec := &schemahcl.Type{T: TypeInterval} if i.F != "" { spec.T = specutil.Var(strings.ToLower(i.F)) } if p := i.Precision; p != nil && *p != defaultTimePrecision { spec.Attrs = []*schemahcl.Attr{specutil.IntAttr("precision", *p)} } return spec, nil }), 
schemahcl.WithFromSpec(func(t *schemahcl.Type) (schema.Type, error) { i := &IntervalType{T: TypeInterval} if t.T != TypeInterval { i.F = specutil.FromVar(t.T) } if a, ok := attr(t, "precision"); ok { p, err := a.Int() if err != nil { return nil, fmt.Errorf(`postgres: parsing attribute "precision": %w`, err) } if p != defaultTimePrecision { i.Precision = &p } } return i, nil }), } for _, f := range []string{"interval", "second", "day to second", "hour to second", "minute to second"} { specs = append(specs, schemahcl.NewTypeSpec(specutil.Var(f), append(opts, schemahcl.WithAttributes(precisionTypeAttr()))...)) } for _, f := range []string{"year", "month", "day", "hour", "minute", "year to month", "day to hour", "day to minute", "hour to minute"} { specs = append(specs, schemahcl.NewTypeSpec(specutil.Var(f), opts...)) } return specs }()...), ) func precisionTypeAttr() *schemahcl.TypeAttr { return &schemahcl.TypeAttr{ Name: "precision", Kind: reflect.Int, Required: false, } } func attr(typ *schemahcl.Type, key string) (*schemahcl.Attr, bool) { for _, a := range typ.Attrs { if a.K == key { return a, true } } return nil, false } func typeSpec(t schema.Type) (*schemahcl.Type, error) { if t, ok := t.(*schema.TimeType); ok && t.T != TypeDate { spec := &schemahcl.Type{T: timeAlias(t.T)} if p := t.Precision; p != nil && *p != defaultTimePrecision { spec.Attrs = []*schemahcl.Attr{specutil.IntAttr("precision", *p)} } return spec, nil } s, err := FormatType(t) if err != nil { return nil, err } return &schemahcl.Type{T: s}, nil } // formatTime overrides the default printing logic done by schemahcl.hclType. 
func formatTime() schemahcl.TypeSpecOption { return schemahcl.WithTypeFormatter(func(t *schemahcl.Type) (string, error) { a, ok := attr(t, "precision") if !ok { return t.T, nil } p, err := a.Int() if err != nil { return "", fmt.Errorf(`postgres: parsing attribute "precision": %w`, err) } return FormatType(&schema.TimeType{T: t.T, Precision: &p}) }) } // generatedType returns the default and only type for a generated column. func generatedType(string) string { return "STORED" } atlas-0.7.2/sql/postgres/sqlspec_test.go000066400000000000000000000753031431455511600203460ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package postgres import ( "fmt" "strconv" "testing" "ariga.io/atlas/sql/internal/spectest" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestSQLSpec(t *testing.T) { f := ` schema "schema" { } table "table" { schema = schema.schema column "col" { type = integer comment = "column comment" } column "age" { type = integer } column "price" { type = int } column "account_name" { type = varchar(32) } column "varchar_length_is_not_required" { type = varchar } column "character_varying_length_is_not_required" { type = character_varying } column "tags" { type = hstore } column "created_at" { type = timestamp(4) default = sql("current_timestamp(4)") } column "updated_at" { type = time default = sql("current_time") } primary_key { columns = [table.table.column.col] } index "index" { type = HASH unique = true columns = [ table.table.column.col, table.table.column.age, ] where = "active" comment = "index comment" } foreign_key "accounts" { columns = [ table.table.column.account_name, ] ref_columns = [ table.accounts.column.name, ] on_delete = SET_NULL } check "positive price" { expr = "price > 0" } comment = "table comment" } table 
"accounts" { schema = schema.schema column "name" { type = varchar(32) } column "type" { type = enum.account_type } primary_key { columns = [table.accounts.column.name] } } enum "account_type" { schema = schema.schema values = ["private", "business"] } ` var s schema.Schema err := EvalHCLBytes([]byte(f), &s, nil) require.NoError(t, err) exp := &schema.Schema{ Name: "schema", } exp.Tables = []*schema.Table{ { Name: "table", Schema: exp, Columns: []*schema.Column{ { Name: "col", Type: &schema.ColumnType{ Type: &schema.IntegerType{ T: "integer", }, }, Attrs: []schema.Attr{ &schema.Comment{Text: "column comment"}, }, }, { Name: "age", Type: &schema.ColumnType{ Type: &schema.IntegerType{ T: "integer", }, }, }, { Name: "price", Type: &schema.ColumnType{ Type: &schema.IntegerType{ T: TypeInt, }, }, }, { Name: "account_name", Type: &schema.ColumnType{ Type: &schema.StringType{ T: "varchar", Size: 32, }, }, }, { Name: "varchar_length_is_not_required", Type: &schema.ColumnType{ Type: &schema.StringType{ T: "varchar", Size: 0, }, }, }, { Name: "character_varying_length_is_not_required", Type: &schema.ColumnType{ Type: &schema.StringType{ T: "character varying", Size: 0, }, }, }, { Name: "tags", Type: &schema.ColumnType{ Type: &UserDefinedType{ T: "hstore", }, }, }, { Name: "created_at", Type: &schema.ColumnType{ Type: typeTime(TypeTimestamp, 4), }, Default: &schema.RawExpr{X: "current_timestamp(4)"}, }, { Name: "updated_at", Type: &schema.ColumnType{ Type: typeTime(TypeTime, 6), }, Default: &schema.RawExpr{X: "current_time"}, }, }, Attrs: []schema.Attr{ &schema.Check{ Name: "positive price", Expr: "price > 0", }, &schema.Comment{Text: "table comment"}, }, }, { Name: "accounts", Schema: exp, Columns: []*schema.Column{ { Name: "name", Type: &schema.ColumnType{ Type: &schema.StringType{ T: "varchar", Size: 32, }, }, }, { Name: "type", Type: &schema.ColumnType{ Type: &schema.EnumType{ T: "account_type", Values: []string{"private", "business"}, Schema: exp, }, }, }, }, }, } 
exp.Tables[0].PrimaryKey = &schema.Index{ Table: exp.Tables[0], Parts: []*schema.IndexPart{ {SeqNo: 0, C: exp.Tables[0].Columns[0]}, }, } exp.Tables[0].Indexes = []*schema.Index{ { Name: "index", Table: exp.Tables[0], Unique: true, Parts: []*schema.IndexPart{ {SeqNo: 0, C: exp.Tables[0].Columns[0]}, {SeqNo: 1, C: exp.Tables[0].Columns[1]}, }, Attrs: []schema.Attr{ &schema.Comment{Text: "index comment"}, &IndexType{T: IndexTypeHash}, &IndexPredicate{P: "active"}, }, }, } exp.Tables[0].ForeignKeys = []*schema.ForeignKey{ { Symbol: "accounts", Table: exp.Tables[0], Columns: []*schema.Column{exp.Tables[0].Columns[3]}, RefTable: exp.Tables[1], RefColumns: []*schema.Column{exp.Tables[1].Columns[0]}, OnDelete: schema.SetNull, }, } exp.Tables[1].PrimaryKey = &schema.Index{ Table: exp.Tables[1], Parts: []*schema.IndexPart{ {SeqNo: 0, C: exp.Tables[1].Columns[0]}, }, } exp.Realm = schema.NewRealm(exp) require.EqualValues(t, exp, &s) } func TestUnmarshalSpec_IndexType(t *testing.T) { f := ` schema "s" {} table "t" { schema = schema.s column "c" { type = int } index "i" { type = %s columns = [column.c] } } ` t.Run("Invalid", func(t *testing.T) { f := fmt.Sprintf(f, "UNK") err := EvalHCLBytes([]byte(f), &schema.Schema{}, nil) require.Error(t, err) }) t.Run("Valid", func(t *testing.T) { var ( s schema.Schema f = fmt.Sprintf(f, "HASH") ) err := EvalHCLBytes([]byte(f), &s, nil) require.NoError(t, err) idx := s.Tables[0].Indexes[0] require.Equal(t, IndexTypeHash, idx.Attrs[0].(*IndexType).T) }) } func TestUnmarshalSpec_BRINIndex(t *testing.T) { f := ` schema "s" {} table "t" { schema = schema.s column "c" { type = int } index "i" { type = BRIN columns = [column.c] page_per_range = 2 } } ` var s schema.Schema err := EvalHCLBytes([]byte(f), &s, nil) require.NoError(t, err) idx := s.Tables[0].Indexes[0] require.Equal(t, IndexTypeBRIN, idx.Attrs[0].(*IndexType).T) require.EqualValues(t, 2, idx.Attrs[1].(*IndexStorageParams).PagesPerRange) } func TestUnmarshalSpec_Partitioned(t 
*testing.T) { t.Run("Columns", func(t *testing.T) { var ( s = &schema.Schema{} f = ` schema "test" {} table "logs" { schema = schema.test column "name" { type = text } partition { type = HASH columns = [ column.name ] } } ` ) err := EvalHCLBytes([]byte(f), s, nil) require.NoError(t, err) c := schema.NewStringColumn("name", "text") expected := schema.New("test"). AddTables(schema.NewTable("logs").AddColumns(c).AddAttrs(&Partition{T: PartitionTypeHash, Parts: []*PartitionPart{{C: c}}})) expected.SetRealm(schema.NewRealm(expected)) require.Equal(t, expected, s) }) t.Run("Parts", func(t *testing.T) { var ( s = &schema.Schema{} f = ` schema "test" {} table "logs" { schema = schema.test column "name" { type = text } partition { type = RANGE by { column = column.name } by { expr = "lower(name)" } } } ` ) err := EvalHCLBytes([]byte(f), s, nil) require.NoError(t, err) c := schema.NewStringColumn("name", "text") expected := schema.New("test"). AddTables(schema.NewTable("logs").AddColumns(c).AddAttrs(&Partition{T: PartitionTypeRange, Parts: []*PartitionPart{{C: c}, {X: &schema.RawExpr{X: "lower(name)"}}}})) expected.SetRealm(schema.NewRealm(expected)) require.Equal(t, expected, s) }) t.Run("Invalid", func(t *testing.T) { err := EvalHCLBytes([]byte(` schema "test" {} table "logs" { schema = schema.test column "name" { type = text } partition { columns = [column.name] } } `), &schema.Schema{}, nil) require.EqualError(t, err, "missing attribute logs.partition.type") err = EvalHCLBytes([]byte(` schema "test" {} table "logs" { schema = schema.test column "name" { type = text } partition { type = HASH } } `), &schema.Schema{}, nil) require.EqualError(t, err, `missing columns or expressions for logs.partition`) err = EvalHCLBytes([]byte(` schema "test" {} table "logs" { schema = schema.test column "name" { type = text } partition { type = HASH columns = [column.name] by { column = column.name } } } `), &schema.Schema{}, nil) require.EqualError(t, err, `multiple definitions for 
logs.partition, use "columns" or "by"`)
	})
}

// TestMarshalSpec_Partitioned verifies that both partitioning forms
// (plain columns and mixed column/expression parts) marshal to the
// expected HCL output.
func TestMarshalSpec_Partitioned(t *testing.T) {
	t.Run("Columns", func(t *testing.T) {
		c := schema.NewStringColumn("name", "text")
		s := schema.New("test").
			AddTables(schema.NewTable("logs").AddColumns(c).AddAttrs(&Partition{T: PartitionTypeHash, Parts: []*PartitionPart{{C: c}}}))
		buf, err := MarshalHCL(s)
		require.NoError(t, err)
		require.Equal(t, `table "logs" { schema = schema.test column "name" { null = false type = text } partition { type = HASH columns = [column.name] } } schema "test" { } `, string(buf))
	})
	t.Run("Parts", func(t *testing.T) {
		c := schema.NewStringColumn("name", "text")
		s := schema.New("test").
			AddTables(schema.NewTable("logs").AddColumns(c).AddAttrs(&Partition{T: PartitionTypeHash, Parts: []*PartitionPart{{C: c}, {X: &schema.RawExpr{X: "lower(name)"}}}}))
		buf, err := MarshalHCL(s)
		require.NoError(t, err)
		require.Equal(t, `table "logs" { schema = schema.test column "name" { null = false type = text } partition { type = HASH by { column = column.name } by { expr = "lower(name)" } } } schema "test" { } `, string(buf))
	})
}

// TestMarshalSpec_IndexPredicate verifies that a partial-index predicate
// is marshaled into a "where" attribute on the index block.
func TestMarshalSpec_IndexPredicate(t *testing.T) {
	s := &schema.Schema{
		Name: "test",
		Tables: []*schema.Table{
			{
				Name: "users",
				Columns: []*schema.Column{
					{
						Name: "id",
						Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}},
					},
				},
			},
		},
	}
	// Link the table back to its schema. The original test repeated this
	// assignment twice; the redundant duplicate was removed.
	s.Tables[0].Schema = s
	s.Tables[0].Indexes = []*schema.Index{
		{
			Name:   "index",
			Table:  s.Tables[0],
			Unique: true,
			Parts: []*schema.IndexPart{
				{SeqNo: 0, C: s.Tables[0].Columns[0]},
			},
			Attrs: []schema.Attr{
				&IndexPredicate{P: "id <> 0"},
			},
		},
	}
	buf, err := MarshalSpec(s, hclState)
	require.NoError(t, err)
	const expected = `table "users" { schema = schema.test column "id" { null = false type = int } index "index" { unique = true columns = [column.id] where = "id <> 0" } } schema "test" { } `
	require.EqualValues(t, expected, string(buf))
}

func TestMarshalSpec_BRINIndex(t *testing.T) {
	s := &schema.Schema{
		Name: "test",
Tables: []*schema.Table{
			{
				Name: "users",
				Columns: []*schema.Column{
					{
						Name: "id",
						Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}},
					},
				},
			},
		},
	}
	// Link the table back to its schema. The original test repeated this
	// assignment twice; the redundant duplicate was removed.
	s.Tables[0].Schema = s
	s.Tables[0].Indexes = []*schema.Index{
		{
			Name:   "index",
			Table:  s.Tables[0],
			Unique: true,
			Parts: []*schema.IndexPart{
				{SeqNo: 0, C: s.Tables[0].Columns[0]},
			},
			Attrs: []schema.Attr{
				&IndexType{T: IndexTypeBRIN},
				&IndexStorageParams{PagesPerRange: 2},
			},
		},
	}
	buf, err := MarshalSpec(s, hclState)
	require.NoError(t, err)
	const expected = `table "users" { schema = schema.test column "id" { null = false type = int } index "index" { unique = true columns = [column.id] type = BRIN page_per_range = 2 } } schema "test" { } `
	require.EqualValues(t, expected, string(buf))
}

// TestUnmarshalSpec_Identity verifies decoding of the "identity" block:
// an unknown generation kind is rejected, and a valid spec populates the
// Identity attribute with its generation kind and sequence start.
func TestUnmarshalSpec_Identity(t *testing.T) {
	f := ` schema "s" {} table "t" { schema = schema.s column "c" { type = int identity { generated = %s start = 10 } } } `
	t.Run("Invalid", func(t *testing.T) {
		f := fmt.Sprintf(f, "UNK")
		err := EvalHCLBytes([]byte(f), &schema.Schema{}, nil)
		require.Error(t, err)
	})
	t.Run("Valid", func(t *testing.T) {
		var (
			s schema.Schema
			f = fmt.Sprintf(f, "ALWAYS")
		)
		err := EvalHCLBytes([]byte(f), &s, nil)
		require.NoError(t, err)
		id := s.Tables[0].Columns[0].Attrs[0].(*Identity)
		require.Equal(t, GeneratedTypeAlways, id.Generation)
		require.EqualValues(t, 10, id.Sequence.Start)
		require.Zero(t, id.Sequence.Increment)
	})
}

// TestUnmarshalSpec_IndexInclude verifies decoding of the "include"
// attribute into an IndexInclude attribute on the index.
func TestUnmarshalSpec_IndexInclude(t *testing.T) {
	f := ` schema "s" {} table "t" { schema = schema.s column "c" { type = int } column "d" { type = int } index "c" { columns = [ column.c, ] include = [ column.d, ] } } `
	var s schema.Schema
	err := EvalHCLBytes([]byte(f), &s, nil)
	require.NoError(t, err)
	require.Len(t, s.Tables, 1)
	require.Len(t, s.Tables[0].Columns, 2)
	require.Len(t, s.Tables[0].Indexes, 1)
	idx, ok := s.Tables[0].Index("c")
	require.True(t, ok)
	require.Len(t, idx.Parts, 1)
	require.Len(t, idx.Attrs, 1)
	var include IndexInclude
	require.True(t, sqlx.Has(idx.Attrs, &include))
	require.Len(t, include.Columns, 1)
	require.Equal(t, "d", include.Columns[0].Name)
}

// TestMarshalSpec_IndexInclude verifies that covering (INCLUDE) columns
// are marshaled into an "include" attribute on the index block.
func TestMarshalSpec_IndexInclude(t *testing.T) {
	s := &schema.Schema{
		Name: "test",
		Tables: []*schema.Table{
			{
				Name: "users",
				Columns: []*schema.Column{
					{
						Name: "c",
						Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}},
					},
					{
						Name: "d",
						Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}},
					},
				},
			},
		},
	}
	// Link the table back to its schema. The original test repeated this
	// assignment twice; the redundant duplicate was removed.
	s.Tables[0].Schema = s
	s.Tables[0].Indexes = []*schema.Index{
		{
			Name:  "index",
			Table: s.Tables[0],
			Parts: []*schema.IndexPart{
				{SeqNo: 0, C: s.Tables[0].Columns[0]},
			},
			Attrs: []schema.Attr{
				&IndexInclude{Columns: s.Tables[0].Columns[1:]},
			},
		},
	}
	buf, err := MarshalSpec(s, hclState)
	require.NoError(t, err)
	const expected = `table "users" { schema = schema.test column "c" { null = false type = int } column "d" { null = false type = int } index "index" { columns = [column.c] include = [column.d] } } schema "test" { } `
	require.EqualValues(t, expected, string(buf))
}

// TestMarshalSpec_GeneratedColumn verifies that generated (computed)
// columns are marshaled into an "as" block with their expression and type.
func TestMarshalSpec_GeneratedColumn(t *testing.T) {
	s := schema.New("test").
		AddTables(
			schema.NewTable("users").
				AddColumns(
					schema.NewIntColumn("c1", "int").
						SetGeneratedExpr(&schema.GeneratedExpr{Expr: "c1 * 2"}),
					schema.NewIntColumn("c2", "int").
SetGeneratedExpr(&schema.GeneratedExpr{Expr: "c3 * c4", Type: "STORED"}), ), ) buf, err := MarshalSpec(s, hclState) require.NoError(t, err) const expected = `table "users" { schema = schema.test column "c1" { null = false type = int as { expr = "c1 * 2" type = STORED } } column "c2" { null = false type = int as { expr = "c3 * c4" type = STORED } } } schema "test" { } ` require.EqualValues(t, expected, string(buf)) } func TestUnmarshalSpec_GeneratedColumns(t *testing.T) { var ( s schema.Schema f = ` schema "test" {} table "users" { schema = schema.test column "c1" { type = int as = "1" } column "c2" { type = int as { expr = "2" } } column "c3" { type = int as { expr = "3" type = STORED } } } ` ) err := EvalHCLBytes([]byte(f), &s, nil) require.NoError(t, err) expected := schema.New("test"). AddTables( schema.NewTable("users"). AddColumns( schema.NewIntColumn("c1", "int").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"}), schema.NewIntColumn("c2", "int").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "2", Type: "STORED"}), schema.NewIntColumn("c3", "int").SetGeneratedExpr(&schema.GeneratedExpr{Expr: "3", Type: "STORED"}), ), ) expected.SetRealm(schema.NewRealm(expected)) require.EqualValues(t, expected, &s) } func TestMarshalSpec_Enum(t *testing.T) { s := schema.New("test"). AddTables( schema.NewTable("account"). AddColumns( schema.NewEnumColumn("account_type", schema.EnumName("account_type"), schema.EnumValues("private", "business"), ), schema.NewColumn("account_states"). SetType(&ArrayType{ T: "states[]", Type: &schema.EnumType{ T: "state", Values: []string{"on", "off"}, }, }), ), schema.NewTable("table2"). 
AddColumns( schema.NewEnumColumn("account_type", schema.EnumName("account_type"), schema.EnumValues("private", "business"), ), ), ) buf, err := MarshalSpec(s, hclState) require.NoError(t, err) const expected = `table "account" { schema = schema.test column "account_type" { null = false type = enum.account_type } column "account_states" { null = false type = sql("states[]") } } table "table2" { schema = schema.test column "account_type" { null = false type = enum.account_type } } enum "account_type" { schema = schema.test values = ["private", "business"] } enum "state" { schema = schema.test values = ["on", "off"] } schema "test" { } ` require.EqualValues(t, expected, string(buf)) } func TestMarshalSpec_TimePrecision(t *testing.T) { s := schema.New("test"). AddTables( schema.NewTable("times"). AddColumns( schema.NewTimeColumn("t_time_def", TypeTime), schema.NewTimeColumn("t_time_with_time_zone", TypeTimeTZ, schema.TimePrecision(2)), schema.NewTimeColumn("t_time_without_time_zone", TypeTime, schema.TimePrecision(2)), schema.NewTimeColumn("t_timestamp", TypeTimestamp, schema.TimePrecision(2)), schema.NewTimeColumn("t_timestamptz", TypeTimestampTZ, schema.TimePrecision(2)), ), ) buf, err := MarshalSpec(s, hclState) require.NoError(t, err) const expected = `table "times" { schema = schema.test column "t_time_def" { null = false type = time } column "t_time_with_time_zone" { null = false type = timetz(2) } column "t_time_without_time_zone" { null = false type = time(2) } column "t_timestamp" { null = false type = timestamp(2) } column "t_timestamptz" { null = false type = timestamptz(2) } } schema "test" { } ` require.EqualValues(t, expected, string(buf)) } func TestTypes(t *testing.T) { p := func(i int) *int { return &i } for _, tt := range []struct { typeExpr string expected schema.Type }{ { typeExpr: "bit(10)", expected: &BitType{T: TypeBit, Len: 10}, }, { typeExpr: `hstore`, expected: &UserDefinedType{T: "hstore"}, }, { typeExpr: "bit_varying(10)", expected: 
&BitType{T: TypeBitVar, Len: 10}, }, { typeExpr: "boolean", expected: &schema.BoolType{T: TypeBoolean}, }, { typeExpr: "bool", expected: &schema.BoolType{T: TypeBool}, }, { typeExpr: "bytea", expected: &schema.BinaryType{T: TypeBytea}, }, { typeExpr: "varchar(255)", expected: &schema.StringType{T: TypeVarChar, Size: 255}, }, { typeExpr: "char(255)", expected: &schema.StringType{T: TypeChar, Size: 255}, }, { typeExpr: "character(255)", expected: &schema.StringType{T: TypeCharacter, Size: 255}, }, { typeExpr: "text", expected: &schema.StringType{T: TypeText}, }, { typeExpr: "smallint", expected: &schema.IntegerType{T: TypeSmallInt}, }, { typeExpr: "integer", expected: &schema.IntegerType{T: TypeInteger}, }, { typeExpr: "bigint", expected: &schema.IntegerType{T: TypeBigInt}, }, { typeExpr: "int", expected: &schema.IntegerType{T: TypeInt}, }, { typeExpr: "int2", expected: &schema.IntegerType{T: TypeInt2}, }, { typeExpr: "int4", expected: &schema.IntegerType{T: TypeInt4}, }, { typeExpr: "int8", expected: &schema.IntegerType{T: TypeInt8}, }, { typeExpr: "cidr", expected: &NetworkType{T: TypeCIDR}, }, { typeExpr: "inet", expected: &NetworkType{T: TypeInet}, }, { typeExpr: "macaddr", expected: &NetworkType{T: TypeMACAddr}, }, { typeExpr: "macaddr8", expected: &NetworkType{T: TypeMACAddr8}, }, { typeExpr: "circle", expected: &schema.SpatialType{T: TypeCircle}, }, { typeExpr: "line", expected: &schema.SpatialType{T: TypeLine}, }, { typeExpr: "lseg", expected: &schema.SpatialType{T: TypeLseg}, }, { typeExpr: "box", expected: &schema.SpatialType{T: TypeBox}, }, { typeExpr: "path", expected: &schema.SpatialType{T: TypePath}, }, { typeExpr: "point", expected: &schema.SpatialType{T: TypePoint}, }, { typeExpr: "date", expected: &schema.TimeType{T: TypeDate}, }, { typeExpr: "time", expected: typeTime(TypeTime, 6), }, { typeExpr: "time(4)", expected: typeTime(TypeTime, 4), }, { typeExpr: "timetz", expected: typeTime(TypeTimeTZ, 6), }, { typeExpr: "timestamp", expected: 
typeTime(TypeTimestamp, 6), }, { typeExpr: "timestamp(4)", expected: typeTime(TypeTimestamp, 4), }, { typeExpr: "timestamptz", expected: typeTime(TypeTimestampTZ, 6), }, { typeExpr: "timestamptz(4)", expected: typeTime(TypeTimestampTZ, 4), }, { typeExpr: "interval", expected: &IntervalType{T: "interval"}, }, { typeExpr: "interval(1)", expected: &IntervalType{T: "interval", Precision: p(1)}, }, { typeExpr: "second", expected: &IntervalType{T: "interval", F: "second"}, }, { typeExpr: "minute_to_second", expected: &IntervalType{T: "interval", F: "minute to second"}, }, { typeExpr: "minute_to_second(2)", expected: &IntervalType{T: "interval", F: "minute to second", Precision: p(2)}, }, { typeExpr: "real", expected: &schema.FloatType{T: TypeReal, Precision: 24}, }, { typeExpr: "float", expected: &schema.FloatType{T: TypeFloat}, }, { typeExpr: "float(1)", expected: &schema.FloatType{T: TypeFloat, Precision: 1}, }, { typeExpr: "float(25)", expected: &schema.FloatType{T: TypeFloat, Precision: 25}, }, { typeExpr: "float8", expected: &schema.FloatType{T: TypeFloat8, Precision: 53}, }, { typeExpr: "float4", expected: &schema.FloatType{T: TypeFloat4, Precision: 24}, }, { typeExpr: "numeric", expected: &schema.DecimalType{T: TypeNumeric}, }, { typeExpr: "numeric(10)", expected: &schema.DecimalType{T: TypeNumeric, Precision: 10}, }, { typeExpr: "numeric(10, 2)", expected: &schema.DecimalType{T: TypeNumeric, Precision: 10, Scale: 2}, }, { typeExpr: "decimal", expected: &schema.DecimalType{T: TypeDecimal}, }, { typeExpr: "decimal(10)", expected: &schema.DecimalType{T: TypeDecimal, Precision: 10}, }, { typeExpr: "decimal(10,2)", expected: &schema.DecimalType{T: TypeDecimal, Precision: 10, Scale: 2}, }, { typeExpr: "smallserial", expected: &SerialType{T: TypeSmallSerial}, }, { typeExpr: "serial", expected: &SerialType{T: TypeSerial}, }, { typeExpr: "bigserial", expected: &SerialType{T: TypeBigSerial}, }, { typeExpr: "serial2", expected: &SerialType{T: TypeSerial2}, }, { typeExpr: 
"serial4", expected: &SerialType{T: TypeSerial4}, }, { typeExpr: "serial8", expected: &SerialType{T: TypeSerial8}, }, { typeExpr: "xml", expected: &XMLType{T: TypeXML}, }, { typeExpr: "json", expected: &schema.JSONType{T: TypeJSON}, }, { typeExpr: "jsonb", expected: &schema.JSONType{T: TypeJSONB}, }, { typeExpr: "uuid", expected: &UUIDType{T: TypeUUID}, }, { typeExpr: "money", expected: &CurrencyType{T: TypeMoney}, }, { typeExpr: `sql("int[]")`, expected: &ArrayType{Type: &schema.IntegerType{T: "int"}, T: "int[]"}, }, { typeExpr: `sql("int[2]")`, expected: &ArrayType{Type: &schema.IntegerType{T: "int"}, T: "int[]"}, }, { typeExpr: `sql("text[][]")`, expected: &ArrayType{Type: &schema.StringType{T: "text"}, T: "text[]"}, }, { typeExpr: `sql("integer [3][3]")`, expected: &ArrayType{Type: &schema.IntegerType{T: "integer"}, T: "integer[]"}, }, { typeExpr: `sql("integer ARRAY[4]")`, expected: &ArrayType{Type: &schema.IntegerType{T: "integer"}, T: "integer[]"}, }, { typeExpr: `sql("integer ARRAY")`, expected: &ArrayType{Type: &schema.IntegerType{T: "integer"}, T: "integer[]"}, }, { typeExpr: `sql("character varying(255) [1][2]")`, expected: &ArrayType{Type: &schema.StringType{T: "character varying", Size: 255}, T: "character varying(255)[]"}, }, { typeExpr: `sql("character varying ARRAY[2]")`, expected: &ArrayType{Type: &schema.StringType{T: "character varying"}, T: "character varying[]"}, }, { typeExpr: `sql("varchar(2) [ 2 ] [ ]")`, expected: &ArrayType{Type: &schema.StringType{T: "varchar", Size: 2}, T: "varchar(2)[]"}, }, } { t.Run(tt.typeExpr, func(t *testing.T) { var test schema.Schema doc := fmt.Sprintf(`table "test" { schema = schema.test column "test" { null = false type = %s } } schema "test" { } `, tt.typeExpr) err := EvalHCLBytes([]byte(doc), &test, nil) require.NoError(t, err) colspec := test.Tables[0].Columns[0] require.EqualValues(t, tt.expected, colspec.Type.Type) spec, err := MarshalHCL(&test) require.NoError(t, err) var after schema.Schema err = 
EvalHCLBytes(spec, &after, nil) require.NoError(t, err) require.EqualValues(t, tt.expected, after.Tables[0].Columns[0].Type.Type) }) } } func typeTime(t string, p int) schema.Type { return &schema.TimeType{T: t, Precision: &p} } func TestParseType_Time(t *testing.T) { for _, tt := range []struct { typ string expected schema.Type }{ { typ: "timestamptz", expected: typeTime(TypeTimestampTZ, 6), }, { typ: "timestamptz(0)", expected: typeTime(TypeTimestampTZ, 0), }, { typ: "timestamptz(6)", expected: typeTime(TypeTimestampTZ, 6), }, { typ: "timestamp with time zone", expected: typeTime(TypeTimestampTZ, 6), }, { typ: "timestamp(1) with time zone", expected: typeTime(TypeTimestampTZ, 1), }, { typ: "timestamp", expected: typeTime(TypeTimestamp, 6), }, { typ: "timestamp(0)", expected: typeTime(TypeTimestamp, 0), }, { typ: "timestamp(6)", expected: typeTime(TypeTimestamp, 6), }, { typ: "timestamp without time zone", expected: typeTime(TypeTimestamp, 6), }, { typ: "timestamp(1) without time zone", expected: typeTime(TypeTimestamp, 1), }, { typ: "time", expected: typeTime(TypeTime, 6), }, { typ: "time(3)", expected: typeTime(TypeTime, 3), }, { typ: "time without time zone", expected: typeTime(TypeTime, 6), }, { typ: "time(3) without time zone", expected: typeTime(TypeTime, 3), }, { typ: "timetz", expected: typeTime(TypeTimeTZ, 6), }, { typ: "timetz(4)", expected: typeTime(TypeTimeTZ, 4), }, { typ: "time with time zone", expected: typeTime(TypeTimeTZ, 6), }, { typ: "time(4) with time zone", expected: typeTime(TypeTimeTZ, 4), }, } { t.Run(tt.typ, func(t *testing.T) { typ, err := ParseType(tt.typ) require.NoError(t, err) require.Equal(t, tt.expected, typ) }) } } func TestFormatType_Interval(t *testing.T) { p := func(i int) *int { return &i } for i, tt := range []struct { typ *IntervalType fmt string }{ { typ: &IntervalType{T: "interval"}, fmt: "interval", }, { typ: &IntervalType{T: "interval", Precision: p(6)}, fmt: "interval", }, { typ: &IntervalType{T: "interval", Precision: 
p(3)}, fmt: "interval(3)", }, { typ: &IntervalType{T: "interval", F: "DAY"}, fmt: "interval day", }, { typ: &IntervalType{T: "interval", F: "HOUR TO SECOND"}, fmt: "interval hour to second", }, { typ: &IntervalType{T: "interval", F: "HOUR TO SECOND", Precision: p(2)}, fmt: "interval hour to second(2)", }, { typ: &IntervalType{T: "interval", F: "DAY TO HOUR", Precision: p(6)}, fmt: "interval day to hour", }, } { t.Run(strconv.Itoa(i), func(t *testing.T) { f, err := FormatType(tt.typ) require.NoError(t, err) require.Equal(t, tt.fmt, f) }) } } func TestParseType_Interval(t *testing.T) { p := func(i int) *int { return &i } for i, tt := range []struct { typ string parsed *IntervalType }{ { typ: "interval", parsed: &IntervalType{T: "interval", Precision: p(6)}, }, { typ: "interval(2)", parsed: &IntervalType{T: "interval", Precision: p(2)}, }, { typ: "interval day", parsed: &IntervalType{T: "interval", F: "day", Precision: p(6)}, }, { typ: "interval day to second(2)", parsed: &IntervalType{T: "interval", F: "day to second", Precision: p(2)}, }, { typ: "interval day to second (2)", parsed: &IntervalType{T: "interval", F: "day to second", Precision: p(2)}, }, } { t.Run(strconv.Itoa(i), func(t *testing.T) { p, err := ParseType(tt.typ) require.NoError(t, err) require.Equal(t, tt.parsed, p) }) } } func TestRegistrySanity(t *testing.T) { spectest.RegistrySanityTest(t, TypeRegistry, []string{"enum"}) } func TestInputVars(t *testing.T) { spectest.TestInputVars(t, EvalHCL) } func TestMarshalRealm(t *testing.T) { t1 := schema.NewTable("t1"). AddColumns(schema.NewIntColumn("id", "int")) t2 := schema.NewTable("t2"). SetComment("Qualified with s1"). AddColumns(schema.NewIntColumn("oid", "int")) t2.AddForeignKeys(schema.NewForeignKey("oid2id").AddColumns(t2.Columns[0]).SetRefTable(t1).AddRefColumns(t1.Columns[0])) t3 := schema.NewTable("t3"). AddColumns(schema.NewIntColumn("id", "int")) t4 := schema.NewTable("t2"). SetComment("Qualified with s2"). 
AddColumns(schema.NewIntColumn("oid", "int")) t4.AddForeignKeys(schema.NewForeignKey("oid2id").AddColumns(t4.Columns[0]).SetRefTable(t3).AddRefColumns(t3.Columns[0])) t5 := schema.NewTable("t5"). AddColumns(schema.NewIntColumn("oid", "int")) t5.AddForeignKeys(schema.NewForeignKey("oid2id1").AddColumns(t5.Columns[0]).SetRefTable(t1).AddRefColumns(t1.Columns[0])) // Reference is qualified with s1. t5.AddForeignKeys(schema.NewForeignKey("oid2id2").AddColumns(t5.Columns[0]).SetRefTable(t2).AddRefColumns(t2.Columns[0])) r := schema.NewRealm( schema.New("s1").AddTables(t1, t2), schema.New("s2").AddTables(t3, t4, t5), ) got, err := MarshalHCL.MarshalSpec(r) require.NoError(t, err) require.Equal( t, `table "t1" { schema = schema.s1 column "id" { null = false type = int } } table "s1" "t2" { schema = schema.s1 comment = "Qualified with s1" column "oid" { null = false type = int } foreign_key "oid2id" { columns = [column.oid] ref_columns = [table.t1.column.id] } } table "t3" { schema = schema.s2 column "id" { null = false type = int } } table "s2" "t2" { schema = schema.s2 comment = "Qualified with s2" column "oid" { null = false type = int } foreign_key "oid2id" { columns = [column.oid] ref_columns = [table.t3.column.id] } } table "t5" { schema = schema.s2 column "oid" { null = false type = int } foreign_key "oid2id1" { columns = [column.oid] ref_columns = [table.t1.column.id] } foreign_key "oid2id2" { columns = [column.oid] ref_columns = [table.s1.t2.column.oid] } } schema "s1" { } schema "s2" { } `, string(got)) } atlas-0.7.2/sql/schema/000077500000000000000000000000001431455511600146705ustar00rootroot00000000000000atlas-0.7.2/sql/schema/dsl.go000066400000000000000000000460211431455511600160040ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package schema import ( "reflect" ) // The functions and methods below provide a DSL for creating schema resources using // a fluent interface. Note that some methods create links between the schema elements. // New creates a new Schema. func New(name string) *Schema { return &Schema{Name: name} } // SetCharset sets or appends the Charset attribute // to the schema with the given value. func (s *Schema) SetCharset(v string) *Schema { replaceOrAppend(&s.Attrs, &Charset{V: v}) return s } // UnsetCharset unsets the Charset attribute. func (s *Schema) UnsetCharset() *Schema { del(&s.Attrs, &Charset{}) return s } // SetCollation sets or appends the Collation attribute // to the schema with the given value. func (s *Schema) SetCollation(v string) *Schema { replaceOrAppend(&s.Attrs, &Collation{V: v}) return s } // UnsetCollation the Collation attribute. func (s *Schema) UnsetCollation() *Schema { del(&s.Attrs, &Collation{}) return s } // SetComment sets or appends the Comment attribute // to the schema with the given value. func (s *Schema) SetComment(v string) *Schema { replaceOrAppend(&s.Attrs, &Comment{Text: v}) return s } // AddAttrs adds additional attributes to the schema. func (s *Schema) AddAttrs(attrs ...Attr) *Schema { s.Attrs = append(s.Attrs, attrs...) return s } // SetRealm sets the database/realm of the schema. func (s *Schema) SetRealm(r *Realm) *Schema { s.Realm = r return s } // AddTables adds and links the given tables to the schema. func (s *Schema) AddTables(tables ...*Table) *Schema { for _, t := range tables { t.SetSchema(s) } s.Tables = append(s.Tables, tables...) return s } // NewRealm creates a new Realm. func NewRealm(schemas ...*Schema) *Realm { r := &Realm{Schemas: schemas} for _, s := range schemas { s.Realm = r } return r } // AddSchemas adds and links the given schemas to the realm. func (r *Realm) AddSchemas(schemas ...*Schema) *Realm { for _, s := range schemas { s.SetRealm(r) } r.Schemas = append(r.Schemas, schemas...) 
return r } // SetCharset sets or appends the Charset attribute // to the realm with the given value. func (r *Realm) SetCharset(v string) *Realm { replaceOrAppend(&r.Attrs, &Charset{V: v}) return r } // UnsetCharset unsets the Charset attribute. func (r *Realm) UnsetCharset() *Realm { del(&r.Attrs, &Charset{}) return r } // SetCollation sets or appends the Collation attribute // to the realm with the given value. func (r *Realm) SetCollation(v string) *Realm { replaceOrAppend(&r.Attrs, &Collation{V: v}) return r } // UnsetCollation the Collation attribute. func (r *Realm) UnsetCollation() *Realm { del(&r.Attrs, &Collation{}) return r } // NewTable creates a new Table. func NewTable(name string) *Table { return &Table{Name: name} } // SetCharset sets or appends the Charset attribute // to the table with the given value. func (t *Table) SetCharset(v string) *Table { replaceOrAppend(&t.Attrs, &Charset{V: v}) return t } // UnsetCharset unsets the Charset attribute. func (t *Table) UnsetCharset() *Table { del(&t.Attrs, &Charset{}) return t } // SetCollation sets or appends the Collation attribute // to the table with the given value. func (t *Table) SetCollation(v string) *Table { replaceOrAppend(&t.Attrs, &Collation{V: v}) return t } // UnsetCollation the Collation attribute. func (t *Table) UnsetCollation() *Table { del(&t.Attrs, &Collation{}) return t } // SetComment sets or appends the Comment attribute // to the table with the given value. func (t *Table) SetComment(v string) *Table { replaceOrAppend(&t.Attrs, &Comment{Text: v}) return t } // AddChecks appends the given checks to the attribute list. func (t *Table) AddChecks(checks ...*Check) *Table { for _, c := range checks { t.Attrs = append(t.Attrs, c) } return t } // SetSchema sets the schema (named-database) of the table. func (t *Table) SetSchema(s *Schema) *Table { t.Schema = s return t } // SetPrimaryKey sets the primary-key of the table. 
func (t *Table) SetPrimaryKey(pk *Index) *Table { pk.Table = t t.PrimaryKey = pk for _, p := range pk.Parts { if p.C == nil { continue } if _, ok := t.Column(p.C.Name); !ok { t.AddColumns(p.C) } } return t } // AddColumns appends the given columns to the table column list. func (t *Table) AddColumns(columns ...*Column) *Table { t.Columns = append(t.Columns, columns...) return t } // AddIndexes appends the given indexes to the table index list. func (t *Table) AddIndexes(indexes ...*Index) *Table { for _, idx := range indexes { idx.Table = t } t.Indexes = append(t.Indexes, indexes...) return t } // AddForeignKeys appends the given foreign-keys to the table foreign-key list. func (t *Table) AddForeignKeys(fks ...*ForeignKey) *Table { for _, fk := range fks { fk.Table = t } t.ForeignKeys = append(t.ForeignKeys, fks...) return t } // AddAttrs adds and additional attributes to the table. func (t *Table) AddAttrs(attrs ...Attr) *Table { t.Attrs = append(t.Attrs, attrs...) return t } // NewColumn creates a new column with the given name. func NewColumn(name string) *Column { return &Column{Name: name} } // NewNullColumn creates a new nullable column with the given name. func NewNullColumn(name string) *Column { return NewColumn(name). SetNull(true) } // NewBoolColumn creates a new BoolType column. func NewBoolColumn(name, typ string) *Column { return NewColumn(name). SetType(&BoolType{T: typ}) } // NewNullBoolColumn creates a new nullable BoolType column. func NewNullBoolColumn(name, typ string) *Column { return NewBoolColumn(name, typ). SetNull(true) } // NewIntColumn creates a new IntegerType column. func NewIntColumn(name, typ string) *Column { return NewColumn(name). SetType(&IntegerType{T: typ}) } // NewNullIntColumn creates a new nullable IntegerType column. func NewNullIntColumn(name, typ string) *Column { return NewIntColumn(name, typ). SetNull(true) } // NewUintColumn creates a new unsigned IntegerType column. 
func NewUintColumn(name, typ string) *Column { return NewColumn(name). SetType(&IntegerType{T: typ, Unsigned: true}) } // NewNullUintColumn creates a new nullable unsigned IntegerType column. func NewNullUintColumn(name, typ string) *Column { return NewUintColumn(name, typ). SetNull(true) } // EnumOption allows configuring EnumType using functional options. type EnumOption func(*EnumType) // EnumName configures the name of the name. This option // is useful for databases like PostgreSQL that supports // user-defined types for enums. func EnumName(name string) EnumOption { return func(e *EnumType) { e.T = name } } // EnumValues configures the values of the enum. func EnumValues(values ...string) EnumOption { return func(e *EnumType) { e.Values = values } } // EnumSchema configures the schema of the enum. func EnumSchema(s *Schema) EnumOption { return func(e *EnumType) { e.Schema = s } } // NewEnumColumn creates a new EnumType column. func NewEnumColumn(name string, opts ...EnumOption) *Column { t := &EnumType{} for _, opt := range opts { opt(t) } return NewColumn(name).SetType(t) } // NewNullEnumColumn creates a new nullable EnumType column. func NewNullEnumColumn(name string, opts ...EnumOption) *Column { return NewEnumColumn(name, opts...). SetNull(true) } // BinaryOption allows configuring BinaryType using functional options. type BinaryOption func(*BinaryType) // BinarySize configures the size of the binary type. func BinarySize(size int) BinaryOption { return func(b *BinaryType) { b.Size = &size } } // NewBinaryColumn creates a new BinaryType column. func NewBinaryColumn(name, typ string, opts ...BinaryOption) *Column { t := &BinaryType{T: typ} for _, opt := range opts { opt(t) } return NewColumn(name).SetType(t) } // NewNullBinaryColumn creates a new nullable BinaryType column. func NewNullBinaryColumn(name, typ string, opts ...BinaryOption) *Column { return NewBinaryColumn(name, typ, opts...). 
SetNull(true) } // StringOption allows configuring StringType using functional options. type StringOption func(*StringType) // StringSize configures the size of the string type. func StringSize(size int) StringOption { return func(b *StringType) { b.Size = size } } // NewStringColumn creates a new StringType column. func NewStringColumn(name, typ string, opts ...StringOption) *Column { t := &StringType{T: typ} for _, opt := range opts { opt(t) } return NewColumn(name).SetType(t) } // NewNullStringColumn creates a new nullable StringType column. func NewNullStringColumn(name, typ string, opts ...StringOption) *Column { return NewStringColumn(name, typ, opts...). SetNull(true) } // DecimalOption allows configuring DecimalType using functional options. type DecimalOption func(*DecimalType) // DecimalPrecision configures the precision of the decimal type. func DecimalPrecision(precision int) DecimalOption { return func(b *DecimalType) { b.Precision = precision } } // DecimalScale configures the scale of the decimal type. func DecimalScale(scale int) DecimalOption { return func(b *DecimalType) { b.Scale = scale } } // DecimalUnsigned configures the unsigned of the float type. func DecimalUnsigned(unsigned bool) DecimalOption { return func(b *DecimalType) { b.Unsigned = unsigned } } // NewDecimalColumn creates a new DecimalType column. func NewDecimalColumn(name, typ string, opts ...DecimalOption) *Column { t := &DecimalType{T: typ} for _, opt := range opts { opt(t) } return NewColumn(name).SetType(t) } // NewNullDecimalColumn creates a new nullable DecimalType column. func NewNullDecimalColumn(name, typ string, opts ...DecimalOption) *Column { return NewDecimalColumn(name, typ, opts...). SetNull(true) } // FloatOption allows configuring FloatType using functional options. type FloatOption func(*FloatType) // FloatPrecision configures the precision of the float type. 
func FloatPrecision(precision int) FloatOption { return func(b *FloatType) { b.Precision = precision } } // FloatUnsigned configures the unsigned of the float type. func FloatUnsigned(unsigned bool) FloatOption { return func(b *FloatType) { b.Unsigned = unsigned } } // NewFloatColumn creates a new FloatType column. func NewFloatColumn(name, typ string, opts ...FloatOption) *Column { t := &FloatType{T: typ} for _, opt := range opts { opt(t) } return NewColumn(name).SetType(t) } // NewNullFloatColumn creates a new nullable FloatType column. func NewNullFloatColumn(name, typ string, opts ...FloatOption) *Column { return NewFloatColumn(name, typ, opts...). SetNull(true) } // TimeOption allows configuring TimeType using functional options. type TimeOption func(*TimeType) // TimePrecision configures the precision of the time type. func TimePrecision(precision int) TimeOption { return func(b *TimeType) { b.Precision = &precision } } // NewTimeColumn creates a new TimeType column. func NewTimeColumn(name, typ string, opts ...TimeOption) *Column { t := &TimeType{T: typ} for _, opt := range opts { opt(t) } return NewColumn(name).SetType(t) } // NewNullTimeColumn creates a new nullable TimeType column. func NewNullTimeColumn(name, typ string) *Column { return NewTimeColumn(name, typ). SetNull(true) } // NewJSONColumn creates a new JSONType column. func NewJSONColumn(name, typ string) *Column { return NewColumn(name). SetType(&JSONType{T: typ}) } // NewNullJSONColumn creates a new nullable JSONType column. func NewNullJSONColumn(name, typ string) *Column { return NewJSONColumn(name, typ). SetNull(true) } // NewSpatialColumn creates a new SpatialType column. func NewSpatialColumn(name, typ string) *Column { return NewColumn(name). SetType(&SpatialType{T: typ}) } // NewNullSpatialColumn creates a new nullable SpatialType column. func NewNullSpatialColumn(name, typ string) *Column { return NewSpatialColumn(name, typ). 
SetNull(true) } // SetNull configures the nullability of the column func (c *Column) SetNull(b bool) *Column { if c.Type == nil { c.Type = &ColumnType{} } c.Type.Null = b return c } // SetType configures the type of the column func (c *Column) SetType(t Type) *Column { if c.Type == nil { c.Type = &ColumnType{} } c.Type.Type = t return c } // SetDefault configures the default of the column func (c *Column) SetDefault(x Expr) *Column { c.Default = x return c } // SetCharset sets or appends the Charset attribute // to the column with the given value. func (c *Column) SetCharset(v string) *Column { replaceOrAppend(&c.Attrs, &Charset{V: v}) return c } // UnsetCharset unsets the Charset attribute. func (c *Column) UnsetCharset() *Column { del(&c.Attrs, &Charset{}) return c } // SetCollation sets or appends the Collation attribute // to the column with the given value. func (c *Column) SetCollation(v string) *Column { replaceOrAppend(&c.Attrs, &Collation{V: v}) return c } // UnsetCollation the Collation attribute. func (c *Column) UnsetCollation() *Column { del(&c.Attrs, &Collation{}) return c } // SetComment sets or appends the Comment attribute // to the column with the given value. func (c *Column) SetComment(v string) *Column { replaceOrAppend(&c.Attrs, &Comment{Text: v}) return c } // SetGeneratedExpr sets or appends the GeneratedExpr attribute. func (c *Column) SetGeneratedExpr(x *GeneratedExpr) *Column { replaceOrAppend(&c.Attrs, x) return c } // AddAttrs adds additional attributes to the column. func (c *Column) AddAttrs(attrs ...Attr) *Column { c.Attrs = append(c.Attrs, attrs...) return c } // NewCheck creates a new check. func NewCheck() *Check { return &Check{} } // SetName configures the name of the check constraint. func (c *Check) SetName(name string) *Check { c.Name = name return c } // SetExpr configures the expression of the check constraint. 
func (c *Check) SetExpr(expr string) *Check { c.Expr = expr return c } // AddAttrs adds additional attributes to the check constraint. func (c *Check) AddAttrs(attrs ...Attr) *Check { c.Attrs = append(c.Attrs, attrs...) return c } // NewIndex creates a new index with the given name. func NewIndex(name string) *Index { return &Index{Name: name} } // NewUniqueIndex creates a new unique index with the given name. func NewUniqueIndex(name string) *Index { return NewIndex(name).SetUnique(true) } // NewPrimaryKey creates a new primary-key index // for the given columns. func NewPrimaryKey(columns ...*Column) *Index { return new(Index).SetUnique(true).AddColumns(columns...) } // SetName configures the name of the index. func (i *Index) SetName(name string) *Index { i.Name = name return i } // SetUnique configures the uniqueness of the index. func (i *Index) SetUnique(b bool) *Index { i.Unique = b return i } // SetTable configures the table of the index. func (i *Index) SetTable(t *Table) *Index { i.Table = t return i } // SetComment sets or appends the Comment attribute // to the index with the given value. func (i *Index) SetComment(v string) *Index { replaceOrAppend(&i.Attrs, &Comment{Text: v}) return i } // AddAttrs adds additional attributes to the index. func (i *Index) AddAttrs(attrs ...Attr) *Index { i.Attrs = append(i.Attrs, attrs...) return i } // AddColumns adds the columns to index parts. func (i *Index) AddColumns(columns ...*Column) *Index { for _, c := range columns { if !c.hasIndex(i) { c.Indexes = append(c.Indexes, i) } i.Parts = append(i.Parts, &IndexPart{SeqNo: len(i.Parts), C: c}) } return i } func (c *Column) hasIndex(idx *Index) bool { for i := range c.Indexes { if c.Indexes[i] == idx { return true } } return false } // AddExprs adds the expressions to index parts. func (i *Index) AddExprs(exprs ...Expr) *Index { for _, x := range exprs { i.Parts = append(i.Parts, &IndexPart{SeqNo: len(i.Parts), X: x}) } return i } // AddParts appends the given parts. 
func (i *Index) AddParts(parts ...*IndexPart) *Index { for _, p := range parts { if p.C != nil && !p.C.hasIndex(i) { p.C.Indexes = append(p.C.Indexes, i) } p.SeqNo = len(i.Parts) i.Parts = append(i.Parts, p) } return i } // NewIndexPart creates a new index part. func NewIndexPart() *IndexPart { return &IndexPart{} } // NewColumnPart creates a new index part with the given column. func NewColumnPart(c *Column) *IndexPart { return &IndexPart{C: c} } // NewExprPart creates a new index part with the given expression. func NewExprPart(x Expr) *IndexPart { return &IndexPart{X: x} } // SetDesc configures the "DESC" attribute of the key part. func (p *IndexPart) SetDesc(b bool) *IndexPart { p.Desc = b return p } // AddAttrs adds and additional attributes to the index-part. func (p *IndexPart) AddAttrs(attrs ...Attr) *IndexPart { p.Attrs = append(p.Attrs, attrs...) return p } // SetColumn sets the column of the index-part. func (p *IndexPart) SetColumn(c *Column) *IndexPart { p.C = c return p } // SetExpr sets the expression of the index-part. func (p *IndexPart) SetExpr(x Expr) *IndexPart { p.X = x return p } // NewForeignKey creates a new foreign-key with // the given constraints/symbol name. func NewForeignKey(symbol string) *ForeignKey { return &ForeignKey{Symbol: symbol} } // SetTable configures the table that holds the foreign-key (child table). func (f *ForeignKey) SetTable(t *Table) *ForeignKey { f.Table = t return f } // AddColumns appends columns to the child-table columns. func (f *ForeignKey) AddColumns(columns ...*Column) *ForeignKey { for _, c := range columns { if !c.hasForeignKey(f) { c.ForeignKeys = append(c.ForeignKeys, f) } } f.Columns = append(f.Columns, columns...) return f } func (c *Column) hasForeignKey(fk *ForeignKey) bool { for i := range c.ForeignKeys { if c.ForeignKeys[i] == fk { return true } } return false } // SetRefTable configures the referenced/parent table. 
func (f *ForeignKey) SetRefTable(t *Table) *ForeignKey { f.RefTable = t return f } // AddRefColumns appends columns to the parent-table columns. func (f *ForeignKey) AddRefColumns(columns ...*Column) *ForeignKey { f.RefColumns = append(f.RefColumns, columns...) return f } // SetOnUpdate sets the ON UPDATE constraint action. func (f *ForeignKey) SetOnUpdate(o ReferenceOption) *ForeignKey { f.OnUpdate = o return f } // SetOnDelete sets the ON DELETE constraint action. func (f *ForeignKey) SetOnDelete(o ReferenceOption) *ForeignKey { f.OnDelete = o return f } // replaceOrAppend searches an attribute of the same type as v in // the list and replaces it. Otherwise, v is appended to the list. func replaceOrAppend(attrs *[]Attr, v Attr) { t := reflect.TypeOf(v) for i := range *attrs { if reflect.TypeOf((*attrs)[i]) == t { (*attrs)[i] = v return } } *attrs = append(*attrs, v) } // ReplaceOrAppend searches an attribute of the same type as v in // the list and replaces it. Otherwise, v is appended to the list. func ReplaceOrAppend(attrs *[]Attr, v Attr) { replaceOrAppend(attrs, v) } // del searches an attribute of the same type as v in // the list and delete it. func del(attrs *[]Attr, v Attr) { t := reflect.TypeOf(v) for i := range *attrs { if reflect.TypeOf((*attrs)[i]) == t { *attrs = append((*attrs)[:i], (*attrs)[i+1:]...) return } } } atlas-0.7.2/sql/schema/dsl_test.go000066400000000000000000000145731431455511600170520ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package schema_test import ( "testing" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestTable_AddColumns(t *testing.T) { users := schema.NewTable("users"). SetComment("users table"). 
AddColumns( schema.NewBoolColumn("active", "bool"), schema.NewDecimalColumn("age", "decimal"), schema.NewNullStringColumn("name", "varchar", schema.StringSize(255)), ) require.Equal( t, &schema.Table{ Name: "users", Attrs: []schema.Attr{ &schema.Comment{Text: "users table"}, }, Columns: []*schema.Column{ {Name: "active", Type: &schema.ColumnType{Type: &schema.BoolType{T: "bool"}}}, {Name: "age", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "decimal"}}}, {Name: "name", Type: &schema.ColumnType{Null: true, Type: &schema.StringType{T: "varchar", Size: 255}}}, }, }, users, ) } func TestSchema_AddTables(t *testing.T) { userColumns := []*schema.Column{ schema.NewIntColumn("id", "int"), schema.NewBoolColumn("active", "boolean"), schema.NewNullStringColumn("name", "varchar", schema.StringSize(255)), schema.NewTimeColumn("registered_at", "timestamp", schema.TimePrecision(6)), } users := schema.NewTable("users"). AddColumns(userColumns...). SetPrimaryKey(schema.NewPrimaryKey(userColumns[0])). SetComment("users table"). AddIndexes( schema.NewUniqueIndex("unique_name"). AddColumns(userColumns[2]). SetComment("index comment"), ) postColumns := []*schema.Column{ schema.NewIntColumn("id", "int"), schema.NewStringColumn("text", "longtext"), schema.NewNullIntColumn("author_id", "int"), } posts := schema.NewTable("posts"). AddColumns(postColumns...). SetPrimaryKey(schema.NewPrimaryKey(postColumns[0])). SetComment("posts table"). AddForeignKeys( schema.NewForeignKey("author_id"). AddColumns(postColumns[2]). SetRefTable(users). AddRefColumns(userColumns[0]). SetOnDelete(schema.Cascade). 
SetOnUpdate(schema.SetNull), ) require.Equal( t, func() *schema.Schema { p := 6 s := &schema.Schema{Name: "public"} users := &schema.Table{ Name: "users", Schema: s, Attrs: []schema.Attr{ &schema.Comment{Text: "users table"}, }, Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}, {Name: "active", Type: &schema.ColumnType{Type: &schema.BoolType{T: "boolean"}}}, {Name: "name", Type: &schema.ColumnType{Null: true, Type: &schema.StringType{T: "varchar", Size: 255}}}, {Name: "registered_at", Type: &schema.ColumnType{Null: false, Type: &schema.TimeType{T: "timestamp", Precision: &p}}}, }, } s.Tables = append(s.Tables, users) users.PrimaryKey = &schema.Index{Unique: true, Parts: []*schema.IndexPart{{C: users.Columns[0]}}} users.PrimaryKey.Table = users users.Columns[0].Indexes = append(users.Columns[0].Indexes, users.PrimaryKey) users.Indexes = append(users.Indexes, &schema.Index{ Name: "unique_name", Unique: true, Parts: []*schema.IndexPart{{C: users.Columns[2]}}, Attrs: []schema.Attr{&schema.Comment{Text: "index comment"}}, }) users.Indexes[0].Table = users users.Columns[2].Indexes = users.Indexes posts := &schema.Table{ Name: "posts", Schema: s, Attrs: []schema.Attr{ &schema.Comment{Text: "posts table"}, }, Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}, {Name: "text", Type: &schema.ColumnType{Type: &schema.StringType{T: "longtext"}}}, {Name: "author_id", Type: &schema.ColumnType{Null: true, Type: &schema.IntegerType{T: "int"}}}, }, } s.Tables = append(s.Tables, posts) posts.PrimaryKey = &schema.Index{Unique: true, Parts: []*schema.IndexPart{{C: posts.Columns[0]}}} posts.PrimaryKey.Table = posts posts.Columns[0].Indexes = append(posts.Columns[0].Indexes, posts.PrimaryKey) posts.ForeignKeys = append(posts.ForeignKeys, &schema.ForeignKey{ Symbol: "author_id", Table: posts, Columns: posts.Columns[2:], RefTable: users, RefColumns: users.Columns[0:1], OnDelete: 
schema.Cascade, OnUpdate: schema.SetNull, }) posts.Columns[2].ForeignKeys = posts.ForeignKeys return s }(), schema.New("public").AddTables(users, posts), ) } func TestSchema_SetCharset(t *testing.T) { s := schema.New("public") require.Empty(t, s.Attrs) s.SetCharset("utf8mb4") require.Len(t, s.Attrs, 1) require.Equal(t, &schema.Charset{V: "utf8mb4"}, s.Attrs[0]) s.SetCharset("latin1") require.Len(t, s.Attrs, 1) require.Equal(t, &schema.Charset{V: "latin1"}, s.Attrs[0]) s.UnsetCharset() require.Empty(t, s.Attrs) } func TestSchema_SetCollation(t *testing.T) { s := schema.New("public") require.Empty(t, s.Attrs) s.SetCollation("utf8mb4_general_ci") require.Len(t, s.Attrs, 1) require.Equal(t, &schema.Collation{V: "utf8mb4_general_ci"}, s.Attrs[0]) s.SetCollation("latin1_swedish_ci") require.Len(t, s.Attrs, 1) require.Equal(t, &schema.Collation{V: "latin1_swedish_ci"}, s.Attrs[0]) s.UnsetCollation() require.Empty(t, s.Attrs) } func TestSchema_SetComment(t *testing.T) { s := schema.New("public") require.Empty(t, s.Attrs) s.SetComment("1") require.Len(t, s.Attrs, 1) require.Equal(t, &schema.Comment{Text: "1"}, s.Attrs[0]) s.SetComment("2") require.Len(t, s.Attrs, 1) require.Equal(t, &schema.Comment{Text: "2"}, s.Attrs[0]) } func TestSchema_SetGeneratedExpr(t *testing.T) { c := schema.NewIntColumn("c", "int") require.Empty(t, c.Attrs) x := &schema.GeneratedExpr{Expr: "d*2", Type: "VIRTUAL"} c.SetGeneratedExpr(x) require.Equal(t, []schema.Attr{x}, c.Attrs) } func TestCheck(t *testing.T) { enforced := &struct{ schema.Attr }{} tbl := schema.NewTable("table"). AddColumns( schema.NewColumn("price1"), schema.NewColumn("price2"), ) require.Empty(t, tbl.Attrs) tbl.AddChecks( schema.NewCheck(). SetName("unique prices"). SetExpr("price1 <> price2"), schema.NewCheck(). SetExpr("price1 > 0"). 
AddAttrs(enforced), ) require.Len(t, tbl.Attrs, 2) require.Equal(t, &schema.Check{ Name: "unique prices", Expr: "price1 <> price2", }, tbl.Attrs[0]) require.Equal(t, &schema.Check{ Expr: "price1 > 0", Attrs: []schema.Attr{enforced}, }, tbl.Attrs[1]) } atlas-0.7.2/sql/schema/inspect.go000066400000000000000000000110461431455511600166660ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package schema import ( "context" "database/sql" "errors" ) // A NotExistError wraps another error to retain its original text // but makes it possible to the migrator to catch it. type NotExistError struct { Err error } func (e NotExistError) Error() string { return e.Err.Error() } // IsNotExistError reports if an error is a NotExistError. func IsNotExistError(err error) bool { if err == nil { return false } var e *NotExistError return errors.As(err, &e) } // ExecQuerier wraps the two standard sql.DB methods. type ExecQuerier interface { QueryContext(ctx context.Context, query string, args ...any) (*sql.Rows, error) ExecContext(ctx context.Context, query string, args ...any) (sql.Result, error) } // An InspectMode controls the amount and depth of information returned on inspection. type InspectMode uint const ( // InspectSchemas enables schema inspection. InspectSchemas InspectMode = 1 << iota // InspectTables enables schema tables inspection including // all its child resources (e.g. columns or indexes). InspectTables ) // Is reports whether the given mode is enabled. func (m InspectMode) Is(i InspectMode) bool { return m&i != 0 } type ( // InspectOptions describes options for Inspector. InspectOptions struct { // Mode defines the amount of information returned by InspectSchema. // If zero, InspectSchema inspects whole resources in the schema. Mode InspectMode // Tables to inspect. 
Empty means all tables in the schema. Tables []string // Exclude defines a list of glob patterns used to filter resources from inspection. // The syntax used by the different drivers is implemented as follows: // // t // exclude table 't'. // * // exclude all tables. // t.c // exclude column, index and foreign-key named 'c' in table 't'. // t.* // the last item defines the filtering; all resources under 't' are excluded. // *.c // the last item defines the filtering; all resourced named 'c' are excluded in all tables. // *.* // the last item defines the filtering; all resourced under all tables are excluded. // Exclude []string } // InspectRealmOption describes options for RealmInspector. InspectRealmOption struct { // Mode defines the amount of information returned by InspectRealm. // If zero, InspectRealm inspects all schemas and their child resources. Mode InspectMode // Schemas to inspect. Empty means all schemas in the realm. Schemas []string // Exclude defines a list of glob patterns used to filter resources from inspection. // The syntax used by the different drivers is implemented as follows: // // s // exclude schema 't'. // * // exclude all schemas. // s.t // exclude table 't' under schema 's'. // s.* // the last item defines the filtering; all tables under 's' are excluded. // *.t // the last item defines the filtering; all tables named 't' are excluded in all schemas. // *.* // the last item defines the filtering; all tables under all schemas are excluded. // *.*.c // the last item defines the filtering; all resourced named 'c' are excluded in all tables. // *.*.* // the last item defines the filtering; all resources are excluded in all tables. // Exclude []string } // Inspector is the interface implemented by the different database // drivers for inspecting schema or databases. Inspector interface { // InspectSchema returns the schema description by its name. An empty name means the // "attached schema" (e.g. 
SCHEMA() in MySQL or CURRENT_SCHEMA() in PostgreSQL). // A NotExistError error is returned if the schema does not exist in the database. InspectSchema(ctx context.Context, name string, opts *InspectOptions) (*Schema, error) // InspectRealm returns the description of the connected database. InspectRealm(ctx context.Context, opts *InspectRealmOption) (*Realm, error) } ) // Normalizer is the interface implemented by the different database drivers for // "normalizing" schema objects. i.e. converting schema objects defined in natural // form to their representation in the database. Thus, two schema objects are equal // if their normal forms are equal. type Normalizer interface { // NormalizeSchema returns the normal representation of a schema. NormalizeSchema(context.Context, *Schema) (*Schema, error) // NormalizeRealm returns the normal representation of a database. NormalizeRealm(context.Context, *Realm) (*Realm, error) } atlas-0.7.2/sql/schema/migrate.go000066400000000000000000000255071431455511600166600ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package schema import ( "context" "errors" "time" ) type ( // A Change represents a schema change. The types below implement this // interface and can be used for describing schema changes. // // The Change interface can also be implemented outside this package // as follows: // // type RenameType struct { // schema.Change // From, To string // } // // var t schema.Change = &RenameType{From: "old", To: "new"} // Change interface { change() } // Clause carries additional information that can be added // to schema changes. 
The Clause interface can be implemented // outside this package as follows: // // type Authorization struct { // schema.Clause // UserName string // } // // var c schema.Clause = &Authorization{UserName: "a8m"} // Clause interface { clause() } // AddSchema describes a schema (named database) creation change. // Unlike table creation, schemas and their elements are described // with separate changes. For example, "AddSchema" and "AddTable" AddSchema struct { S *Schema Extra []Clause // Extra clauses and options. } // DropSchema describes a schema (named database) removal change. DropSchema struct { S *Schema Extra []Clause // Extra clauses and options. } // ModifySchema describes a modification change for schema attributes. ModifySchema struct { S *Schema Changes []Change } // AddTable describes a table creation change. AddTable struct { T *Table Extra []Clause // Extra clauses and options. } // DropTable describes a table removal change. DropTable struct { T *Table Extra []Clause // Extra clauses. } // ModifyTable describes a table modification change. ModifyTable struct { T *Table Changes []Change } // RenameTable describes a table rename change. RenameTable struct { From, To *Table } // AddColumn describes a column creation change. AddColumn struct { C *Column } // DropColumn describes a column removal change. DropColumn struct { C *Column } // ModifyColumn describes a change that modifies a column. ModifyColumn struct { From, To *Column Change ChangeKind } // RenameColumn describes a column rename change. RenameColumn struct { From, To *Column } // AddIndex describes an index creation change. AddIndex struct { I *Index } // DropIndex describes an index removal change. DropIndex struct { I *Index } // ModifyIndex describes an index modification. ModifyIndex struct { From, To *Index Change ChangeKind } // RenameIndex describes an index rename change. RenameIndex struct { From, To *Index } // AddForeignKey describes a foreign-key creation change. 
AddForeignKey struct { F *ForeignKey } // DropForeignKey describes a foreign-key removal change. DropForeignKey struct { F *ForeignKey } // ModifyForeignKey describes a change that modifies a foreign-key. ModifyForeignKey struct { From, To *ForeignKey Change ChangeKind } // AddCheck describes a CHECK constraint creation change. AddCheck struct { C *Check } // DropCheck describes a CHECK constraint removal change. DropCheck struct { C *Check } // ModifyCheck describes a change that modifies a check. ModifyCheck struct { From, To *Check Change ChangeKind } // AddAttr describes an attribute addition. AddAttr struct { A Attr } // DropAttr describes an attribute removal. DropAttr struct { A Attr } // ModifyAttr describes a change that modifies an element attribute. ModifyAttr struct { From, To Attr } // IfExists represents a clause in a schema change that is commonly // supported by multiple statements (e.g. DROP TABLE or DROP SCHEMA). IfExists struct{} // IfNotExists represents a clause in a schema change that is commonly // supported by multiple statements (e.g. CREATE TABLE or CREATE SCHEMA). IfNotExists struct{} ) // A ChangeKind describes a change kind that can be combined // using a set of flags. The zero kind is no change. type ChangeKind uint const ( // NoChange holds the zero value of a change kind. NoChange ChangeKind = 0 // Common changes. // ChangeAttr describes attributes change of an element. // For example, a table CHECK was added or changed. ChangeAttr ChangeKind = 1 << (iota - 1) // ChangeCharset describes character-set change. ChangeCharset // ChangeCollate describes collation/encoding change. ChangeCollate // ChangeComment describes comment chang (of any element). ChangeComment // Column specific changes. // ChangeNull describe a change to the NULL constraint. ChangeNull // ChangeType describe a column type change. ChangeType // ChangeDefault describe a column default change. 
ChangeDefault // ChangeGenerated describe a change to the generated expression. ChangeGenerated // Index specific changes. // ChangeUnique describes a change to the uniqueness constraint. // For example, an index was changed from non-unique to unique. ChangeUnique // ChangeParts describes a change to one or more of the index parts. // For example, index keeps its previous name, but the columns order // was changed. ChangeParts // Foreign key specific changes. // ChangeColumn describes a change to the foreign-key (child) columns. ChangeColumn // ChangeRefColumn describes a change to the foreign-key (parent) columns. ChangeRefColumn // ChangeRefTable describes a change to the foreign-key (parent) table. ChangeRefTable // ChangeUpdateAction describes a change to the foreign-key update action. ChangeUpdateAction // ChangeDeleteAction describes a change to the foreign-key delete action. ChangeDeleteAction ) // Is reports whether c is match the given change kind. func (k ChangeKind) Is(c ChangeKind) bool { return k == c || k&c != 0 } // Differ is the interface implemented by the different // drivers for comparing and diffing schema top elements. type Differ interface { // RealmDiff returns a diff report for migrating a realm // (or a database) from state "from" to state "to". An error // is returned if such step is not possible. RealmDiff(from, to *Realm) ([]Change, error) // SchemaDiff returns a diff report for migrating a schema // from state "from" to state "to". An error is returned // if such step is not possible. SchemaDiff(from, to *Schema) ([]Change, error) // TableDiff returns a diff report for migrating a table // from state "from" to state "to". An error is returned // if such step is not possible. TableDiff(from, to *Table) ([]Change, error) } // ErrLocked is returned on Lock calls which have failed to obtain the lock. 
var ErrLocked = errors.New("sql/schema: lock is held by other session") type ( // UnlockFunc is returned by the Locker to explicitly // release the named "advisory lock". UnlockFunc func() error // Locker is an interface that is optionally implemented by the different drivers // for obtaining an "advisory lock" with the given name. Locker interface { // Lock acquires a named "advisory lock", using the given timeout. Negative value means no timeout, // and the zero value means a "try lock" mode. i.e. return immediately if the lock is already taken. // The returned unlock function is used to release the advisory lock acquired by the session. // // An ErrLocked is returned if the operation failed to obtain the lock in all different timeout modes. Lock(ctx context.Context, name string, timeout time.Duration) (UnlockFunc, error) } ) // Changes is a list of changes allow for searching and mutating changes. type Changes []Change // IndexAddTable returns the index of the first AddTable in the changes with // the given name, or -1 if there is no such change in the Changes. func (c Changes) IndexAddTable(name string) int { return c.search(func(c Change) bool { a, ok := c.(*AddTable) return ok && a.T.Name == name }) } // IndexDropTable returns the index of the first DropTable in the changes with // the given name, or -1 if there is no such change in the Changes. func (c Changes) IndexDropTable(name string) int { return c.search(func(c Change) bool { a, ok := c.(*DropTable) return ok && a.T.Name == name }) } // IndexAddColumn returns the index of the first AddColumn in the changes with // the given name, or -1 if there is no such change in the Changes. func (c Changes) IndexAddColumn(name string) int { return c.search(func(c Change) bool { a, ok := c.(*AddColumn) return ok && a.C.Name == name }) } // IndexDropColumn returns the index of the first DropColumn in the changes with // the given name, or -1 if there is no such change in the Changes. 
func (c Changes) IndexDropColumn(name string) int { return c.search(func(c Change) bool { d, ok := c.(*DropColumn) return ok && d.C.Name == name }) } // IndexAddIndex returns the index of the first AddIndex in the changes with // the given name, or -1 if there is no such change in the Changes. func (c Changes) IndexAddIndex(name string) int { return c.search(func(c Change) bool { a, ok := c.(*AddIndex) return ok && a.I.Name == name }) } // IndexDropIndex returns the index of the first DropIndex in the changes with // the given name, or -1 if there is no such change in the Changes. func (c Changes) IndexDropIndex(name string) int { return c.search(func(c Change) bool { a, ok := c.(*DropIndex) return ok && a.I.Name == name }) } // RemoveIndex removes elements in the given indexes from the Changes. func (c *Changes) RemoveIndex(indexes ...int) { changes := make([]Change, 0, len(*c)-len(indexes)) Loop: for i := range *c { for _, idx := range indexes { if i == idx { continue Loop } } changes = append(changes, (*c)[i]) } *c = changes } // search returns the index of the first call to f that returns true, or -1. func (c Changes) search(f func(Change) bool) int { for i := range c { if f(c[i]) { return i } } return -1 } // changes. 
func (*AddAttr) change() {} func (*DropAttr) change() {} func (*ModifyAttr) change() {} func (*AddSchema) change() {} func (*DropSchema) change() {} func (*ModifySchema) change() {} func (*AddTable) change() {} func (*DropTable) change() {} func (*ModifyTable) change() {} func (*RenameTable) change() {} func (*AddIndex) change() {} func (*DropIndex) change() {} func (*ModifyIndex) change() {} func (*RenameIndex) change() {} func (*AddCheck) change() {} func (*DropCheck) change() {} func (*ModifyCheck) change() {} func (*AddColumn) change() {} func (*DropColumn) change() {} func (*ModifyColumn) change() {} func (*RenameColumn) change() {} func (*AddForeignKey) change() {} func (*DropForeignKey) change() {} func (*ModifyForeignKey) change() {} // clauses. func (*IfExists) clause() {} func (*IfNotExists) clause() {} atlas-0.7.2/sql/schema/migrate_test.go000066400000000000000000000100051431455511600177020ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package schema_test import ( "fmt" "log" "strconv" "testing" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestChanges_IndexAddTable(t *testing.T) { changes := schema.Changes{ &schema.AddTable{T: schema.NewTable("users")}, &schema.DropTable{T: schema.NewTable("posts")}, &schema.AddTable{T: schema.NewTable("posts")}, &schema.AddTable{T: schema.NewTable("posts")}, } require.Equal(t, 2, changes.IndexAddTable("posts")) require.Equal(t, -1, changes.IndexAddTable("post_tags")) } func TestChanges_IndexDropTable(t *testing.T) { changes := schema.Changes{ &schema.DropTable{T: schema.NewTable("users")}, &schema.AddTable{T: schema.NewTable("posts")}, &schema.DropTable{T: schema.NewTable("posts")}, } require.Equal(t, 2, changes.IndexDropTable("posts")) require.Equal(t, -1, changes.IndexDropTable("post_tags")) } func TestChanges_IndexAddColumn(t *testing.T) { changes := schema.Changes{ &schema.AddColumn{C: schema.NewColumn("name")}, &schema.DropColumn{C: schema.NewColumn("name")}, &schema.AddColumn{C: schema.NewColumn("name")}, } require.Equal(t, 0, changes.IndexAddColumn("name")) require.Equal(t, -1, changes.IndexAddColumn("created_at")) } func TestChanges_IndexDropColumn(t *testing.T) { changes := schema.Changes{ &schema.AddColumn{C: schema.NewColumn("name")}, &schema.DropColumn{C: schema.NewColumn("name")}, &schema.AddColumn{C: schema.NewColumn("name")}, } require.Equal(t, 1, changes.IndexDropColumn("name")) require.Equal(t, -1, changes.IndexDropColumn("created_at")) } func TestChanges_IndexAddIndex(t *testing.T) { changes := schema.Changes{ &schema.DropIndex{I: schema.NewIndex("name")}, &schema.AddIndex{I: schema.NewIndex("created_at")}, &schema.AddIndex{I: schema.NewIndex("name")}, } require.Equal(t, 2, changes.IndexAddIndex("name")) require.Equal(t, -1, changes.IndexAddIndex("age")) } func TestChanges_IndexDropIndex(t *testing.T) { changes := schema.Changes{ &schema.AddIndex{I: schema.NewIndex("name")}, &schema.DropIndex{I: 
schema.NewIndex("created_at")}, &schema.DropIndex{I: schema.NewIndex("name")}, } require.Equal(t, 2, changes.IndexDropIndex("name")) require.Equal(t, -1, changes.IndexDropIndex("age")) } func TestChanges_RemoveIndex(t *testing.T) { changes := make(schema.Changes, 0, 5) for i := 0; i < 5; i++ { changes = append(changes, &schema.AddColumn{C: schema.NewColumn(strconv.Itoa(i))}) } changes.RemoveIndex(0) require.Equal(t, 4, len(changes)) for i := 0; i < 4; i++ { require.Equal(t, strconv.Itoa(i+1), changes[i].(*schema.AddColumn).C.Name) } changes.RemoveIndex(0, 3, 2) require.Equal(t, 1, len(changes)) require.Equal(t, "2", changes[0].(*schema.AddColumn).C.Name) } func ExampleChanges_Replace() { changes := schema.Changes{ &schema.AddIndex{I: schema.NewIndex("id")}, &schema.AddColumn{C: schema.NewColumn("new_name")}, &schema.AddColumn{C: schema.NewColumn("id")}, &schema.AddColumn{C: schema.NewColumn("created_at")}, &schema.DropColumn{C: schema.NewColumn("old_name")}, } i, j := changes.IndexAddColumn("new_name"), changes.IndexDropColumn("old_name") if i == -1 || j == -1 { log.Fatalln("Unexpected change positions") } // Replace "add" and "drop" with "rename". changes = append(changes, &schema.RenameColumn{From: changes[j].(*schema.DropColumn).C, To: changes[i].(*schema.AddColumn).C}) changes.RemoveIndex(i, j) for _, c := range changes { switch c := c.(type) { case *schema.AddColumn: fmt.Printf("%T(%s)\n", c, c.C.Name) case *schema.DropColumn: fmt.Printf("%T(%s)\n", c, c.C.Name) case *schema.RenameColumn: fmt.Printf("%T(%s -> %s)\n", c, c.From.Name, c.To.Name) case *schema.AddIndex: fmt.Printf("%T(%s)\n", c, c.I.Name) } } // Output: // *schema.AddIndex(id) // *schema.AddColumn(id) // *schema.AddColumn(created_at) // *schema.RenameColumn(old_name -> new_name) } atlas-0.7.2/sql/schema/schema.go000066400000000000000000000166141431455511600164670ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. 
// This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package schema type ( // A Realm or a database describes a domain of schema resources that are logically connected // and can be accessed and queried in the same connection (e.g. a physical database instance). Realm struct { Schemas []*Schema Attrs []Attr } // A Schema describes a database schema (i.e. named database). Schema struct { Name string Realm *Realm Tables []*Table Attrs []Attr // Attrs and options. } // A Table represents a table definition. Table struct { Name string Schema *Schema Columns []*Column Indexes []*Index PrimaryKey *Index ForeignKeys []*ForeignKey Attrs []Attr // Attrs, constraints and options. } // A Column represents a column definition. Column struct { Name string Type *ColumnType Default Expr Attrs []Attr Indexes []*Index // Foreign keys that this column is // part of their child columns. ForeignKeys []*ForeignKey } // ColumnType represents a column type that is implemented by the dialect. ColumnType struct { Type Type Raw string Null bool } // An Index represents an index definition. Index struct { Name string Unique bool Table *Table Attrs []Attr Parts []*IndexPart } // An IndexPart represents an index part that // can be either an expression or a column. IndexPart struct { // SeqNo represents the sequence number of the key part // in the index. SeqNo int // Desc indicates if the key part is stored in descending // order. All databases use ascending order as default. Desc bool X Expr C *Column Attrs []Attr } // A ForeignKey represents an index definition. ForeignKey struct { Symbol string Table *Table Columns []*Column RefTable *Table RefColumns []*Column OnUpdate ReferenceOption OnDelete ReferenceOption } ) // Schema returns the first schema that matched the given name. 
func (r *Realm) Schema(name string) (*Schema, bool) { for _, s := range r.Schemas { if s.Name == name { return s, true } } return nil, false } // Table returns the first table that matched the given name. func (s *Schema) Table(name string) (*Table, bool) { for _, t := range s.Tables { if t.Name == name { return t, true } } return nil, false } // Column returns the first column that matched the given name. func (t *Table) Column(name string) (*Column, bool) { for _, c := range t.Columns { if c.Name == name { return c, true } } return nil, false } // Index returns the first index that matched the given name. func (t *Table) Index(name string) (*Index, bool) { for _, i := range t.Indexes { if i.Name == name { return i, true } } return nil, false } // ForeignKey returns the first foreign-key that matched the given symbol (constraint name). func (t *Table) ForeignKey(symbol string) (*ForeignKey, bool) { for _, f := range t.ForeignKeys { if f.Symbol == symbol { return f, true } } return nil, false } // Column returns the first column that matches the given name. func (f *ForeignKey) Column(name string) (*Column, bool) { for _, c := range f.Columns { if c.Name == name { return c, true } } return nil, false } // RefColumn returns the first referenced column that matches the given name. func (f *ForeignKey) RefColumn(name string) (*Column, bool) { for _, c := range f.RefColumns { if c.Name == name { return c, true } } return nil, false } // ReferenceOption for constraint actions. type ReferenceOption string // Reference options (actions) specified by ON UPDATE and ON DELETE // subclauses of the FOREIGN KEY clause. const ( NoAction ReferenceOption = "NO ACTION" Restrict ReferenceOption = "RESTRICT" Cascade ReferenceOption = "CASCADE" SetNull ReferenceOption = "SET NULL" SetDefault ReferenceOption = "SET DEFAULT" ) type ( // A Type represents a database type. The types below implements this // interface and can be used for describing schemas. 
// // The Type interface can also be implemented outside this package as follows: // // type SpatialType struct { // schema.Type // T string // } // // var t schema.Type = &SpatialType{T: "point"} // Type interface { typ() } // EnumType represents an enum type. EnumType struct { T string // Optional type. Values []string // Enum values. Schema *Schema // Optional schema. } // BinaryType represents a type that stores a binary data. BinaryType struct { T string Size *int } // StringType represents a string type. StringType struct { T string Size int } // BoolType represents a boolean type. BoolType struct { T string } // IntegerType represents an int type. IntegerType struct { T string Unsigned bool Attrs []Attr } // DecimalType represents a fixed-point type that stores exact numeric values. DecimalType struct { T string Precision int Scale int Unsigned bool } // FloatType represents a floating-point type that stores approximate numeric values. FloatType struct { T string Unsigned bool Precision int } // TimeType represents a date/time type. TimeType struct { T string Precision *int } // JSONType represents a JSON type. JSONType struct { T string } // SpatialType represents a spatial/geometric type. SpatialType struct { T string } // UnsupportedType represents a type that is not supported by the drivers. UnsupportedType struct { T string } ) type ( // Expr defines an SQL expression in schema DDL. Expr interface { expr() } // Literal represents a basic literal expression like 1, or '1'. // String literals are usually quoted with single or double quotes. Literal struct { V string } // RawExpr represents a raw expression like "uuid()" or "current_timestamp()". // Unlike literals, raw expression are usually inlined as is on migration. RawExpr struct { X string } ) type ( // Attr represents the interface that all attributes implement. Attr interface { attr() } // Comment describes a schema element comment. 
Comment struct { Text string } // Charset describes a column or a table character-set setting. Charset struct { V string } // Collation describes a column or a table collation setting. Collation struct { V string } // Check describes a CHECK constraint. Check struct { Name string // Optional constraint name. Expr string // Actual CHECK. Attrs []Attr // Additional attributes (e.g. ENFORCED). } // GeneratedExpr describes the expression used for generating // the value of a generated/virtual column. GeneratedExpr struct { Expr string Type string // Optional type. e.g. STORED or VIRTUAL. } ) // expressions. func (*Literal) expr() {} func (*RawExpr) expr() {} // types. func (*BoolType) typ() {} func (*EnumType) typ() {} func (*TimeType) typ() {} func (*JSONType) typ() {} func (*FloatType) typ() {} func (*StringType) typ() {} func (*BinaryType) typ() {} func (*SpatialType) typ() {} func (*IntegerType) typ() {} func (*DecimalType) typ() {} func (*UnsupportedType) typ() {} // attributes. func (*Check) attr() {} func (*Comment) attr() {} func (*Charset) attr() {} func (*Collation) attr() {} func (*GeneratedExpr) attr() {} atlas-0.7.2/sql/sqlcheck/000077500000000000000000000000001431455511600152255ustar00rootroot00000000000000atlas-0.7.2/sql/sqlcheck/datadepend/000077500000000000000000000000001431455511600173165ustar00rootroot00000000000000atlas-0.7.2/sql/sqlcheck/datadepend/datadepend.go000066400000000000000000000142211431455511600217360ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package datadepend import ( "context" "errors" "fmt" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlcheck" ) type ( // Analyzer checks data-dependent changes. 
Analyzer struct { sqlcheck.Options Handler } // Handler holds the underlying driver handlers. Handler struct { // AddNotNull is applied when a new non-nullable column was // added to an existing table. AddNotNull ColumnHandler // ModifyNotNull is an optional handler applied when // a nullable column was changed to non-nullable. ModifyNotNull ColumnHandler } // ColumnPass wraps the information needed // by the handler below to diagnose columns. ColumnPass struct { *sqlcheck.Pass Change *sqlcheck.Change // Change context (statement). Table *schema.Table // The table this column belongs to. Column *schema.Column // The diagnosed column. } // ColumnHandler allows provide custom diagnostic for specific column rules. ColumnHandler func(*ColumnPass) ([]sqlcheck.Diagnostic, error) ) // New creates a new data-dependent analyzer with the given options. func New(r *schemahcl.Resource, h Handler) (*Analyzer, error) { az := &Analyzer{Handler: h} if r, ok := r.Resource(az.Name()); ok { if err := r.As(&az.Options); err != nil { return nil, fmt.Errorf("sql/sqlcheck: parsing datadepend check options: %w", err) } } return az, nil } // Name of the analyzer. Implements the sqlcheck.NamedAnalyzer interface. func (*Analyzer) Name() string { return "data_depend" } // Analyze runs data-depend analysis on MySQL changes. func (a *Analyzer) Analyze(ctx context.Context, p *sqlcheck.Pass) error { return a.Report(p, a.Diagnostics(ctx, p)) } // List of codes. var ( codeAddUniqueI = sqlcheck.Code("MF101") codeModUniqueI = sqlcheck.Code("MF102") codeAddNotNullC = sqlcheck.Code("MF103") codeModNotNullC = sqlcheck.Code("MF104") ) // Diagnostics runs the common analysis on the file and returns its diagnostics. 
func (a *Analyzer) Diagnostics(_ context.Context, p *sqlcheck.Pass) (diags []sqlcheck.Diagnostic) { for _, sc := range p.File.Changes { for _, c := range sc.Changes { m, ok := c.(*schema.ModifyTable) if !ok { continue } for _, c := range m.Changes { switch c := c.(type) { case *schema.AddIndex: column := func() *schema.Column { for i := range c.I.Parts { // We consider a column a non-new column if // it was not added in this migration file. if column := c.I.Parts[i].C; column != nil && p.File.ColumnSpan(m.T, column)&sqlcheck.SpanAdded == 0 { return column } } return nil }() // A unique index was added on an existing column. if c.I.Unique && column != nil { diags = append(diags, sqlcheck.Diagnostic{ Code: codeAddUniqueI, Pos: sc.Stmt.Pos, Text: fmt.Sprintf("Adding a unique index %q on table %q might fail in case column %q contains duplicate entries", c.I.Name, m.T.Name, column.Name), }) } case *schema.ModifyIndex: if c.Change.Is(schema.ChangeUnique) && c.To.Unique && p.File.IndexSpan(m.T, c.To)&sqlcheck.SpanAdded == 0 { diags = append(diags, sqlcheck.Diagnostic{ Code: codeModUniqueI, Pos: sc.Stmt.Pos, Text: fmt.Sprintf("Modifying an index %q on table %q might fail in case of duplicate entries", c.To.Name, m.T.Name), }) } case *schema.AddColumn: // In case the column is nullable without default // value and the table was not added in this file. if a.Handler.AddNotNull != nil && !c.C.Type.Null && c.C.Default == nil && p.File.TableSpan(m.T)&sqlcheck.SpanAdded != 1 { d, err := a.Handler.AddNotNull(&ColumnPass{Pass: p, Change: sc, Table: m.T, Column: c.C}) if err != nil { return } for i := range d { // In case there is no driver-specific code. if d[i].Code == "" { d[i].Code = codeAddNotNullC } } diags = append(diags, d...) 
} case *schema.ModifyColumn: switch { case p.File.TableSpan(m.T)&sqlcheck.SpanAdded == 1 || !(c.From.Type.Null && !c.To.Type.Null): case a.ModifyNotNull != nil: d, err := a.Handler.ModifyNotNull(&ColumnPass{Pass: p, Change: sc, Table: m.T, Column: c.To}) if err != nil { return } for i := range d { // In case there is no driver-specific code. if d[i].Code == "" { d[i].Code = codeModNotNullC } } diags = append(diags, d...) // In case the altered column was not added in this file, and the column // was changed nullable to non-nullable without back filling it with values. case !ColumnFilled(p.File, m.T, c.From, sc.Stmt.Pos): diags = append(diags, sqlcheck.Diagnostic{ Code: codeModNotNullC, Pos: sc.Stmt.Pos, Text: fmt.Sprintf("Modifying nullable column %q to non-nullable might fail in case it contains NULL values", c.To.Name), }) } } } } } return } // Report provides standard reporting for data-dependent changes. Drivers that // decorate this Analyzer should call this function to get consistent reporting // between dialects. func (a *Analyzer) Report(p *sqlcheck.Pass, diags []sqlcheck.Diagnostic) error { const reportText = "data dependent changes detected" if len(diags) > 0 { p.Reporter.WriteReport(sqlcheck.Report{Text: reportText, Diagnostics: diags}) if sqlx.V(a.Error) { return errors.New(reportText) } } return nil } // ColumnFilled checks if the column was filled with values before the given position. func ColumnFilled(f *sqlcheck.File, t *schema.Table, c *schema.Column, pos int) bool { // The parser used for parsing this file can check if the // given nullable column was filled before the given position. 
p, ok := f.Parser.(interface { ColumnFilledBefore(migrate.File, *schema.Table, *schema.Column, int) (bool, error) }) if !ok { return false } filled, _ := p.ColumnFilledBefore(f, t, c, pos) return filled } atlas-0.7.2/sql/sqlcheck/datadepend/datadepend_test.go000066400000000000000000000151031431455511600227750ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package datadepend_test import ( "context" "testing" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/internal/specutil" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlcheck" "ariga.io/atlas/sql/sqlcheck/datadepend" "ariga.io/atlas/sql/sqlclient" "github.com/stretchr/testify/require" ) func TestAnalyzer_AddUniqueIndex(t *testing.T) { var ( report *sqlcheck.Report pass = &sqlcheck.Pass{ Dev: &sqlclient.Client{}, File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ { Stmt: &migrate.Stmt{ Text: "ALTER TABLE users", }, Changes: schema.Changes{ &schema.ModifyTable{ T: schema.NewTable("users"). SetSchema(schema.New("test")). AddColumns( schema.NewColumn("a"), schema.NewColumn("b"), ), Changes: []schema.Change{ // Ignore new created columns. &schema.AddColumn{ C: schema.NewColumn("a"), }, &schema.AddIndex{ I: schema.NewUniqueIndex("idx_a"). AddColumns(schema.NewColumn("a")), }, // Report on existing columns. &schema.AddIndex{ I: schema.NewUniqueIndex("idx_b"). 
AddColumns(schema.NewColumn("b")), }, }, }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) az, err := datadepend.New(nil, datadepend.Handler{}) require.NoError(t, err) err = az.Analyze(context.Background(), pass) require.NoError(t, err) require.Equal(t, "data dependent changes detected", report.Text) require.Len(t, report.Diagnostics, 1) require.Equal(t, `Adding a unique index "idx_b" on table "users" might fail in case column "b" contains duplicate entries`, report.Diagnostics[0].Text) } func TestAnalyzer_ModifyUniqueIndex(t *testing.T) { var ( report *sqlcheck.Report pass = &sqlcheck.Pass{ Dev: &sqlclient.Client{}, File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ { Stmt: &migrate.Stmt{ Text: "ALTER TABLE users", }, Changes: schema.Changes{ &schema.ModifyTable{ T: schema.NewTable("users"). SetSchema(schema.New("test")). AddColumns( schema.NewColumn("a"), schema.NewColumn("b"), ), Changes: []schema.Change{ // Ignore new created columns. &schema.AddColumn{ C: schema.NewColumn("a"), }, &schema.ModifyIndex{ From: schema.NewIndex("idx_a"). AddColumns(schema.NewColumn("a")), To: schema.NewUniqueIndex("idx_a"). AddColumns(schema.NewColumn("a")), }, // Report on existing columns. &schema.ModifyIndex{ From: schema.NewIndex("idx_b"). AddColumns(schema.NewColumn("b")), To: schema.NewUniqueIndex("idx_b"). 
AddColumns(schema.NewColumn("b")), Change: schema.ChangeUnique, }, }, }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) az, err := datadepend.New(nil, datadepend.Handler{}) require.NoError(t, err) err = az.Analyze(context.Background(), pass) require.NoError(t, err) require.Equal(t, "data dependent changes detected", report.Text) require.Len(t, report.Diagnostics, 1) require.Equal(t, `Modifying an index "idx_b" on table "users" might fail in case of duplicate entries`, report.Diagnostics[0].Text) } func TestAnalyzer_ModifyNullability(t *testing.T) { var ( report *sqlcheck.Report pass = &sqlcheck.Pass{ Dev: &sqlclient.Client{}, File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ { Stmt: &migrate.Stmt{ Text: "ALTER TABLE users", }, Changes: schema.Changes{ &schema.ModifyTable{ T: schema.NewTable("users"). SetSchema(schema.New("test")). AddColumns( schema.NewNullIntColumn("a", "int"), ), Changes: []schema.Change{ &schema.ModifyColumn{ From: schema.NewNullIntColumn("a", "int"), To: schema.NewIntColumn("a", "int"), Change: schema.ChangeNull, }, }, }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) az, err := datadepend.New(nil, datadepend.Handler{}) require.NoError(t, err) err = az.Analyze(context.Background(), pass) require.NoError(t, err) require.Equal(t, "data dependent changes detected", report.Text) require.Len(t, report.Diagnostics, 1) require.Equal(t, `Modifying nullable column "a" to non-nullable might fail in case it contains NULL values`, report.Diagnostics[0].Text) } func TestAnalyzer_Options(t *testing.T) { var ( report *sqlcheck.Report pass = &sqlcheck.Pass{ Dev: &sqlclient.Client{}, File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ { Stmt: &migrate.Stmt{ Text: "ALTER TABLE users", }, Changes: schema.Changes{ &schema.ModifyTable{ T: schema.NewTable("users"). SetSchema(schema.New("test")). 
AddColumns( schema.NewColumn("a"), schema.NewColumn("b"), ), Changes: []schema.Change{ &schema.AddIndex{ I: schema.NewIndex("idx_a"). AddColumns(schema.NewColumn("a")), }, &schema.ModifyIndex{ From: schema.NewIndex("idx_b"). AddColumns(schema.NewColumn("b")), To: schema.NewUniqueIndex("idx_b"). AddColumns(schema.NewColumn("b")), Change: schema.ChangeUnique, }, }, }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) az, err := datadepend.New(&schemahcl.Resource{ Children: []*schemahcl.Resource{ { Type: "data_depend", Attrs: []*schemahcl.Attr{ specutil.BoolAttr("error", true), }, }, }, }, datadepend.Handler{}) require.NoError(t, err) err = az.Analyze(context.Background(), pass) require.EqualError(t, err, "data dependent changes detected") require.NotNil(t, report) } type testFile struct { name string migrate.File } func (t testFile) Name() string { return t.name } atlas-0.7.2/sql/sqlcheck/destructive/000077500000000000000000000000001431455511600175665ustar00rootroot00000000000000atlas-0.7.2/sql/sqlcheck/destructive/destructive.go000066400000000000000000000056221431455511600224630ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package destructive import ( "context" "errors" "fmt" "strings" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlcheck" ) // Analyzer checks for destructive changes. type Analyzer struct { sqlcheck.Options } // New creates a new destructive changes Analyzer with the given options. 
// New reads the "destructive" configuration block (if any) from r into the
// analyzer's Options. The check errors by default (Error = true) unless the
// configuration overrides it. r may be nil; Resource handles that case
// (see destructive.New(nil) in the package tests).
func New(r *schemahcl.Resource) (*Analyzer, error) {
	az := &Analyzer{}
	// Default: a detected destructive change is an error, not just a report.
	az.Error = sqlx.P(true)
	if r, ok := r.Resource(az.Name()); ok {
		if err := r.As(&az.Options); err != nil {
			return nil, fmt.Errorf("sql/sqlcheck: parsing destructive check options: %w", err)
		}
	}
	return az, nil
}

// List of codes.
var (
	codeDropS = sqlcheck.Code("DS101") // a schema was dropped
	codeDropT = sqlcheck.Code("DS102") // a table was dropped
	codeDropC = sqlcheck.Code("DS103") // a column was dropped
)

// Name of the analyzer. Implements the sqlcheck.NamedAnalyzer interface.
func (*Analyzer) Name() string {
	return "destructive"
}

// Analyze implements sqlcheck.Analyzer. It scans every change in the file and
// collects a diagnostic for each schema, table, or non-virtual column drop,
// skipping resources whose entire lifetime is contained in this file.
func (a *Analyzer) Analyze(_ context.Context, p *sqlcheck.Pass) error {
	var diags []sqlcheck.Diagnostic
	for _, sc := range p.File.Changes {
		for _, c := range sc.Changes {
			switch c := c.(type) {
			case *schema.DropSchema:
				// Skip schemas that were both created and dropped in this file.
				if p.File.SchemaSpan(c.S) != sqlcheck.SpanTemporary {
					var text string
					switch n := len(c.S.Tables); {
					case n == 0:
						text = fmt.Sprintf("Dropping schema %q", c.S.Name)
					case n == 1:
						text = fmt.Sprintf("Dropping non-empty schema %q with 1 table", c.S.Name)
					case n > 1:
						text = fmt.Sprintf("Dropping non-empty schema %q with %d tables", c.S.Name, n)
					}
					diags = append(diags, sqlcheck.Diagnostic{
						Code: codeDropS,
						Pos:  sc.Stmt.Pos,
						Text: text,
					})
				}
			case *schema.DropTable:
				// Report unless the whole schema is dropped in this file
				// (already covered by DS101) or the table is temporary here.
				if p.File.SchemaSpan(c.T.Schema) != sqlcheck.SpanDropped && p.File.TableSpan(c.T) != sqlcheck.SpanTemporary {
					diags = append(diags, sqlcheck.Diagnostic{
						Code: codeDropT,
						Pos:  sc.Stmt.Pos,
						Text: fmt.Sprintf("Dropping table %q", c.T.Name),
					})
				}
			case *schema.ModifyTable:
				for i := range c.Changes {
					d, ok := c.Changes[i].(*schema.DropColumn)
					if !ok || p.File.ColumnSpan(c.T, d.C) == sqlcheck.SpanTemporary {
						continue
					}
					// VIRTUAL generated columns store no data; dropping them is safe.
					if g := (schema.GeneratedExpr{}); !sqlx.Has(d.C.Attrs, &g) || strings.ToUpper(g.Type) != "VIRTUAL" {
						diags = append(diags, sqlcheck.Diagnostic{
							Code: codeDropC,
							Pos:  sc.Stmt.Pos,
							Text: fmt.Sprintf("Dropping non-virtual column %q", d.C.Name),
						})
					}
				}
			}
		}
	}
	if len(diags) > 0 {
		const reportText = "destructive changes detected"
		p.Reporter.WriteReport(sqlcheck.Report{Text: reportText, Diagnostics: diags})
		// Only fail the run when configured to do so (the default).
		if sqlx.V(a.Error) {
			return errors.New(reportText)
		}
	}
	return nil
}
atlas-0.7.2/sql/sqlcheck/destructive/destructive_test.go000066400000000000000000000140371431455511600235220ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

package destructive_test

import (
	"context"
	"testing"

	"ariga.io/atlas/schemahcl"
	"ariga.io/atlas/sql/internal/specutil"
	"ariga.io/atlas/sql/migrate"
	"ariga.io/atlas/sql/schema"
	"ariga.io/atlas/sql/sqlcheck"
	"ariga.io/atlas/sql/sqlcheck/destructive"
	"ariga.io/atlas/sql/sqlclient"

	"github.com/stretchr/testify/require"
)

// TestAnalyzer_DropTable verifies that dropping existing tables is reported,
// while re-created tables do not mask earlier drops.
func TestAnalyzer_DropTable(t *testing.T) {
	var (
		report *sqlcheck.Report
		pass   = &sqlcheck.Pass{
			Dev: &sqlclient.Client{},
			File: &sqlcheck.File{
				File: testFile{name: "1.sql"},
				Changes: []*sqlcheck.Change{
					{
						Stmt: &migrate.Stmt{
							Text: "DROP TABLE `users`",
						},
						Changes: schema.Changes{
							&schema.DropTable{
								T: schema.NewTable("users").
									SetSchema(schema.New("test")),
							},
						},
					},
					{
						Stmt: &migrate.Stmt{
							Text: "DROP TABLE `posts`",
						},
						Changes: schema.Changes{
							&schema.DropTable{
								T: schema.NewTable("posts").
									SetSchema(schema.New("test")),
							},
						},
					},
					{
						Stmt: &migrate.Stmt{
							Text: "CREATE TABLE `posts`",
						},
						Changes: schema.Changes{
							&schema.AddTable{
								T: schema.NewTable("posts").
SetSchema(schema.New("test")), }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) az, err := destructive.New(nil) require.NoError(t, err) err = az.Analyze(context.Background(), pass) require.Error(t, err) require.Equal(t, "destructive changes detected", report.Text) require.Len(t, report.Diagnostics, 2) require.Equal(t, `Dropping table "users"`, report.Diagnostics[0].Text) require.Equal(t, `Dropping table "posts"`, report.Diagnostics[1].Text) } func TestAnalyzer_SkipTemporaryTable(t *testing.T) { var ( report *sqlcheck.Report pass = &sqlcheck.Pass{ Dev: &sqlclient.Client{}, File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ { Stmt: &migrate.Stmt{ Text: "DROP TABLE `users`", }, Changes: schema.Changes{ &schema.AddTable{ T: schema.NewTable("users"). SetSchema(schema.New("test")), }, &schema.DropTable{ T: schema.NewTable("users"). SetSchema(schema.New("test")), }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) az, err := destructive.New(nil) require.NoError(t, err) err = az.Analyze(context.Background(), pass) require.NoError(t, err) require.Nil(t, report, "no report") } func TestAnalyzer_DropSchema(t *testing.T) { var ( report *sqlcheck.Report pass = &sqlcheck.Pass{ Dev: &sqlclient.Client{}, File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ { Stmt: &migrate.Stmt{ Text: "DROP SCHEMA `test`", }, Changes: schema.Changes{ &schema.DropSchema{ S: schema.New("test"). 
AddTables( schema.NewTable("users"), schema.NewTable("orders"), ), }, }, }, { Stmt: &migrate.Stmt{ Text: "DROP SCHEMA `market`", }, Changes: schema.Changes{ &schema.DropSchema{ S: schema.New("market"), }, }, }, { Stmt: &migrate.Stmt{ Text: "CREATE DATABASE `market`", }, Changes: schema.Changes{ &schema.AddSchema{ S: schema.New("market"), }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) az, err := destructive.New(&schemahcl.Resource{ Children: []*schemahcl.Resource{ { Type: "destructive", Attrs: []*schemahcl.Attr{ specutil.BoolAttr("error", false), }, }, }, }) require.NoError(t, err) err = az.Analyze(context.Background(), pass) require.NoError(t, err) require.Equal(t, "destructive changes detected", report.Text) require.Len(t, report.Diagnostics, 2) require.Equal(t, `Dropping non-empty schema "test" with 2 tables`, report.Diagnostics[0].Text) require.Equal(t, `Dropping schema "market"`, report.Diagnostics[1].Text) } func TestAnalyzer_DropColumn(t *testing.T) { var ( report sqlcheck.Report pass = &sqlcheck.Pass{ Dev: &sqlclient.Client{Name: "mysql"}, File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ { Stmt: &migrate.Stmt{ Text: "ALTER TABLE `pets`", }, Changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("pets"). SetSchema(schema.New("test")), Changes: schema.Changes{ &schema.DropColumn{ C: schema.NewColumn("c"). 
SetGeneratedExpr(&schema.GeneratedExpr{Type: "STORED"}), }, }, }, }, }, { Stmt: &migrate.Stmt{ Text: "ALTER TABLE `pets`", }, Changes: []schema.Change{ &schema.ModifySchema{ S: schema.New("test"), Changes: schema.Changes{ &schema.ModifyAttr{ From: &schema.Charset{V: "utf8"}, To: &schema.Charset{V: "latin1"}, }, }, }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = r }), } ) az, err := destructive.New(&schemahcl.Resource{ Children: []*schemahcl.Resource{ { Type: "destructive", Attrs: []*schemahcl.Attr{ specutil.BoolAttr("error", false), }, }, }, }) require.NoError(t, err) err = az.Analyze(context.Background(), pass) require.NoError(t, err) require.Len(t, report.Diagnostics, 1) require.Equal(t, "destructive changes detected", report.Text) require.Equal(t, `Dropping non-virtual column "c"`, report.Diagnostics[0].Text) } type testFile struct { name string migrate.File } func (t testFile) Name() string { return t.name } atlas-0.7.2/sql/sqlcheck/sqlcheck.go000066400000000000000000000175561431455511600173670ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Package sqlcheck provides interfaces for analyzing the contents of SQL files // to generate insights on the safety of many kinds of changes to database // schemas. With this package developers may define an Analyzer that can be used // to diagnose the impact of SQL statements on the target database. For instance, // The `destructive` package exposes an Analyzer that detects destructive changes // to the database schema, such as the dropping of tables or columns. package sqlcheck import ( "context" "sync" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlclient" ) type ( // An Analyzer describes a migration file analyzer. 
	Analyzer interface {
		// Analyze executes the analysis function.
		Analyze(context.Context, *Pass) error
	}

	// A NamedAnalyzer describes an Analyzer that has a name.
	NamedAnalyzer interface {
		Analyzer
		// Name of the analyzer. Identifies the analyzer
		// in configuration and linting passes.
		Name() string
	}

	// A Pass provides information to the Run function that
	// applies a specific analyzer to an SQL file.
	Pass struct {
		// A migration file and the changes it describes.
		File *File

		// Dev is a driver-specific environment used to execute analysis work.
		Dev *sqlclient.Client

		// Report reports analysis reports.
		Reporter ReportWriter
	}

	// File represents a parsed version of a migration file.
	File struct {
		migrate.File

		// Changes represents the list of changes this file represents.
		Changes []*Change

		// Sum represents a summary of changes this file represents. For example,
		// in case of a file that contains exactly two statements, and the first
		// statement is reverted by the one after it, the Sum is nil.
		Sum schema.Changes

		// A Parser that may be used for parsing this file. It sets to any as the contract
		// between checks and their parsers can vary. For example, in case of running checks
		// from CLI, the injected parser can be found in cmd/atlas/internal/sqlparse.Parser.
		Parser any

		// schema spans. lazily initialized (see loadSpans).
		spans map[string]*schemaSpan
	}

	// A Change in a migration file.
	Change struct {
		schema.Changes               // The actual changes.
		Stmt           *migrate.Stmt // The SQL statement generated this change.
	}

	// A Report describes an analysis report with an optional specific diagnostic.
	Report struct {
		Text        string       // Report text.
		Diagnostics []Diagnostic // Report diagnostics.
	}

	// A Diagnostic is a text associated with a specific position of a statement in a file.
	Diagnostic struct {
		Pos  int    // Diagnostic position.
		Text string // Diagnostic text.
		Code string // Code describes the check. For example, DS101
	}

	// ReportWriter represents a writer for analysis reports.
	ReportWriter interface {
		WriteReport(Report)
	}

	// Options defines a generic configuration options for analyzers.
	Options struct {
		// Error indicates if an analyzer should
		// error in case a Diagnostic was found.
		Error *bool `spec:"error"`

		// Allow drivers to extend the configuration.
		schemahcl.DefaultExtension
	}
)

// Analyzers implements Analyzer.
type Analyzers []Analyzer

// Analyze implements Analyzer. It runs each analyzer in order and
// stops at the first one that returns an error.
func (a Analyzers) Analyze(ctx context.Context, p *Pass) error {
	for _, a := range a {
		if err := a.Analyze(ctx, p); err != nil {
			return err
		}
	}
	return nil
}

// AnalyzerFunc allows using ordinary functions as analyzers.
type AnalyzerFunc func(ctx context.Context, p *Pass) error

// Analyze calls f.
func (f AnalyzerFunc) Analyze(ctx context.Context, p *Pass) error {
	return f(ctx, p)
}

// ReportWriterFunc is a function that implements Reporter.
type ReportWriterFunc func(Report)

// WriteReport calls f(r).
func (f ReportWriterFunc) WriteReport(r Report) {
	f(r)
}

// ResourceSpan describes the lifespan of a resource
// in perspective to the migration file.
// Values are bit flags: SpanAdded and SpanDropped may be
// combined (see SpanTemporary and the |= updates in loadSpans).
type ResourceSpan uint

const (
	// SpanUnknown describes unknown lifespan.
	// e.g. resource may exist before this file.
	SpanUnknown ResourceSpan = iota

	// SpanAdded describes that a span of
	// a resource was started in this file.
	SpanAdded

	// SpanDropped describes that a span of
	// a resource was ended in this file.
	SpanDropped

	// SpanTemporary indicates that a resource lifetime
	// was started and ended in this file (CREATE and DROP).
	SpanTemporary = SpanAdded | SpanDropped
)

// SchemaSpan returns the span information for the schema.
func (f *File) SchemaSpan(s *schema.Schema) ResourceSpan {
	return f.schemaSpan(s).state
}

// TableSpan returns the span information for the table.
func (f *File) TableSpan(t *schema.Table) ResourceSpan {
	return f.tableSpan(t).state
}

// ColumnSpan returns the span information for the column.
func (f *File) ColumnSpan(t *schema.Table, c *schema.Column) ResourceSpan { return f.tableSpan(t).columns[c.Name] } // IndexSpan returns the span information for the span. func (f *File) IndexSpan(t *schema.Table, i *schema.Index) ResourceSpan { return f.tableSpan(t).indexes[i.Name] } type ( // schemaSpan holds the span structure of a schema. schemaSpan struct { state ResourceSpan tables map[string]*tableSpan } // schemaSpan holds the span structure of a table. tableSpan struct { state ResourceSpan columns map[string]ResourceSpan indexes map[string]ResourceSpan } ) func (f *File) loadSpans() { f.spans = make(map[string]*schemaSpan) for _, sc := range f.Changes { for _, c := range sc.Changes { switch c := c.(type) { case *schema.AddSchema: f.schemaSpan(c.S).state = SpanAdded case *schema.DropSchema: f.schemaSpan(c.S).state |= SpanDropped case *schema.AddTable: span := f.tableSpan(c.T) span.state = SpanAdded for _, column := range c.T.Columns { span.columns[column.Name] = SpanAdded } for _, idx := range c.T.Indexes { span.indexes[idx.Name] = SpanAdded } case *schema.DropTable: f.tableSpan(c.T).state |= SpanDropped case *schema.ModifyTable: span := f.tableSpan(c.T) for _, c1 := range c.Changes { switch c1 := c1.(type) { case *schema.AddColumn: span.columns[c1.C.Name] = SpanAdded case *schema.DropColumn: span.columns[c1.C.Name] |= SpanDropped case *schema.AddIndex: span.indexes[c1.I.Name] = SpanAdded case *schema.DropIndex: span.indexes[c1.I.Name] |= SpanDropped } } } } } } func (f *File) schemaSpan(s *schema.Schema) *schemaSpan { if f.spans == nil { f.loadSpans() } if f.spans[s.Name] == nil { f.spans[s.Name] = &schemaSpan{tables: make(map[string]*tableSpan)} } return f.spans[s.Name] } func (f *File) tableSpan(t *schema.Table) *tableSpan { span := f.schemaSpan(t.Schema) if span.tables[t.Name] == nil { span.tables[t.Name] = &tableSpan{ columns: make(map[string]ResourceSpan), indexes: make(map[string]ResourceSpan), } } return f.spans[t.Schema.Name].tables[t.Name] } // 
codes registry var codes sync.Map // Code stores the given code in the registry. // It protects from duplicate analyzers' codes. func Code(code string) string { if _, loaded := codes.LoadOrStore(code, struct{}{}); loaded { panic("sqlcheck: Code called twice for " + code) } return code } // drivers specific analyzers. var drivers sync.Map // Register allows drivers to register a constructor function for creating // analyzers from the given HCL resource. func Register(name string, f func(*schemahcl.Resource) ([]Analyzer, error)) { drivers.Store(name, f) } // AnalyzerFor instantiates a new Analyzer from the given HCL resource // based on the registered constructor function. func AnalyzerFor(name string, r *schemahcl.Resource) ([]Analyzer, error) { f, ok := drivers.Load(name) if ok { return f.(func(*schemahcl.Resource) ([]Analyzer, error))(r) } return nil, nil } atlas-0.7.2/sql/sqlclient/000077500000000000000000000000001431455511600154265ustar00rootroot00000000000000atlas-0.7.2/sql/sqlclient/client.go000066400000000000000000000245641431455511600172460ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqlclient import ( "context" "database/sql" "errors" "fmt" "io" "net/url" "sync" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" ) type ( // Client provides the common functionalities for working with Atlas from different // applications (e.g. CLI and TF). Note, the Client is dialect specific and should // be instantiated using a call to Open. Client struct { // Name used when creating the client. Name string // DB used for creating the client. DB *sql.DB // URL holds an enriched url.URL. URL *URL // A migration driver for the attached dialect. migrate.Driver // Additional closers that can be closed at the // end of the client lifetime. 
closers []io.Closer // Marshal and Evaluator functions for decoding // and encoding the schema documents. schemahcl.Marshaler schemahcl.Evaluator // Functions registered by the drivers and used for opening transactions and their clients. openDriver func(schema.ExecQuerier) (migrate.Driver, error) openTx TxOpener } // TxClient is returned by calling Client.Tx. It behaves the same as Client, // but wraps all operations within a transaction. TxClient struct { *Client // The transaction this Client wraps. Tx *Tx } // URL extends the standard url.URL with additional // connection information attached by the Opener (if any). URL struct { *url.URL // The DSN used for opening the connection. DSN string // The Schema this client is connected to. Schema string } ) // Tx returns a transactional client. func (c *Client) Tx(ctx context.Context, opts *sql.TxOptions) (*TxClient, error) { if c.openDriver == nil { return nil, errors.New("sql/sqlclient: unexpected driver opener: ") } var tx *Tx switch { case c.openTx != nil: ttx, err := c.openTx(ctx, c.DB, opts) if err != nil { return nil, err } tx = ttx default: ttx, err := c.DB.BeginTx(ctx, opts) if err != nil { return nil, fmt.Errorf("sql/sqlclient: starting transaction: %w", err) } tx = &Tx{Tx: ttx} } drv, err := c.openDriver(tx) if err != nil { return nil, fmt.Errorf("sql/sqlclient: opening atlas driver: %w", err) } ic := *c ic.Driver = drv return &TxClient{Client: &ic, Tx: tx}, nil } // Commit the transaction. func (c *TxClient) Commit() error { return c.Tx.Commit() } // Rollback the transaction. func (c *TxClient) Rollback() error { return c.Tx.Rollback() } // AddClosers adds list of closers to close at the end of the client lifetime. func (c *Client) AddClosers(closers ...io.Closer) { c.closers = append(c.closers, closers...) } // Close closes the underlying database connection and the migration // driver in case it implements the io.Closer interface. 
func (c *Client) Close() (err error) { for _, closer := range append(c.closers, c.DB) { if cerr := closer.Close(); cerr != nil { if err != nil { cerr = fmt.Errorf("%v: %v", err, cerr) } err = cerr } } return err } type ( // Opener opens a migration driver by the given URL. Opener interface { Open(ctx context.Context, u *url.URL) (*Client, error) } // OpenerFunc allows using a function as an Opener. OpenerFunc func(context.Context, *url.URL) (*Client, error) // URLParser parses an url.URL into an enriched URL and attaches additional info to it. URLParser interface { ParseURL(*url.URL) *URL } // URLParserFunc allows using a function as an URLParser. URLParserFunc func(*url.URL) *URL // SchemaChanger is implemented by a driver if it how to change the connection URL to represent another schema. SchemaChanger interface { ChangeSchema(*url.URL, string) *url.URL } driver struct { Opener name string parser URLParser txOpener TxOpener } ) // Open calls f(ctx, u). func (f OpenerFunc) Open(ctx context.Context, u *url.URL) (*Client, error) { return f(ctx, u) } // ParseURL calls f(u). func (f URLParserFunc) ParseURL(u *url.URL) *URL { return f(u) } var drivers sync.Map type ( // openOptions holds additional configuration values for opening a Client. openOptions struct { schema *string } // OpenOption allows to configure a openOptions using functional arguments. OpenOption func(*openOptions) error ) // ErrUnsupported is returned if a registered driver does not support changing the schema. var ErrUnsupported = errors.New("sql/sqlclient: driver does not support changing connected schema") // Open opens an Atlas client by its provided url string. func Open(ctx context.Context, s string, opts ...OpenOption) (*Client, error) { u, err := url.Parse(s) if err != nil { return nil, fmt.Errorf("sql/sqlclient: parse open url: %w", err) } return OpenURL(ctx, u, opts...) } // OpenURL opens an Atlas client by its provided url.URL. 
// OpenURL looks up the registered driver for u's scheme, optionally switches
// the connected schema (when OpenSchema was given), opens the client, and
// fills in URL/transaction defaults the driver's Opener did not set.
func OpenURL(ctx context.Context, u *url.URL, opts ...OpenOption) (*Client, error) {
	cfg := &openOptions{}
	for _, opt := range opts {
		if err := opt(cfg); err != nil {
			return nil, err
		}
	}
	v, ok := drivers.Load(u.Scheme)
	if !ok {
		// NOTE(review): "was register" reads as a grammar slip ("was registered"),
		// but this exact string is asserted verbatim in client_test.go; if ever
		// fixed, update the message and the test together.
		return nil, fmt.Errorf("sql/sqlclient: no opener was register with name %q", u.Scheme)
	}
	drv := v.(*driver)
	// If there is a schema given and the driver allows to change the schema for the url, do it.
	if cfg.schema != nil {
		sc, ok := drv.parser.(SchemaChanger)
		if !ok {
			return nil, ErrUnsupported
		}
		u = sc.ChangeSchema(u, *cfg.schema)
	}
	client, err := drv.Open(ctx, u)
	if err != nil {
		return nil, err
	}
	if client.URL == nil {
		// The Opener did not attach URL info; fall back to the driver's parser.
		client.URL = drv.parser.ParseURL(u)
	}
	if client.openTx == nil && drv.txOpener != nil {
		client.openTx = drv.txOpener
	}
	return client, nil
}

// OpenSchema opens the connection to the given schema.
// If the registered driver does not support this, ErrUnsupported is returned instead.
func OpenSchema(s string) OpenOption {
	return func(c *openOptions) error {
		c.schema = &s
		return nil
	}
}

type (
	// registerOptions collects everything configurable at Register time.
	registerOptions struct {
		openDriver func(schema.ExecQuerier) (migrate.Driver, error)
		txOpener   TxOpener
		parser     URLParser
		flavours   []string
		codec      interface {
			schemahcl.Marshaler
			schemahcl.Evaluator
		}
	}

	// RegisterOption allows configuring the Opener
	// registration using functional options.
	RegisterOption func(*registerOptions)
)

// RegisterFlavours allows registering additional flavours
// (i.e. names), accepted by Atlas to open clients.
func RegisterFlavours(flavours ...string) RegisterOption {
	return func(opts *registerOptions) {
		opts.flavours = flavours
	}
}

// RegisterURLParser allows registering a function for parsing
// the url.URL and attach additional info to the extended URL.
func RegisterURLParser(p URLParser) RegisterOption {
	return func(opts *registerOptions) {
		opts.parser = p
	}
}

// RegisterCodec registers static codec for attaching into
// the client after it is opened.
func RegisterCodec(m schemahcl.Marshaler, e schemahcl.Evaluator) RegisterOption { return func(opts *registerOptions) { opts.codec = struct { schemahcl.Marshaler schemahcl.Evaluator }{ Marshaler: m, Evaluator: e, } } } // RegisterDriverOpener registers a func to create a migrate.Driver from a schema.ExecQuerier. // Registering this function is implicitly done when using DriverOpener. // The passed opener is used when creating a TxClient. func RegisterDriverOpener(open func(schema.ExecQuerier) (migrate.Driver, error)) RegisterOption { return func(opts *registerOptions) { opts.openDriver = open } } // DriverOpener is a helper Opener creator for sharing between all drivers. func DriverOpener(open func(schema.ExecQuerier) (migrate.Driver, error)) Opener { return OpenerFunc(func(_ context.Context, u *url.URL) (*Client, error) { v, ok := drivers.Load(u.Scheme) if !ok { return nil, fmt.Errorf("sql/sqlclient: unexpected missing opener %q", u.Scheme) } drv := v.(*driver) ur := drv.parser.ParseURL(u) db, err := sql.Open(drv.name, ur.DSN) if err != nil { return nil, err } mdr, err := open(db) if err != nil { if cerr := db.Close(); cerr != nil { err = fmt.Errorf("%w: %v", err, cerr) } return nil, err } return &Client{ Name: drv.name, DB: db, URL: ur, Driver: mdr, openDriver: open, openTx: drv.txOpener, }, nil }) } type ( // Tx wraps sql.Tx with optional custom Commit and Rollback functions. Tx struct { *sql.Tx CommitFn func() error // override default commit behavior RollbackFn func() error // override default rollback behavior } // TxOpener opens a transaction with optional closer. TxOpener func(context.Context, *sql.DB, *sql.TxOptions) (*Tx, error) ) // Commit the transaction. func (tx *Tx) Commit() error { fn := tx.CommitFn if fn == nil { fn = tx.Tx.Commit } return fn() } // Rollback the transaction. 
func (tx *Tx) Rollback() error { fn := tx.RollbackFn if fn == nil { fn = tx.Tx.Rollback } return fn() } // RegisterTxOpener allows registering a custom transaction opener with an optional close function. func RegisterTxOpener(open TxOpener) RegisterOption { return func(opts *registerOptions) { opts.txOpener = open } } // Register registers a client Opener (i.e. creator) with the given name. func Register(name string, opener Opener, opts ...RegisterOption) { if opener == nil { panic("sql/sqlclient: Register opener is nil") } opt := ®isterOptions{ // Default URL parser uses the URL as the DSN. parser: URLParserFunc(func(u *url.URL) *URL { return &URL{URL: u, DSN: u.String()} }), } for i := range opts { opts[i](opt) } if opt.codec != nil { f := opener opener = OpenerFunc(func(ctx context.Context, u *url.URL) (*Client, error) { c, err := f.Open(ctx, u) if err != nil { return nil, err } c.Marshaler, c.Evaluator = opt.codec, opt.codec return c, nil }) } // If there was a driver opener registered by a call to RegisterDriverOpener, it has precedence. if opt.openDriver != nil { f := opener opener = OpenerFunc(func(ctx context.Context, u *url.URL) (*Client, error) { c, err := f.Open(ctx, u) if err != nil { return nil, err } c.openDriver = opt.openDriver return c, err }) } drv := &driver{Opener: opener, name: name, parser: opt.parser, txOpener: opt.txOpener} for _, f := range append(opt.flavours, name) { if _, ok := drivers.Load(f); ok { panic("sql/sqlclient: Register called twice for " + f) } drivers.Store(f, drv) } } atlas-0.7.2/sql/sqlclient/client_test.go000066400000000000000000000076451431455511600203060ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package sqlclient_test import ( "context" "database/sql" "net/url" "testing" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/DATA-DOG/go-sqlmock" "github.com/stretchr/testify/require" "ariga.io/atlas/sql/sqlclient" ) func TestRegisterOpen(t *testing.T) { c := &sqlclient.Client{} sqlclient.Register( "mysql", sqlclient.OpenerFunc(func(ctx context.Context, url *url.URL) (*sqlclient.Client, error) { return c, nil }), sqlclient.RegisterFlavours("maria"), sqlclient.RegisterURLParser(sqlclient.URLParserFunc(func(u *url.URL) *sqlclient.URL { return &sqlclient.URL{URL: u, DSN: "dsn", Schema: "schema"} })), ) require.PanicsWithValue( t, "sql/sqlclient: Register opener is nil", func() { sqlclient.Register("mysql", nil) }, ) require.PanicsWithValue( t, "sql/sqlclient: Register called twice for mysql", func() { sqlclient.Register("mysql", sqlclient.OpenerFunc(func(ctx context.Context, url *url.URL) (*sqlclient.Client, error) { return c, nil })) }, ) c1, err := sqlclient.Open(context.Background(), "mysql://:3306") require.NoError(t, err) require.True(t, c == c1) require.Equal(t, "dsn", c.URL.DSN) require.Equal(t, "schema", c.URL.Schema) c1, err = sqlclient.Open(context.Background(), "maria://:3306") require.NoError(t, err) require.True(t, c == c1) require.Equal(t, "dsn", c.URL.DSN) require.Equal(t, "schema", c.URL.Schema) c1, err = sqlclient.Open(context.Background(), "postgres://:3306") require.EqualError(t, err, `sql/sqlclient: no opener was register with name "postgres"`) } func TestClient_AddClosers(t *testing.T) { var ( i int c = &sqlclient.Client{DB: sql.OpenDB(nil)} f = closerFunc(func() error { i++; return nil }) ) c.AddClosers(f, f, f) require.NoError(t, c.Close()) require.Equal(t, 3, i) } type closerFunc func() error func (f closerFunc) Close() error { return f() } func TestClient_Tx(t *testing.T) { db, mock, err := sqlmock.New() require.NoError(t, err) defer db.Close() const stmt = "create database `test`" mock.ExpectBegin() 
mock.ExpectExec(stmt).WillReturnResult(sqlmock.NewResult(0, 1)) mock.ExpectCommit() mock.ExpectBegin() mock.ExpectExec(stmt).WillReturnResult(sqlmock.NewResult(0, 1)) mock.ExpectRollback() var cC, rC bool sqlclient.Register( "tx", sqlclient.OpenerFunc(func(context.Context, *url.URL) (*sqlclient.Client, error) { return &sqlclient.Client{Name: "tx", DB: db, Driver: &mockDriver{db: db}}, nil }), sqlclient.RegisterDriverOpener(func(db schema.ExecQuerier) (migrate.Driver, error) { return &mockDriver{db: db}, nil }), sqlclient.RegisterTxOpener(func(ctx context.Context, db *sql.DB, opts *sql.TxOptions) (*sqlclient.Tx, error) { tx, err := db.BeginTx(ctx, opts) require.NoError(t, err) return &sqlclient.Tx{ Tx: tx, CommitFn: func() error { cC = true return tx.Commit() }, RollbackFn: func() error { rC = true return tx.Rollback() }, }, nil }), ) c, err := sqlclient.Open(context.Background(), "tx://") require.NoError(t, err) // Commit works. tx, err := c.Tx(context.Background(), nil) require.NoError(t, err) _, err = tx.ExecContext(context.Background(), stmt) require.NoError(t, err) require.NoError(t, tx.Commit()) require.True(t, cC) // Rollback works as well. tx, err = c.Tx(context.Background(), nil) require.NoError(t, err) _, err = tx.ExecContext(context.Background(), stmt) require.NoError(t, err) require.NoError(t, tx.Rollback()) require.True(t, rC) require.NoError(t, mock.ExpectationsWereMet()) } type mockDriver struct { migrate.Driver db schema.ExecQuerier } func (m *mockDriver) ExecContext(ctx context.Context, query string, args ...any) (sql.Result, error) { return m.db.ExecContext(ctx, query, args...) } atlas-0.7.2/sql/sqlite/000077500000000000000000000000001431455511600147315ustar00rootroot00000000000000atlas-0.7.2/sql/sqlite/convert.go000066400000000000000000000061351431455511600167450ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. 
// This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqlite import ( "fmt" "strconv" "strings" "ariga.io/atlas/sql/schema" ) // FormatType converts types to one format. A lowered format. // This is due to SQLite flexibility to allow any data types // and use a set of rules to define the type affinity. // See: https://www.sqlite.org/datatype3.html func FormatType(t schema.Type) (string, error) { var f string switch t := t.(type) { case *schema.BoolType: f = strings.ToLower(t.T) case *schema.BinaryType: f = strings.ToLower(t.T) case *schema.EnumType: f = t.T case *schema.IntegerType: f = strings.ToLower(t.T) case *schema.StringType: f = strings.ToLower(t.T) case *schema.TimeType: f = strings.ToLower(t.T) case *schema.FloatType: f = strings.ToLower(t.T) case *schema.DecimalType: f = strings.ToLower(t.T) case *schema.JSONType: f = strings.ToLower(t.T) case *schema.SpatialType: f = strings.ToLower(t.T) case *UUIDType: f = strings.ToLower(t.T) case *schema.UnsupportedType: return "", fmt.Errorf("sqlite: unsupported type: %q", t.T) default: return "", fmt.Errorf("sqlite: invalid schema type: %T", t) } return f, nil } // ParseType returns the schema.Type value represented by the given raw type. // It is expected to be one of the types in https://www.sqlite.org/datatypes.html, // or some of the common types used by ORMs like Ent. func ParseType(c string) (schema.Type, error) { // A datatype may be zero or more names. if c == "" { return &schema.UnsupportedType{}, nil } parts := columnParts(c) switch t := parts[0]; t { case "bool", "boolean": return &schema.BoolType{T: t}, nil case "blob": return &schema.BinaryType{T: t}, nil case "int2", "int8", "int", "integer", "tinyint", "smallint", "mediumint", "bigint", "unsigned big int": // All integer types have the same "type affinity". 
return &schema.IntegerType{T: t}, nil case "real", "double", "double precision", "float": return &schema.FloatType{T: t}, nil case "numeric", "decimal": ct := &schema.DecimalType{T: t} if len(parts) > 1 { p, err := strconv.ParseInt(parts[1], 10, 64) if err != nil { return nil, fmt.Errorf("parse precision %q", parts[1]) } ct.Precision = int(p) } if len(parts) > 2 { s, err := strconv.ParseInt(parts[2], 10, 64) if err != nil { return nil, fmt.Errorf("parse scale %q", parts[1]) } ct.Scale = int(s) } return ct, nil case "char", "character", "varchar", "varying character", "nchar", "native character", "nvarchar", "text", "clob": ct := &schema.StringType{T: t} if len(parts) > 1 { p, err := strconv.ParseInt(parts[1], 10, 64) if err != nil { return nil, fmt.Errorf("parse size %q", parts[1]) } ct.Size = int(p) } return ct, nil case "json": return &schema.JSONType{T: t}, nil case "date", "datetime", "time", "timestamp": return &schema.TimeType{T: t}, nil case "uuid": return &UUIDType{T: t}, nil default: return &schema.UnsupportedType{T: t}, nil } } atlas-0.7.2/sql/sqlite/diff.go000066400000000000000000000124031431455511600161700ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqlite import ( "fmt" "reflect" "strconv" "strings" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" ) // A Diff provides a SQLite implementation for sqlx.DiffDriver. type Diff struct{} // SchemaAttrDiff returns a changeset for migrating schema attributes from one state to the other. func (d *Diff) SchemaAttrDiff(_, _ *schema.Schema) []schema.Change { // No special schema attribute diffing for SQLite. return nil } // TableAttrDiff returns a changeset for migrating table attributes from one state to the other. 
func (d *Diff) TableAttrDiff(from, to *schema.Table) ([]schema.Change, error) { var changes []schema.Change switch { case sqlx.Has(from.Attrs, &WithoutRowID{}) && !sqlx.Has(to.Attrs, &WithoutRowID{}): changes = append(changes, &schema.DropAttr{ A: &WithoutRowID{}, }) case !sqlx.Has(from.Attrs, &WithoutRowID{}) && sqlx.Has(to.Attrs, &WithoutRowID{}): changes = append(changes, &schema.AddAttr{ A: &WithoutRowID{}, }) } return append(changes, sqlx.CheckDiff(from, to)...), nil } // ColumnChange returns the schema changes (if any) for migrating one column to the other. func (d *Diff) ColumnChange(_ *schema.Table, from, to *schema.Column) (schema.ChangeKind, error) { change := sqlx.CommentChange(from.Attrs, to.Attrs) if from.Type.Null != to.Type.Null { change |= schema.ChangeNull } changed, err := d.typeChanged(from, to) if err != nil { return schema.NoChange, err } if changed { change |= schema.ChangeType } if changed := d.defaultChanged(from, to); changed { change |= schema.ChangeDefault } if d.generatedChanged(from, to) { change |= schema.ChangeGenerated } return change, nil } // typeChanged reports if the column type was changed. func (d *Diff) typeChanged(from, to *schema.Column) (bool, error) { fromT, toT := from.Type.Type, to.Type.Type if fromT == nil || toT == nil { return false, fmt.Errorf("sqlite: missing type information for column %q", from.Name) } // Types are mismatched if they do not have the same "type affinity". return reflect.TypeOf(fromT) != reflect.TypeOf(toT), nil } // defaultChanged reports if the default value of a column was changed. func (d *Diff) defaultChanged(from, to *schema.Column) bool { d1, ok1 := sqlx.DefaultValue(from) d2, ok2 := sqlx.DefaultValue(to) if ok1 != ok2 { return true } if d1 == d2 { return false } x1, err1 := sqlx.Unquote(d1) x2, err2 := sqlx.Unquote(d2) return err1 != nil || err2 != nil || x1 != x2 } // generatedChanged reports if the generated expression of a column was changed. 
func (*Diff) generatedChanged(from, to *schema.Column) bool {
	var (
		fromX, toX     schema.GeneratedExpr
		fromHas, toHas = sqlx.Has(from.Attrs, &fromX), sqlx.Has(to.Attrs, &toX)
	)
	// Changed when generation was added/removed, or when the (possibly wrapped)
	// expression or the STORED/VIRTUAL kind differs.
	return fromHas != toHas || fromHas && (sqlx.MayWrap(fromX.Expr) != sqlx.MayWrap(toX.Expr) || storedOrVirtual(fromX.Type) != storedOrVirtual(toX.Type))
}

// IsGeneratedIndexName reports if the index name was generated by the database.
// See: https://github.com/sqlite/sqlite/blob/e937df8/src/build.c#L3583.
func (d *Diff) IsGeneratedIndexName(t *schema.Table, idx *schema.Index) bool {
	p := fmt.Sprintf("sqlite_autoindex_%s_", t.Name)
	if !strings.HasPrefix(idx.Name, p) {
		return false
	}
	// Auto-generated index names end with a positive sequence number.
	i, err := strconv.ParseInt(strings.TrimPrefix(idx.Name, p), 10, 64)
	return err == nil && i > 0
}

// IndexAttrChanged reports if the index attributes were changed.
func (*Diff) IndexAttrChanged(from, to []schema.Attr) bool {
	var p1, p2 IndexPredicate
	return sqlx.Has(from, &p1) != sqlx.Has(to, &p2) || (p1.P != p2.P && p1.P != sqlx.MayWrap(p2.P))
}

// IndexPartAttrChanged reports if the index-part attributes were changed.
func (*Diff) IndexPartAttrChanged(_, _ *schema.IndexPart) bool {
	return false
}

// ReferenceChanged reports if the foreign key referential action was changed.
func (*Diff) ReferenceChanged(from, to schema.ReferenceOption) bool {
	// According to SQLite, if an action is not explicitly
	// specified, it defaults to "NO ACTION".
	if from == "" {
		from = schema.NoAction
	}
	if to == "" {
		to = schema.NoAction
	}
	return from != to
}

// Normalize implements the sqlx.Normalizer interface.
func (d *Diff) Normalize(from, to *schema.Table) error {
	used := make([]bool, len(to.ForeignKeys))
	// In SQLite, there is no easy way to get the foreign-key constraint
	// name, except for parsing the CREATE statement. Therefore, we check
	// if there is a foreign-key with identical properties.
	for _, fk1 := range from.ForeignKeys {
		for i, fk2 := range to.ForeignKeys {
			if used[i] {
				continue
			}
			// Match either by a user-defined (non-numeric) symbol, or structurally.
			// NOTE(review): there is no break after a match, so a later fk2 that also
			// matches may overwrite fk1.Symbol — verify this is intentional.
			if fk2.Symbol == fk1.Symbol && !isNumber(fk1.Symbol) || sameFK(fk1, fk2) {
				fk1.Symbol = fk2.Symbol
				used[i] = true
			}
		}
	}
	return nil
}

// sameFK reports if the two foreign keys reference the same tables and
// the same column lists (by name and position), ignoring their symbols.
func sameFK(fk1, fk2 *schema.ForeignKey) bool {
	if fk1.Table.Name != fk2.Table.Name || fk1.RefTable.Name != fk2.RefTable.Name ||
		len(fk1.Columns) != len(fk2.Columns) || len(fk1.RefColumns) != len(fk2.RefColumns) {
		return false
	}
	for i, c1 := range fk1.Columns {
		if c1.Name != fk2.Columns[i].Name {
			return false
		}
	}
	for i, c1 := range fk1.RefColumns {
		if c1.Name != fk2.RefColumns[i].Name {
			return false
		}
	}
	return true
}
atlas-0.7.2/sql/sqlite/diff_test.go000066400000000000000000000254101431455511600172310ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

package sqlite

import (
	"testing"

	"ariga.io/atlas/sql/schema"

	"github.com/DATA-DOG/go-sqlmock"
	"github.com/stretchr/testify/require"
)

// TestDiff_TableDiff builds pairs of desired/current tables and asserts the
// exact changeset reported by Driver.TableDiff for each scenario.
func TestDiff_TableDiff(t *testing.T) {
	type testcase struct {
		name        string
		from, to    *schema.Table
		wantChanges []schema.Change
		wantErr     bool
	}
	tests := []testcase{
		{
			name: "no changes",
			from: &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}},
			to:   &schema.Table{Name: "users"},
		},
		{
			name: "change primary key",
			from: func() *schema.Table {
				t := &schema.Table{Name: "users", Schema: &schema.Schema{Name: "public"}, Columns: []*schema.Column{{Name: "id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}}}}
				t.PrimaryKey = &schema.Index{
					Parts: []*schema.IndexPart{{C: t.Columns[0]}},
				}
				return t
			}(),
			to:      &schema.Table{Name: "users"},
			wantErr: true,
		},
		{
			name: "add attr",
			from: &schema.Table{Name: "t1", Schema: &schema.Schema{Name: "public"}},
			to:   &schema.Table{Name: "t1", Attrs: []schema.Attr{&WithoutRowID{}}},
			wantChanges: []schema.Change{
				&schema.AddAttr{
					A: &WithoutRowID{},
				},
			},
		},
		{
			name: "drop attr",
			from: &schema.Table{Name: "t1", Attrs: []schema.Attr{&WithoutRowID{}}},
			to:   &schema.Table{Name: "t1"},
			wantChanges: []schema.Change{
				&schema.DropAttr{
					A: &WithoutRowID{},
				},
			},
		},
		{
			name: "add check",
			from: &schema.Table{Name: "t1"},
			to:   &schema.Table{Name: "t1", Attrs: []schema.Attr{&schema.Check{Name: "t1_c1_check", Expr: "(c1 > 1)"}}},
			wantChanges: []schema.Change{
				&schema.AddCheck{
					C: &schema.Check{Name: "t1_c1_check", Expr: "(c1 > 1)"},
				},
			},
		},
		{
			name: "drop check",
			from: &schema.Table{Name: "t1", Attrs: []schema.Attr{&schema.Check{Name: "t1_c1_check", Expr: "(c1 > 1)"}}},
			to:   &schema.Table{Name: "t1"},
			wantChanges: []schema.Change{
				&schema.DropCheck{
					C: &schema.Check{Name: "t1_c1_check", Expr: "(c1 > 1)"},
				},
			},
		},
		{
			// Unnamed checks are matched to named ones by expression.
			name: "find check by expr",
			from: &schema.Table{
				Name: "t1",
				Attrs: []schema.Attr{
					&schema.Check{Name: "t1_c1_check", Expr: "(c1 > 1)"},
					&schema.Check{Expr: "(d1 > 1)"},
				},
			},
			to: &schema.Table{
				Name: "t1",
				Attrs: []schema.Attr{
					&schema.Check{Expr: "(c1 > 1)"},
					&schema.Check{Name: "add_name_to_check", Expr: "(d1 > 1)"},
				},
			},
		},
		func() testcase {
			var (
				from = &schema.Table{
					Name: "t1",
					Schema: &schema.Schema{
						Name: "public",
					},
					Columns: []*schema.Column{
						{Name: "c1", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}}},
						{Name: "c2", Type: &schema.ColumnType{Raw: "int8", Type: &schema.IntegerType{T: "int8"}}},
					},
				}
				to = &schema.Table{
					Name: "t1",
					Columns: []*schema.Column{
						{
							Name:    "c1",
							Type:    &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}, Null: true},
							Default: &schema.RawExpr{X: "{}"},
							Attrs:   []schema.Attr{&schema.Comment{Text: "json comment"}},
						},
						{Name: "c3", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}},
					},
				}
			)
			return testcase{
				name: "columns",
				from: from,
				to:   to,
				wantChanges: []schema.Change{
					&schema.ModifyColumn{
						From:   from.Columns[0],
						To:     to.Columns[0],
						Change: schema.ChangeNull | schema.ChangeComment | schema.ChangeDefault,
					},
					&schema.DropColumn{C: from.Columns[1]},
					&schema.AddColumn{C: to.Columns[1]},
				},
			}
		}(),
		func() testcase {
			var (
				s    = schema.New("public")
				from = schema.NewTable("t1").
					SetSchema(s).
					AddColumns(
						schema.NewIntColumn("c1", "int"),
						schema.NewIntColumn("c2", "int").
							SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"}),
						schema.NewIntColumn("c3", "int").
							SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1"}),
						schema.NewIntColumn("c4", "int").
							SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "VIRTUAL"}),
					)
				to = schema.NewTable("t1").
					SetSchema(s).
					AddColumns(
						// Add generated expression.
						schema.NewIntColumn("c1", "int").
							SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"}),
						// Drop generated expression.
						schema.NewIntColumn("c2", "int"),
						// Modify generated expression.
						schema.NewIntColumn("c3", "int").
							SetGeneratedExpr(&schema.GeneratedExpr{Expr: "2"}),
						// No change.
						schema.NewIntColumn("c4", "int").
							SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1"}),
					)
			)
			return testcase{
				name: "modify column generated",
				from: from,
				to:   to,
				wantChanges: []schema.Change{
					&schema.ModifyColumn{From: from.Columns[0], To: to.Columns[0], Change: schema.ChangeGenerated},
					&schema.ModifyColumn{From: from.Columns[1], To: to.Columns[1], Change: schema.ChangeGenerated},
					&schema.ModifyColumn{From: from.Columns[2], To: to.Columns[2], Change: schema.ChangeGenerated},
				},
			}
		}(),
		func() testcase {
			var (
				from = &schema.Table{
					Name: "t1",
					Schema: &schema.Schema{
						Name: "public",
					},
					Columns: []*schema.Column{
						{Name: "c1", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}}},
						{Name: "c2", Type: &schema.ColumnType{Raw: "int8", Type: &schema.IntegerType{T: "int8"}}},
						{Name: "c3", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}},
					},
				}
				to = &schema.Table{
					Name: "t1",
					Schema: &schema.Schema{
						Name: "public",
					},
					Columns: []*schema.Column{
						{Name: "c1", Type: &schema.ColumnType{Raw: "json", Type: &schema.JSONType{T: "json"}}},
						{Name: "c2", Type: &schema.ColumnType{Raw: "int8", Type: &schema.IntegerType{T: "int8"}}},
						{Name: "c3", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}},
					},
				}
			)
			from.Indexes = []*schema.Index{
				{Name: "c1_index", Unique: true, Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[0]}}},
				{Name: "c2_unique", Unique: true, Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}},
				{Name: "c3_predicate", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}},
				{Name: "c3_desc", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: to.Columns[1]}}},
				{Name: "c4_predicate", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}, Attrs: []schema.Attr{&IndexPredicate{P: "(c4 <> NULL)"}}},
			}
			to.Indexes = []*schema.Index{
				{Name: "c1_index", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[0]}}},
				{Name: "c3_unique", Unique: true, Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: to.Columns[1]}}},
				{Name: "c3_predicate", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}, Attrs: []schema.Attr{&IndexPredicate{P: "c3 <> NULL"}}},
				{Name: "c3_desc", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, Desc: true, C: to.Columns[1]}}},
				{Name: "c4_predicate", Table: from, Parts: []*schema.IndexPart{{SeqNo: 1, C: from.Columns[1]}}, Attrs: []schema.Attr{&IndexPredicate{P: "c4 <> NULL"}}},
			}
			return testcase{
				name: "indexes",
				from: from,
				to:   to,
				wantChanges: []schema.Change{
					&schema.ModifyIndex{From: from.Indexes[0], To: to.Indexes[0], Change: schema.ChangeUnique},
					&schema.DropIndex{I: from.Indexes[1]},
					&schema.ModifyIndex{From: from.Indexes[2], To: to.Indexes[2], Change: schema.ChangeAttr},
					&schema.ModifyIndex{From: from.Indexes[3], To: to.Indexes[3], Change: schema.ChangeParts},
					&schema.AddIndex{I: to.Indexes[1]},
				},
			}
		}(),
		func() testcase {
			var (
				ref = &schema.Table{
					Name: "t2",
					Schema: &schema.Schema{
						Name: "public",
					},
					Columns: []*schema.Column{
						{Name: "id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}},
						{Name: "ref_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}},
					},
				}
				from = &schema.Table{
					Name: "t1",
					Schema: &schema.Schema{
						Name: "public",
					},
					Columns: []*schema.Column{
						{Name: "t2_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}},
					},
				}
				to = &schema.Table{
					Name: "t1",
					Schema: &schema.Schema{
						Name: "public",
					},
					Columns: []*schema.Column{
						{Name: "t2_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}},
					},
				}
			)
			from.ForeignKeys = []*schema.ForeignKey{
				{Symbol: "fk1", Table: to, Columns: to.Columns, RefTable: ref, RefColumns: ref.Columns[1:]},
				{Symbol: "1", Table: from, Columns: from.Columns, RefTable: ref, RefColumns: ref.Columns[:1]},
			}
			to.ForeignKeys = []*schema.ForeignKey{
				{Symbol: "fk1", Table: to, Columns: to.Columns, RefTable: ref, RefColumns: ref.Columns[:1]},
				// The below "constraint" is identical to "0" above, therefore, the differ does not report a change.
				{Symbol: "constraint", Table: from, Columns: from.Columns, RefTable: ref, RefColumns: ref.Columns[:1]},
			}
			return testcase{
				name: "foreign-keys",
				from: from,
				to:   to,
				wantChanges: []schema.Change{
					&schema.ModifyForeignKey{
						From:   from.ForeignKeys[0],
						To:     to.ForeignKeys[0],
						Change: schema.ChangeRefColumn,
					},
				},
			}
		}(),
	}
	for _, tt := range tests {
		// Each case gets a fresh mocked connection reporting SQLite 3.36.0.
		db, m, err := sqlmock.New()
		require.NoError(t, err)
		mock{m}.systemVars("3.36.0")
		drv, err := Open(db)
		require.NoError(t, err)
		t.Run(tt.name, func(t *testing.T) {
			changes, err := drv.TableDiff(tt.from, tt.to)
			require.Equal(t, tt.wantErr, err != nil, err)
			require.EqualValues(t, tt.wantChanges, changes)
		})
	}
}

// TestDiff_SchemaDiff asserts table add/drop/modify detection at the schema level.
func TestDiff_SchemaDiff(t *testing.T) {
	db, m, err := sqlmock.New()
	require.NoError(t, err)
	mock{m}.systemVars("3.36.0")
	drv, err := Open(db)
	require.NoError(t, err)
	from := &schema.Schema{
		Tables: []*schema.Table{
			{Name: "users"},
			{Name: "pets"},
		},
	}
	to := &schema.Schema{
		Tables: []*schema.Table{
			{
				Name: "users",
				Columns: []*schema.Column{
					{Name: "t2_id", Type: &schema.ColumnType{Raw: "int", Type: &schema.IntegerType{T: "int"}}},
				},
			},
			{Name: "groups"},
		},
	}
	from.Tables[0].Schema = from
	from.Tables[1].Schema = from
	changes, err := drv.SchemaDiff(from, to)
	require.NoError(t, err)
	require.EqualValues(t, []schema.Change{
		&schema.ModifyTable{T: to.Tables[0], Changes: []schema.Change{&schema.AddColumn{C: to.Tables[0].Columns[0]}}},
		&schema.DropTable{T: from.Tables[1]},
		&schema.AddTable{T: to.Tables[1]},
	}, changes)
}
atlas-0.7.2/sql/sqlite/driver.go000066400000000000000000000230701431455511600165550ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.
package sqlite

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
	"net/url"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"time"

	"ariga.io/atlas/sql/internal/sqlx"
	"ariga.io/atlas/sql/migrate"
	"ariga.io/atlas/sql/schema"
	"ariga.io/atlas/sql/sqlclient"
)

type (
	// Driver represents a SQLite driver for introspecting database schemas,
	// generating diff between schema elements and apply migrations changes.
	Driver struct {
		conn
		schema.Differ
		schema.Inspector
		migrate.PlanApplier
	}

	// database connection and its information.
	conn struct {
		schema.ExecQuerier
		// System variables that are set on `Open`.
		version    string
		collations []string
	}
)

// DriverName holds the name used for registration.
const DriverName = "sqlite3"

// init registers the driver, its tx opener, HCL codec and URL parser
// with the sqlclient registry under DriverName.
func init() {
	sqlclient.Register(
		DriverName,
		sqlclient.DriverOpener(Open),
		sqlclient.RegisterTxOpener(OpenTx),
		sqlclient.RegisterCodec(MarshalHCL, EvalHCL),
		sqlclient.RegisterFlavours("sqlite"),
		sqlclient.RegisterURLParser(sqlclient.URLParserFunc(func(u *url.URL) *sqlclient.URL {
			uc := &sqlclient.URL{URL: u, DSN: strings.TrimPrefix(u.String(), u.Scheme+"://"), Schema: mainFile}
			if mode := u.Query().Get("mode"); mode == "memory" {
				// The "file:" prefix is mandatory for memory modes.
				uc.DSN = "file:" + uc.DSN
			}
			return uc
		})),
	)
}

// Open opens a new SQLite driver. It queries the SQLite version and the
// available collations once, and caches them on the returned driver.
func Open(db schema.ExecQuerier) (migrate.Driver, error) {
	var (
		c   = conn{ExecQuerier: db}
		ctx = context.Background()
	)
	rows, err := db.QueryContext(ctx, "SELECT sqlite_version()")
	if err != nil {
		return nil, fmt.Errorf("sqlite: query version pragma: %w", err)
	}
	if err := sqlx.ScanOne(rows, &c.version); err != nil {
		return nil, fmt.Errorf("sqlite: scan version pragma: %w", err)
	}
	if rows, err = db.QueryContext(ctx, "SELECT name FROM pragma_collation_list()"); err != nil {
		return nil, fmt.Errorf("sqlite: query collation_list pragma: %w", err)
	}
	if c.collations, err = sqlx.ScanStrings(rows); err != nil {
		return nil, fmt.Errorf("sqlite: scanning database collations: %w", err)
	}
	return &Driver{
		conn:        c,
		Differ:      &sqlx.Diff{DiffDriver: &Diff{}},
		Inspector:   &inspect{c},
		PlanApplier: &planApply{c},
	}, nil
}

// Snapshot implements migrate.Snapshoter.
func (d *Driver) Snapshot(ctx context.Context) (migrate.RestoreFunc, error) {
	r, err := d.InspectRealm(ctx, nil)
	if err != nil {
		return nil, err
	}
	// Only an empty "main" schema (or no schema at all) is considered clean.
	if !(r == nil || (len(r.Schemas) == 1 && r.Schemas[0].Name == mainFile && len(r.Schemas[0].Tables) == 0)) {
		return nil, migrate.NotCleanError{Reason: fmt.Sprintf("found table %q", r.Schemas[0].Tables[0].Name)}
	}
	return func(ctx context.Context) error {
		// Restore wipes all user objects by editing sqlite_master directly;
		// the statement order (writable on, delete, writable off, vacuum) matters.
		for _, stmt := range []string{
			"PRAGMA writable_schema = 1;",
			"DELETE FROM sqlite_master WHERE type IN ('table', 'index', 'trigger');",
			"PRAGMA writable_schema = 0;",
			"VACUUM;",
		} {
			if _, err := d.ExecContext(ctx, stmt); err != nil {
				return err
			}
		}
		return nil
	}, nil
}

// CheckClean implements migrate.CleanChecker.
func (d *Driver) CheckClean(ctx context.Context, revT *migrate.TableIdent) error {
	r, err := d.InspectRealm(ctx, nil)
	if err != nil {
		return err
	}
	// The database is clean if it contains at most the "main" schema with
	// at most the revisions table in it.
	switch n := len(r.Schemas); {
	case n > 1:
		return migrate.NotCleanError{Reason: fmt.Sprintf("found multiple schemas: %d", len(r.Schemas))}
	case n == 1 && r.Schemas[0].Name != mainFile:
		return migrate.NotCleanError{Reason: fmt.Sprintf("found schema %q", r.Schemas[0].Name)}
	case n == 1 && len(r.Schemas[0].Tables) > 1:
		return migrate.NotCleanError{Reason: fmt.Sprintf("found multiple tables: %d", len(r.Schemas[0].Tables))}
	case n == 1 && len(r.Schemas[0].Tables) == 1 && (revT == nil || r.Schemas[0].Tables[0].Name != revT.Name):
		return migrate.NotCleanError{Reason: fmt.Sprintf("found table %q", r.Schemas[0].Tables[0].Name)}
	}
	return nil
}

// Lock implements the schema.Locker interface. It is a best-effort,
// process-wide advisory lock backed by a file under os.TempDir() that
// stores the lock's expiration time in UnixNano.
func (d *Driver) Lock(_ context.Context, name string, timeout time.Duration) (schema.UnlockFunc, error) {
	path := filepath.Join(os.TempDir(), name+".lock")
	c, err := os.ReadFile(path)
	if errors.Is(err, os.ErrNotExist) {
		return acquireLock(path, timeout)
	}
	if err != nil {
		return nil, fmt.Errorf("sql/sqlite: reading lock dir: %w", err)
	}
	expires, err := strconv.ParseInt(string(c), 10, 64)
	if err != nil {
		return nil, fmt.Errorf("sql/sqlite: invalid lock file format: parsing expiration date: %w", err)
	}
	if time.Unix(0, expires).After(time.Now()) {
		// Lock is still valid.
		return nil, fmt.Errorf("sql/sqlite: lock on %q already taken", name)
	}
	return acquireLock(path, timeout)
}

// acquireLock writes a lock file holding the expiration timestamp and
// returns an unlock function that removes the file.
func acquireLock(path string, timeout time.Duration) (schema.UnlockFunc, error) {
	lock, err := os.Create(path)
	if err != nil {
		return nil, fmt.Errorf("sql/sqlite: creating lockfile %q: %w", path, err)
	}
	if _, err := lock.Write([]byte(strconv.FormatInt(time.Now().Add(timeout).UnixNano(), 10))); err != nil {
		return nil, fmt.Errorf("sql/sqlite: writing to lockfile %q: %w", path, err)
	}
	defer lock.Close()
	return func() error { return os.Remove(path) }, nil
}

// violation describes a single row reported by "PRAGMA foreign_key_check":
// the violating table, the rowid, the referenced table and the FK index.
type violation struct {
	tbl, ref   string
	row, index int
}

// OpenTx opens a transaction. If foreign keys are enabled, it disables them, checks for constraint violations,
// opens the transaction and before committing ensures no new violations have been introduced by whatever Atlas was
// doing.
func OpenTx(ctx context.Context, db *sql.DB, opts *sql.TxOptions) (*sqlclient.Tx, error) {
	var on sql.NullBool
	if err := db.QueryRowContext(ctx, "PRAGMA foreign_keys").Scan(&on); err != nil {
		return nil, fmt.Errorf("sql/sqlite: querying 'foreign_keys' pragma: %w", err)
	}
	// Disable the foreign_keys pragma in case it is enabled, and
	// toggle it back after transaction is committed or rolled back.
	if on.Bool {
		_, err := db.ExecContext(ctx, "PRAGMA foreign_keys = off")
		if err != nil {
			return nil, fmt.Errorf("sql/sqlite: set 'foreign_keys = off': %w", err)
		}
	}
	tx, err := db.BeginTx(ctx, opts)
	if err != nil {
		return nil, err
	}
	cm, err := CommitFunc(ctx, db, tx, on.Bool)
	if err != nil {
		return nil, err
	}
	return &sqlclient.Tx{
		Tx:         tx,
		CommitFn:   cm,
		RollbackFn: RollbackFunc(ctx, db, tx, on.Bool),
	}, nil
}

// Tx wraps schema.ExecQuerier with the transaction methods.
type Tx interface {
	schema.ExecQuerier
	Commit() error
	Rollback() error
}

// CommitFunc takes a transaction and ensures to toggle foreign keys back on after tx.Commit is called.
func CommitFunc(ctx context.Context, db schema.ExecQuerier, tx Tx, on bool) (func() error, error) {
	var (
		before []violation
		err    error
	)
	// Snapshot pre-existing violations so only NEW ones abort the commit.
	if on {
		before, err = violations(ctx, tx)
		if err != nil {
			return nil, err
		}
	}
	return func() error {
		if on {
			after, err := violations(ctx, tx)
			if err != nil {
				if err2 := tx.Rollback(); err2 != nil {
					err = fmt.Errorf("%v: %w", err2, err)
				}
				return enableFK(ctx, db, on, err)
			}
			if vs := violationsDiff(before, after); len(vs) > 0 {
				err := fmt.Errorf("sql/sqlite: foreign key mismatch: %+v", vs)
				if err2 := tx.Rollback(); err2 != nil {
					err = fmt.Errorf("%v: %w", err2, err)
				}
				return enableFK(ctx, db, on, err)
			}
		}
		return enableFK(ctx, db, on, tx.Commit())
	}, nil
}

// RollbackFunc takes a transaction and ensures to toggle foreign keys back on after tx.Rollback is called.
func RollbackFunc(ctx context.Context, db schema.ExecQuerier, tx Tx, on bool) func() error {
	return func() error {
		return enableFK(ctx, db, on, tx.Rollback())
	}
}

// enableFK re-enables the foreign_keys pragma (when do is set) and combines
// any pragma error with the given one.
func enableFK(ctx context.Context, db schema.ExecQuerier, do bool, err error) error {
	if do {
		// Re-enable foreign key checks if they were enabled before.
		if _, err2 := db.ExecContext(ctx, "PRAGMA foreign_keys = on"); err2 != nil {
			err2 = fmt.Errorf("sql/sqlite: set 'foreign_keys = on': %w", err2)
			if err != nil {
				return fmt.Errorf("%v: %w", err2, err)
			}
			return err2
		}
	}
	return err
}

// violations runs "PRAGMA foreign_key_check" and scans all reported rows.
func violations(ctx context.Context, conn schema.ExecQuerier) ([]violation, error) {
	rows, err := conn.QueryContext(ctx, "PRAGMA foreign_key_check")
	if err != nil {
		return nil, fmt.Errorf("sql/sqlite: querying 'foreign_key_check' pragma: %w", err)
	}
	defer rows.Close()
	var vs []violation
	for rows.Next() {
		var v violation
		if err := rows.Scan(&v.tbl, &v.row, &v.ref, &v.index); err != nil {
			return nil, fmt.Errorf("sql/sqlite: querying 'foreign_key_check' pragma: scanning rows: %w", err)
		}
		vs = append(vs, v)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("sql/sqlite: querying 'foreign_key_check' pragma: scanning rows: %w", err)
	}
	return vs, nil
}

// violationsDiff compares the foreign key violations before starting a transaction with the ones afterwards.
// It returns violations found in v2 that are not in v1.
func violationsDiff(v1, v2 []violation) (vs []violation) {
	for _, v := range v2 {
		if !contains(v1, v) {
			vs = append(vs, v)
		}
	}
	return vs
}

// contains reports if n appears in hs (field-wise equality).
func contains(hs []violation, n violation) bool {
	for _, v := range hs {
		if v.row == n.row && v.ref == n.ref && v.index == n.index && v.tbl == n.tbl {
			return true
		}
	}
	return false
}

// SQLite standard data types as defined in its codebase and documentation.
// https://www.sqlite.org/datatype3.html
// https://github.com/sqlite/sqlite/blob/master/src/global.c
const (
	TypeInteger = "integer" // SQLITE_TYPE_INTEGER
	TypeReal    = "real"    // SQLITE_TYPE_REAL
	TypeText    = "text"    // SQLITE_TYPE_TEXT
	TypeBlob    = "blob"    // SQLITE_TYPE_BLOB
)

// SQLite generated columns types.
const (
	virtual = "VIRTUAL"
	stored  = "STORED"
)
atlas-0.7.2/sql/sqlite/driver_test.go000066400000000000000000000052111431455511600176110ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

package sqlite

import (
	"context"
	"os"
	"path/filepath"
	"strconv"
	"testing"
	"time"

	"ariga.io/atlas/sql/migrate"
	"ariga.io/atlas/sql/schema"
	"github.com/stretchr/testify/require"
)

// TestDriver_LockAcquired exercises the file-based advisory lock:
// acquire, double-acquire failure, unlock, and expired-lock takeover.
func TestDriver_LockAcquired(t *testing.T) {
	drv := &Driver{}

	// Acquiring a lock does work.
	unlock, err := drv.Lock(context.Background(), "lock", time.Second)
	require.NoError(t, err)
	require.NotNil(t, unlock)

	// Acquiring a lock on the same value will fail.
	_, err = drv.Lock(context.Background(), "lock", time.Second)
	require.Error(t, err)

	// After unlock it will succeed again.
	require.NoError(t, unlock())
	_, err = drv.Lock(context.Background(), "lock", time.Second)
	require.NoError(t, err)
	require.NotNil(t, unlock)

	// Acquiring a lock on a value that has been expired works.
	// NOTE(review): Driver.Lock reads its lockfile from os.TempDir(), but this
	// writes under os.UserCacheDir() — verify the intended lock directory.
	dir, err := os.UserCacheDir()
	require.NoError(t, err)
	require.NoError(t, os.WriteFile(
		filepath.Join(dir, "lock.lock"),
		[]byte(strconv.FormatInt(time.Now().Add(-time.Second).UnixNano(), 10)),
		0666,
	))
	// NOTE(review): the results of the two Lock calls below are assigned but
	// never asserted, so these cases are not actually verified.
	_, err = drv.Lock(context.Background(), "lock", time.Second)

	// Acquiring a lock on another value works as well.
	_, err = drv.Lock(context.Background(), "another", time.Second)
}

// TestDriver_CheckClean verifies the clean-database detection against a
// mocked inspector returning various realm shapes.
func TestDriver_CheckClean(t *testing.T) {
	var (
		r   = schema.NewRealm()
		drv = &Driver{Inspector: &mockInspector{realm: r}}
	)
	// Empty realm.
	err := drv.CheckClean(context.Background(), nil)
	require.NoError(t, err)
	// Empty schema.
	r.AddSchemas(schema.New("main"))
	err = drv.CheckClean(context.Background(), nil)
	require.NoError(t, err)
	// Schema with revisions table only.
	r.Schemas[0].AddTables(schema.NewTable("revisions"))
	err = drv.CheckClean(context.Background(), &migrate.TableIdent{Name: "revisions"})
	require.NoError(t, err)
	// Unknown table.
	r.Schemas[0].Tables[0].Name = "unknown"
	err = drv.CheckClean(context.Background(), &migrate.TableIdent{Name: "revisions"})
	require.EqualError(t, err, `sql/migrate: connected database is not clean: found table "unknown"`)
	// Multiple tables.
	r.Schemas[0].Tables = []*schema.Table{schema.NewTable("a"), schema.NewTable("revisions")}
	err = drv.CheckClean(context.Background(), &migrate.TableIdent{Schema: "test", Name: "revisions"})
	require.EqualError(t, err, `sql/migrate: connected database is not clean: found multiple tables: 2`)
}

// mockInspector stubs schema.Inspector with a fixed realm.
type mockInspector struct {
	schema.Inspector
	realm *schema.Realm
}

func (m *mockInspector) InspectRealm(context.Context, *schema.InspectRealmOption) (*schema.Realm, error) {
	return m.realm, nil
}
atlas-0.7.2/sql/sqlite/inspect.go000066400000000000000000000512001431455511600167230ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

package sqlite

import (
	"context"
	"database/sql"
	"fmt"
	"regexp"
	"strconv"
	"strings"
	"unicode"

	"ariga.io/atlas/sql/internal/sqlx"
	"ariga.io/atlas/sql/schema"
)

// An inspect provides an SQLite implementation for schema.Inspector.
type inspect struct{ conn }

var _ schema.Inspector = (*inspect)(nil)

// InspectRealm returns schema descriptions of all resources in the given realm.
func (i *inspect) InspectRealm(ctx context.Context, opts *schema.InspectRealmOption) (*schema.Realm, error) {
	schemas, err := i.databases(ctx, opts)
	if err != nil {
		return nil, err
	}
	if len(schemas) > 1 {
		return nil, fmt.Errorf("sqlite: multiple database files are not supported by the driver. got: %d", len(schemas))
	}
	if opts == nil {
		opts = &schema.InspectRealmOption{}
	}
	r := schema.NewRealm(schemas...)
	if !sqlx.ModeInspectRealm(opts).Is(schema.InspectTables) {
		return sqlx.ExcludeRealm(r, opts.Exclude)
	}
	for _, s := range schemas {
		tables, err := i.tables(ctx, nil)
		if err != nil {
			return nil, err
		}
		s.AddTables(tables...)
		for _, t := range tables {
			if err := i.inspectTable(ctx, t); err != nil {
				return nil, err
			}
		}
	}
	sqlx.LinkSchemaTables(r.Schemas)
	return sqlx.ExcludeRealm(r, opts.Exclude)
}

// InspectSchema returns schema descriptions of the tables in the given schema.
// If the schema name is empty, the "main" database is used.
func (i *inspect) InspectSchema(ctx context.Context, name string, opts *schema.InspectOptions) (*schema.Schema, error) {
	if name == "" {
		name = mainFile
	}
	schemas, err := i.databases(ctx, &schema.InspectRealmOption{
		Schemas: []string{name},
	})
	if err != nil {
		return nil, err
	}
	if len(schemas) == 0 {
		return nil, &schema.NotExistError{
			Err: fmt.Errorf("sqlite: schema %q was not found", name),
		}
	}
	if opts == nil {
		opts = &schema.InspectOptions{}
	}
	r := schema.NewRealm(schemas...)
	if !sqlx.ModeInspectSchema(opts).Is(schema.InspectTables) {
		return sqlx.ExcludeSchema(r.Schemas[0], opts.Exclude)
	}
	tables, err := i.tables(ctx, opts)
	if err != nil {
		return nil, err
	}
	r.Schemas[0].AddTables(tables...)
	for _, t := range tables {
		if err := i.inspectTable(ctx, t); err != nil {
			return nil, err
		}
	}
	sqlx.LinkSchemaTables(schemas)
	return sqlx.ExcludeSchema(r.Schemas[0], opts.Exclude)
}

// inspectTable fills in the columns, indexes, foreign keys and checks of t.
func (i *inspect) inspectTable(ctx context.Context, t *schema.Table) error {
	if err := i.columns(ctx, t); err != nil {
		return err
	}
	if err := i.indexes(ctx, t); err != nil {
		return err
	}
	if err := i.fks(ctx, t); err != nil {
		return err
	}
	if err := fillChecks(t); err != nil {
		return err
	}
	return nil
}

// columns queries and appends the columns of the given table.
func (i *inspect) columns(ctx context.Context, t *schema.Table) error {
	rows, err := i.QueryContext(ctx, fmt.Sprintf(columnsQuery, t.Name))
	if err != nil {
		return fmt.Errorf("sqlite: querying %q columns: %w", t.Name, err)
	}
	defer rows.Close()
	for rows.Next() {
		if err := i.addColumn(t, rows); err != nil {
			return fmt.Errorf("sqlite: %w", err)
		}
	}
	// Resolve the AUTOINCREMENT attribute after all columns are scanned.
	return autoinc(t)
}

// addColumn scans the current row and adds a new column from it to the table.
func (i *inspect) addColumn(t *schema.Table, rows *sql.Rows) error {
	var (
		nullable, primary   bool
		hidden              sql.NullInt64
		name, typ, defaults sql.NullString
		err                 error
	)
	if err = rows.Scan(&name, &typ, &nullable, &defaults, &primary, &hidden); err != nil {
		return err
	}
	c := &schema.Column{
		Name: name.String,
		Type: &schema.ColumnType{
			Raw:  typ.String,
			Null: nullable,
		},
	}
	c.Type.Type, err = ParseType(typ.String)
	if err != nil {
		return err
	}
	if defaults.Valid {
		c.Default = defaultExpr(defaults.String)
	}
	// The hidden flag is set to 2 for VIRTUAL columns, and to
	// 3 for STORED columns. See: sqlite/pragma.c#sqlite3Pragma.
	if hidden.Int64 >= 2 {
		if err := setGenExpr(t, c, hidden.Int64); err != nil {
			return err
		}
	}
	t.Columns = append(t.Columns, c)
	if primary {
		// Lazily create the primary-key index on the first PK column.
		if t.PrimaryKey == nil {
			t.PrimaryKey = &schema.Index{
				Name:   "PRIMARY",
				Unique: true,
				Table:  t,
			}
		}
		// Columns are ordered by the `pk` field.
		t.PrimaryKey.Parts = append(t.PrimaryKey.Parts, &schema.IndexPart{
			C:     c,
			SeqNo: len(t.PrimaryKey.Parts) + 1,
		})
	}
	return nil
}

// indexes queries and appends the indexes of the given table.
func (i *inspect) indexes(ctx context.Context, t *schema.Table) error {
	rows, err := i.QueryContext(ctx, fmt.Sprintf(indexesQuery, t.Name))
	if err != nil {
		return fmt.Errorf("sqlite: querying %q indexes: %w", t.Name, err)
	}
	if err := i.addIndexes(t, rows); err != nil {
		return fmt.Errorf("sqlite: scan %q indexes: %w", t.Name, err)
	}
	for _, idx := range t.Indexes {
		if err := i.indexInfo(ctx, t, idx); err != nil {
			return err
		}
	}
	return nil
}

// addIndexes scans the rows and adds the indexes to the table.
func (i *inspect) addIndexes(t *schema.Table, rows *sql.Rows) error {
	defer rows.Close()
	for rows.Next() {
		var (
			uniq, partial      bool
			name, origin, stmt sql.NullString
		)
		if err := rows.Scan(&name, &uniq, &origin, &partial, &stmt); err != nil {
			return err
		}
		// The primary-key index is handled by addColumn; skip it here.
		if origin.String == "pk" {
			continue
		}
		idx := &schema.Index{
			Name:   name.String,
			Unique: uniq,
			Table:  t,
			Attrs: []schema.Attr{
				&CreateStmt{S: stmt.String},
				&IndexOrigin{O: origin.String},
			},
		}
		if partial {
			// Extract the predicate of a partial index from its CREATE statement.
			i := strings.Index(stmt.String, "WHERE")
			if i == -1 {
				return fmt.Errorf("missing partial WHERE clause in: %s", stmt.String)
			}
			idx.Attrs = append(idx.Attrs, &IndexPredicate{
				P: strings.TrimSpace(stmt.String[i+5:]),
			})
		}
		t.Indexes = append(t.Indexes, idx)
	}
	return nil
}

// A regexp to extract index parts.
var reIdxParts = regexp.MustCompile("(?i)ON\\s+[\"`]*(?:\\w+)[\"`]*\\s*\\((.+)\\)")

// indexInfo scans the columns/expressions of a single index, recovering
// expression parts from the CREATE INDEX statement on a best-effort basis.
func (i *inspect) indexInfo(ctx context.Context, t *schema.Table, idx *schema.Index) error {
	var (
		hasExpr   bool
		rows, err = i.QueryContext(ctx, fmt.Sprintf(indexColumnsQuery, idx.Name))
	)
	if err != nil {
		return fmt.Errorf("sqlite: querying %q indexes: %w", t.Name, err)
	}
	defer rows.Close()
	for rows.Next() {
		var (
			desc sql.NullBool
			name sql.NullString
		)
		if err := rows.Scan(&name, &desc); err != nil {
			return fmt.Errorf("sqlite: scanning index names: %w", err)
		}
		part := &schema.IndexPart{
			SeqNo: len(idx.Parts) + 1,
			Desc:  desc.Bool,
		}
		switch c, ok := t.Column(name.String); {
		case ok:
			part.C = c
		// NULL name indicates that the index-part is an expression and we
		// should extract it from the `CREATE INDEX` statement (not supported atm).
		case !sqlx.ValidString(name):
			hasExpr = true
			part.X = &schema.RawExpr{X: ""}
		default:
			return fmt.Errorf("sqlite: column %q was not found for index %q", name.String, idx.Name)
		}
		idx.Parts = append(idx.Parts, part)
	}
	if !hasExpr {
		return nil
	}
	var c CreateStmt
	if !sqlx.Has(idx.Attrs, &c) || !reIdxParts.MatchString(c.S) {
		return nil
	}
	// NOTE(review): a plain comma-split cannot handle expressions that
	// themselves contain commas; such indexes are silently left unparsed.
	parts := strings.Split(reIdxParts.FindStringSubmatch(c.S)[1], ",")
	// Unable to parse index parts correctly.
	if len(parts) != len(idx.Parts) {
		return nil
	}
	for i, p := range idx.Parts {
		if p.X != nil {
			p.X.(*schema.RawExpr).X = strings.TrimSpace(parts[i])
		}
	}
	return nil
}

// fks queries and appends the foreign-keys of the given table.
func (i *inspect) fks(ctx context.Context, t *schema.Table) error {
	rows, err := i.QueryContext(ctx, fmt.Sprintf(fksQuery, t.Name))
	if err != nil {
		return fmt.Errorf("sqlite: querying %q foreign-keys: %w", t.Name, err)
	}
	if err := i.addFKs(t, rows); err != nil {
		return fmt.Errorf("sqlite: scan %q foreign-keys: %w", t.Name, err)
	}
	// Recover user-defined constraint names from the CREATE statement.
	return fillConstName(t)
}

// addFKs scans the pragma rows and groups them into foreign keys by their id.
func (i *inspect) addFKs(t *schema.Table, rows *sql.Rows) error {
	ids := make(map[int]*schema.ForeignKey)
	for rows.Next() {
		var (
			id                                               int
			column, refColumn, refTable, updateRule, deleteRule string
		)
		if err := rows.Scan(&id, &column, &refColumn, &refTable, &updateRule, &deleteRule); err != nil {
			return err
		}
		fk, ok := ids[id]
		if !ok {
			// The numeric pragma id serves as a placeholder symbol until
			// fillConstName resolves the real constraint name (if any).
			fk = &schema.ForeignKey{
				Symbol:   strconv.Itoa(id),
				Table:    t,
				RefTable: t,
				OnDelete: schema.ReferenceOption(deleteRule),
				OnUpdate: schema.ReferenceOption(updateRule),
			}
			if refTable != t.Name {
				fk.RefTable = &schema.Table{Name: refTable, Schema: &schema.Schema{Name: t.Schema.Name}}
			}
			ids[id] = fk
			t.ForeignKeys = append(t.ForeignKeys, fk)
		}
		c, ok := t.Column(column)
		if !ok {
			return fmt.Errorf("column %q was not found for fk %q", column, fk.Symbol)
		}
		// Rows are ordered by SEQ that specifies the
		// position of the column in the FK definition.
		if _, ok := fk.Column(c.Name); !ok {
			fk.Columns = append(fk.Columns, c)
			c.ForeignKeys = append(c.ForeignKeys, fk)
		}
		// Stub referenced columns or link if it is a self-reference.
		var rc *schema.Column
		if fk.Table != fk.RefTable {
			rc = &schema.Column{Name: refColumn}
		} else if c, ok := t.Column(refColumn); ok {
			rc = c
		} else {
			return fmt.Errorf("referenced column %q was not found for fk %q", refColumn, fk.Symbol)
		}
		if _, ok := fk.RefColumn(rc.Name); !ok {
			fk.RefColumns = append(fk.RefColumns, rc)
		}
	}
	return nil
}

// tableNames returns a list of all tables exist in the schema.
func (i *inspect) tables(ctx context.Context, opts *schema.InspectOptions) ([]*schema.Table, error) { var ( args []any query = tablesQuery ) if opts != nil && len(opts.Tables) > 0 { query += " AND name IN (" + strings.Repeat("?, ", len(opts.Tables)-1) + "?)" for _, s := range opts.Tables { args = append(args, s) } } rows, err := i.QueryContext(ctx, query, args...) if err != nil { return nil, fmt.Errorf("sqlite: querying schema tables: %w", err) } defer rows.Close() var tables []*schema.Table for rows.Next() { var name, stmt string if err := rows.Scan(&name, &stmt); err != nil { return nil, fmt.Errorf("sqlite: scanning table: %w", err) } stmt = strings.TrimSpace(stmt) t := &schema.Table{ Name: name, Attrs: []schema.Attr{ &CreateStmt{S: strings.TrimSpace(stmt)}, }, } if strings.HasSuffix(stmt, "WITHOUT ROWID") || strings.HasSuffix(stmt, "without rowid") { t.Attrs = append(t.Attrs, &WithoutRowID{}) } tables = append(tables, t) } return tables, nil } // schemas returns the list of the schemas in the database. func (i *inspect) databases(ctx context.Context, opts *schema.InspectRealmOption) ([]*schema.Schema, error) { var ( args []any query = databasesQuery ) if opts != nil && len(opts.Schemas) > 0 { query = fmt.Sprintf(databasesQueryArgs, strings.Repeat("?, ", len(opts.Schemas)-1)+"?") for _, s := range opts.Schemas { args = append(args, s) } } rows, err := i.QueryContext(ctx, query, args...) if err != nil { return nil, fmt.Errorf("sqlite: querying schemas: %w", err) } defer rows.Close() var schemas []*schema.Schema for rows.Next() { var name, file sql.NullString if err := rows.Scan(&name, &file); err != nil { return nil, err } // File is missing if the database is not // associated with a file (:memory: mode). if file.String == "" { file.String = ":memory:" } schemas = append(schemas, &schema.Schema{ Name: name.String, Attrs: []schema.Attr{&File{Name: file.String}}, }) } return schemas, nil } type ( // File describes a database file. 
	File struct {
		schema.Attr
		Name string
	}

	// CreateStmt describes the SQL statement used to create a resource.
	CreateStmt struct {
		schema.Attr
		S string
	}

	// AutoIncrement describes the `AUTOINCREMENT` configuration.
	// https://www.sqlite.org/autoinc.html
	AutoIncrement struct {
		schema.Attr
		// Seq represents the value in sqlite_sequence table.
		// i.e. https://www.sqlite.org/fileformat2.html#seqtab.
		//
		// Setting this value manually to > 0 indicates that
		// a custom value is necessary and should be handled
		// on migrate.
		Seq int64
	}

	// WithoutRowID describes the `WITHOUT ROWID` configuration.
	// See: https://sqlite.org/withoutrowid.html
	WithoutRowID struct {
		schema.Attr
	}

	// IndexPredicate describes a partial index predicate.
	// See: https://www.sqlite.org/partialindex.html
	IndexPredicate struct {
		schema.Attr
		P string
	}

	// IndexOrigin describes how the index was created.
	// See: https://www.sqlite.org/pragma.html#pragma_index_list
	IndexOrigin struct {
		schema.Attr
		O string
	}

	// A UUIDType defines a UUID type.
	UUIDType struct {
		schema.Type
		T string
	}
)

// columnParts normalizes a raw column type (e.g. "VARYING CHARACTER (10)")
// into its lowercase components, re-joining multi-word type names that were
// split on whitespace. At most two joins are performed, covering type names
// of up to three words.
func columnParts(t string) []string {
	t = strings.TrimSpace(strings.ToLower(t))
	parts := strings.FieldsFunc(t, func(r rune) bool {
		return r == '(' || r == ')' || r == ' ' || r == ','
	})
	for k := 0; k < 2; k++ {
		// Join the type back if it was separated with space (e.g. 'varying character').
		if len(parts) > 1 && !isNumber(parts[0]) && !isNumber(parts[1]) {
			parts[1] = parts[0] + " " + parts[1]
			parts = parts[1:]
		}
	}
	return parts
}

// defaultExpr converts a raw DEFAULT clause value to a schema expression:
// recognizable literals become schema.Literal, anything else schema.RawExpr.
func defaultExpr(x string) schema.Expr {
	switch {
	// Literals definition.
	// https://www.sqlite.org/syntax/literal-value.html
	case sqlx.IsLiteralBool(x), sqlx.IsLiteralNumber(x), sqlx.IsQuoted(x, '"', '\''), isBlob(x):
		return &schema.Literal{V: x}
	default:
		// We wrap the CURRENT_TIMESTAMP literals in raw-expressions
		// as they are not parsable in most decoders.
		return &schema.RawExpr{X: x}
	}
}

// isNumber reports whether the string is a number (category N).
func isNumber(s string) bool { for _, r := range s { if !unicode.IsNumber(r) { return false } } return true } // blob literals are hex strings preceded by 'x' (or 'X). func isBlob(s string) bool { if (strings.HasPrefix(s, "x'") || strings.HasPrefix(s, "X'")) && strings.HasSuffix(s, "'") { _, err := strconv.ParseUint(s[2:len(s)-1], 16, 64) return err == nil } return false } var reAutoinc = regexp.MustCompile("(?i)(?:[(,]\\s*)[\"`]?(\\w+)[\"`]?\\s+INTEGER\\s+[^,]*PRIMARY\\s+KEY\\s+[^,]*AUTOINCREMENT") // autoinc checks if the table contains a "PRIMARY KEY AUTOINCREMENT" on its // CREATE statement, according to https://www.sqlite.org/syntax/column-constraint.html. // This is a workaround until we will embed a proper SQLite parser in atlas. func autoinc(t *schema.Table) error { var c CreateStmt if !sqlx.Has(t.Attrs, &c) { return fmt.Errorf("missing CREATE statement for table: %q", t.Name) } if t.PrimaryKey == nil || len(t.PrimaryKey.Parts) != 1 || t.PrimaryKey.Parts[0].C == nil { return nil } matches := reAutoinc.FindStringSubmatch(c.S) if len(matches) != 2 { return nil } pkc, ok := t.Column(matches[1]) if !ok { return fmt.Errorf("sqlite: column %q was not found for AUTOINCREMENT", matches[1]) } if t.PrimaryKey == nil || len(t.PrimaryKey.Parts) != 1 || t.PrimaryKey.Parts[0].C != pkc { return fmt.Errorf("sqlite: unexpected primary key: %v", t.PrimaryKey) } inc := &AutoIncrement{} // Annotate table elements with "AUTOINCREMENT". t.PrimaryKey.Attrs = append(t.PrimaryKey.Attrs, inc) pkc.Attrs = append(pkc.Attrs, inc) return nil } // setGenExpr extracts the generated expression from the CREATE statement // and appends it to the column. 
// setGenExpr extracts the generated expression of column c from the table's
// CREATE statement and attaches it to the column. f is the pragma "hidden"
// flag: 2 denotes a VIRTUAL column and 3 a STORED one (see addColumn).
func setGenExpr(t *schema.Table, c *schema.Column, f int64) error {
	var s CreateStmt
	if !sqlx.Has(t.Attrs, &s) {
		return fmt.Errorf("missing CREATE statement for table: %q", t.Name)
	}
	// Locate "<column> ... [GENERATED ALWAYS] AS (" in the CREATE statement.
	re, err := regexp.Compile(fmt.Sprintf("(?:[(,]\\s*)[\"`]*(%s)[\"`]*[^,]*(?i:GENERATED\\s+ALWAYS)*\\s*(?i:AS){1}\\s*\\(", c.Name))
	if err != nil {
		return err
	}
	idx := re.FindAllStringIndex(s.S, 1)
	if len(idx) != 1 || len(idx[0]) != 2 {
		return fmt.Errorf("sqlite: generation expression for column %q was not found in create statement", c.Name)
	}
	// Scan the balanced-paren expression starting at the opening "(".
	expr := scanExpr(s.S[idx[0][1]-1:])
	if expr == "" {
		return fmt.Errorf("sqlite: unexpected empty generation expression for column %q", c.Name)
	}
	typ := virtual
	if f == 3 {
		typ = stored
	}
	c.SetGeneratedExpr(&schema.GeneratedExpr{Expr: expr, Type: typ})
	return nil
}

// The following regexes extract named FKs and CHECK constraints defined in table-constraints or inlined
// as column-constraints. Note, we assume the SQL statements are valid as they are returned by SQLite.
var (
	reFKC   = regexp.MustCompile("(?i)(?:[(,]\\s*)[\"`]*(\\w+)[\"`]*[^,]*\\s+CONSTRAINT\\s+[\"`]*(\\w+)[\"`]*\\s+REFERENCES\\s+[\"`]*(\\w+)[\"`]*\\s*\\(([,\"` \\w]+)\\)")
	reFKT   = regexp.MustCompile("(?i)CONSTRAINT\\s+[\"`]*(\\w+)[\"`]*\\s+FOREIGN\\s+KEY\\s*\\(([,\"` \\w]+)\\)\\s+REFERENCES\\s+[\"`]*(\\w+)[\"`]*\\s*\\(([,\"` \\w]+)\\)")
	reCheck = regexp.MustCompile("(?i)(?:CONSTRAINT\\s+[\"`]?(\\w+)[\"`]?\\s+)?CHECK\\s*\\(")
)

// fillConstName fills foreign-key constrain names from CREATE TABLE statement.
// The pragma only exposes numeric ids, so the symbolic names are recovered by
// matching regex captures against the already-scanned foreign-keys.
func fillConstName(t *schema.Table) error {
	var c CreateStmt
	if !sqlx.Has(t.Attrs, &c) {
		return fmt.Errorf("missing CREATE statement for table: %q", t.Name)
	}
	// Loop over table constraints.
	for _, m := range reFKT.FindAllStringSubmatch(c.S, -1) {
		if len(m) != 5 {
			return fmt.Errorf("unexpected number of matches for a table constraint: %q", m)
		}
		// Pattern matches "constraint_name", "columns", "ref_table" and "ref_columns".
		for _, fk := range t.ForeignKeys {
			// Found a foreign-key match for the constraint.
			if matchFK(fk, columns(m[2]), m[3], columns(m[4])) {
				fk.Symbol = m[1]
				break
			}
		}
	}
	// Loop over inlined column constraints.
	for _, m := range reFKC.FindAllStringSubmatch(c.S, -1) {
		if len(m) != 5 {
			return fmt.Errorf("unexpected number of matches for a column constraint: %q", m)
		}
		// Pattern matches "column", "constraint_name", "ref_table" and "ref_columns".
		for _, fk := range t.ForeignKeys {
			// Found a foreign-key match for the constraint.
			if matchFK(fk, columns(m[1]), m[3], columns(m[4])) {
				fk.Symbol = m[2]
				break
			}
		}
	}
	return nil
}

// columns from the matched regex above.
func columns(s string) []string {
	names := strings.Split(s, ",")
	for i := range names {
		names[i] = strings.Trim(strings.TrimSpace(names[i]), "`\"")
	}
	return names
}

// matchFK reports if the foreign-key matches the given attributes.
func matchFK(fk *schema.ForeignKey, columns []string, refTable string, refColumns []string) bool {
	if len(fk.Columns) != len(columns) || fk.RefTable.Name != refTable || len(fk.RefColumns) != len(refColumns) {
		return false
	}
	for i := range columns {
		if fk.Columns[i].Name != columns[i] {
			return false
		}
	}
	for i := range refColumns {
		if fk.RefColumns[i].Name != refColumns[i] {
			return false
		}
	}
	return true
}

// fillChecks extracts the CHECK constrains from the CREATE TABLE statement,
// and appends them to the table attributes.
func fillChecks(t *schema.Table) error {
	var c CreateStmt
	if !sqlx.Has(t.Attrs, &c) {
		return fmt.Errorf("missing CREATE statement for table: %q", t.Name)
	}
	// NOTE(review): i is never incremented; instead c.S is re-sliced at the
	// end of each iteration (below), so c.S[i:] always scans the remainder.
	for i := 0; i < len(c.S); {
		idx := reCheck.FindStringSubmatchIndex(c.S[i:])
		// No more matches.
		if len(idx) != 4 {
			break
		}
		check := &schema.Check{Expr: scanExpr(c.S[idx[1]-1:])}
		// Matching group for constraint name.
if idx[2] != -1 && idx[3] != -1 { check.Name = c.S[idx[2]:idx[3]] } t.Attrs = append(t.Attrs, check) c.S = c.S[idx[1]+len(check.Expr)-1:] } return nil } // scanExpr scans the expression string (wrapped with parens) // until its end in the given string. e.g. "(a+1), c int ...". func scanExpr(expr string) string { var r, l int for i := 0; i < len(expr); i++ { switch expr[i] { case '(': r++ case ')': l++ case '\'', '"': // Skip unescaped strings. if j := strings.IndexByte(expr[i+1:], expr[i]); j != -1 { i += j + 1 } } // Balanced parens. if r == l { return expr[:i+1] } } return "" } const ( // Name of main database file. mainFile = "main" // Query to list attached database files. databasesQuery = "SELECT `name`, `file` FROM pragma_database_list() WHERE `name` <> 'temp'" databasesQueryArgs = "SELECT `name`, `file` FROM pragma_database_list() WHERE `name` IN (%s)" // Query to list database tables. tablesQuery = "SELECT `name`, `sql` FROM sqlite_master WHERE `type` = 'table' AND `name` NOT LIKE 'sqlite_%'" // Query to list table information. columnsQuery = "SELECT `name`, `type`, (not `notnull`) AS `nullable`, `dflt_value`, (`pk` <> 0) AS `pk`, `hidden` FROM pragma_table_xinfo('%s') ORDER BY `pk`, `cid`" // Query to list table indexes. indexesQuery = "SELECT `il`.`name`, `il`.`unique`, `il`.`origin`, `il`.`partial`, `m`.`sql` FROM pragma_index_list('%s') AS il JOIN sqlite_master AS m ON il.name = m.name" // Query to list index columns. indexColumnsQuery = "SELECT name, desc FROM pragma_index_xinfo('%s') WHERE key = 1 ORDER BY seqno" // Query to list table foreign-keys. fksQuery = "SELECT `id`, `from`, `to`, `table`, `on_update`, `on_delete` FROM pragma_foreign_key_list('%s') ORDER BY id, seq" ) atlas-0.7.2/sql/sqlite/inspect_test.go000066400000000000000000000462741431455511600200010ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. 
// This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqlite import ( "context" "fmt" "testing" "ariga.io/atlas/sql/internal/sqltest" "ariga.io/atlas/sql/schema" "github.com/DATA-DOG/go-sqlmock" "github.com/stretchr/testify/require" ) func TestDriver_InspectTable(t *testing.T) { tests := []struct { name string before func(mock) expect func(*require.Assertions, *schema.Table, error) }{ { name: "table columns", before: func(m mock) { m.tableExists("users", true, "CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT, w INT GENERATED ALWAYS AS (a*10), x TEXT AS (typeof(c)) STORED, y TEXT AS (substr(b,a,a+2)))") m.ExpectQuery(sqltest.Escape(fmt.Sprintf(columnsQuery, "users"))). WillReturnRows(sqltest.Rows(` name | type | nullable | dflt_value | primary | hidden ------+--------------+----------+ ------------+----------+---------- c1 | int | 1 | a | 0 | 0 c2 | integer | 0 | 97 | 0 | 0 c3 | varchar(100) | 1 | 'A' | 0 | 0 c4 | boolean | 0 | | 0 | 0 c5 | json | 0 | | 0 | 0 c6 | datetime | 0 | | 0 | 0 c7 | blob | 0 | x'a' | 0 | 0 c8 | text | 0 | | 0 | 0 c9 | numeric(10,2) | 0 | | 0 | 0 c10 | real | 0 | | 0 | 0 w | int | 0 | | 0 | 2 x | text | 0 | | 0 | 3 y | text | 0 | | 0 | 2 id | integer | 0 | 0x1 | 1 | 0 `)) m.noIndexes("users") m.noFKs("users") }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) columns := []*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Null: true, Type: &schema.IntegerType{T: "int"}, Raw: "int"}, Default: &schema.RawExpr{X: "a"}}, {Name: "c2", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Raw: "integer"}, Default: &schema.Literal{V: "97"}}, {Name: "c3", Type: &schema.ColumnType{Null: true, Type: &schema.StringType{T: "varchar", Size: 100}, Raw: "varchar(100)"}, Default: &schema.Literal{V: "'A'"}}, {Name: "c4", Type: &schema.ColumnType{Type: &schema.BoolType{T: "boolean"}, Raw: "boolean"}}, 
{Name: "c5", Type: &schema.ColumnType{Type: &schema.JSONType{T: "json"}, Raw: "json"}}, {Name: "c6", Type: &schema.ColumnType{Type: &schema.TimeType{T: "datetime"}, Raw: "datetime"}}, {Name: "c7", Type: &schema.ColumnType{Type: &schema.BinaryType{T: "blob"}, Raw: "blob"}, Default: &schema.Literal{V: "x'a'"}}, {Name: "c8", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}, Raw: "text"}}, {Name: "c9", Type: &schema.ColumnType{Type: &schema.DecimalType{T: "numeric", Precision: 10, Scale: 2}, Raw: "numeric(10,2)"}}, {Name: "c10", Type: &schema.ColumnType{Type: &schema.FloatType{T: "real"}, Raw: "real"}}, {Name: "w", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}, Raw: "int"}, Attrs: []schema.Attr{&schema.GeneratedExpr{Type: "VIRTUAL", Expr: "(a*10)"}}}, {Name: "x", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}, Raw: "text"}, Attrs: []schema.Attr{&schema.GeneratedExpr{Type: "STORED", Expr: "(typeof(c))"}}}, {Name: "y", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}, Raw: "text"}, Attrs: []schema.Attr{&schema.GeneratedExpr{Type: "VIRTUAL", Expr: "(substr(b,a,a+2))"}}}, {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Raw: "integer"}, Attrs: []schema.Attr{&AutoIncrement{}}, Default: &schema.Literal{V: "0x1"}}, } require.Equal(t.Columns, columns) require.EqualValues(&schema.Index{ Name: "PRIMARY", Unique: true, Table: t, Parts: []*schema.IndexPart{{SeqNo: 1, C: columns[len(columns)-1]}}, Attrs: []schema.Attr{&AutoIncrement{}}, }, t.PrimaryKey) }, }, { name: "table indexes", before: func(m mock) { m.tableExists("users", true, "CREATE TABLE users(id INTEGER PRIMARY KEY)") m.ExpectQuery(sqltest.Escape(fmt.Sprintf(columnsQuery, "users"))). 
WillReturnRows(sqltest.Rows(` name | type | nullable | dflt_value | primary | hidden ------+--------------+----------+ ------------+----------+---------- c1 | int | 1 | | 0 | 0 c2 | integer | 0 | | 0 | 0 `)) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(indexesQuery, "users"))). WillReturnRows(sqltest.Rows(` name | unique | origin | partial | sql -------+--------------+--------+----------+------------------------------------------------------- c1u | 1 | c | 0 | CREATE UNIQUE INDEX c1u on users(c1, c2) c1_c2 | 0 | c | 1 | CREATE INDEX c1_c2 on users(c1, c2*2) WHERE c1 <> NULL c1_x | 0 | c | 0 | CREATE INDEX c1_x ON users (f(c1)) `)) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(indexColumnsQuery, "c1u"))). WillReturnRows(sqltest.Rows(` name | desc | -------+--------+ c1 | 1 | c2 | 0 | `)) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(indexColumnsQuery, "c1_c2"))). WillReturnRows(sqltest.Rows(` name | desc | -------+--------+ c1 | 0 | nil | 0 | `)) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(indexColumnsQuery, "c1_x"))). 
WillReturnRows(sqltest.Rows(` name | desc | -------+--------+ nil | 0 | `)) m.noFKs("users") }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) columns := []*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Null: true, Type: &schema.IntegerType{T: "int"}, Raw: "int"}}, {Name: "c2", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Raw: "integer"}}, } indexes := []*schema.Index{ { Name: "c1u", Unique: true, Table: t, Parts: []*schema.IndexPart{ {SeqNo: 1, C: columns[0], Desc: true}, {SeqNo: 2, C: columns[1]}, }, Attrs: []schema.Attr{ &CreateStmt{S: "CREATE UNIQUE INDEX c1u on users(c1, c2)"}, &IndexOrigin{O: "c"}, }, }, { Name: "c1_c2", Table: t, Parts: []*schema.IndexPart{ {SeqNo: 1, C: columns[0]}, {SeqNo: 2, X: &schema.RawExpr{X: "c2*2"}}, }, Attrs: []schema.Attr{ &CreateStmt{S: "CREATE INDEX c1_c2 on users(c1, c2*2) WHERE c1 <> NULL"}, &IndexOrigin{O: "c"}, &IndexPredicate{P: "c1 <> NULL"}, }, }, { Name: "c1_x", Table: t, Parts: []*schema.IndexPart{ {SeqNo: 1, X: &schema.RawExpr{X: "f(c1)"}}, }, Attrs: []schema.Attr{ &CreateStmt{S: "CREATE INDEX c1_x ON users (f(c1))"}, &IndexOrigin{O: "c"}, }, }, } require.Equal(t.Columns, columns) require.Equal(t.Indexes, indexes) }, }, { name: "table constraints", before: func(m mock) { m.tableExists("users", true, ` CREATE TABLE users( id INTEGER PRIMARY KEY, c1 int CHECK (c1 > 10), c2 integer NOT NULL CONSTRAINT c2_fk REFERENCES users (c1) ON DELETE SET NULL constraint "ck1" CHECK ((c1 + c2) % 2 = 0), c3 integer NOT NULL REFERENCES users (c1) ON DELETE SET NULL, CONSTRAINT "c1_c2_fk" FOREIGN KEY (c1, c2) REFERENCES t2 (id, c1), CONSTRAINT "id_nonzero" CHECK (id <> 0) ) `) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(columnsQuery, "users"))). 
WillReturnRows(sqltest.Rows(` name | type | nullable | dflt_value | primary | hidden ------+--------------+----------+ ------------+----------+---------- c1 | int | 1 | | 0 | 0 c2 | integer | 0 | | 0 | 0 c3 | integer | 0 | | 0 | 0 `)) m.noIndexes("users") m.ExpectQuery(sqltest.Escape(fmt.Sprintf(fksQuery, "users"))). WillReturnRows(sqltest.Rows(` id | from | to | table | on_update | on_delete ----+-----------+-------------+-------------+----------- 0 | c1 | id | t2 | NO ACTION | CASCADE 0 | c2 | c1 | t2 | NO ACTION | CASCADE 1 | c2 | c1 | users | NO ACTION | CASCADE `)) }, expect: func(require *require.Assertions, t *schema.Table, err error) { require.NoError(err) fks := []*schema.ForeignKey{ {Symbol: "c1_c2_fk", Table: t, OnUpdate: schema.NoAction, OnDelete: schema.Cascade, RefTable: &schema.Table{Name: "t2", Schema: &schema.Schema{Name: "main"}}, RefColumns: []*schema.Column{{Name: "id"}, {Name: "c1"}}}, {Symbol: "c2_fk", Table: t, OnUpdate: schema.NoAction, OnDelete: schema.Cascade, RefTable: t}, } columns := []*schema.Column{ {Name: "c1", Type: &schema.ColumnType{Null: true, Type: &schema.IntegerType{T: "int"}, Raw: "int"}, ForeignKeys: fks[:1]}, {Name: "c2", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Raw: "integer"}, ForeignKeys: fks}, {Name: "c3", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Raw: "integer"}}, } fks[0].Columns = columns[:2] fks[1].Columns = columns[1:2] fks[1].RefColumns = columns[:1] checks := []schema.Attr{ &schema.Check{Expr: "(c1 > 10)"}, &schema.Check{Name: "ck1", Expr: "((c1 + c2) % 2 = 0)"}, &schema.Check{Name: "id_nonzero", Expr: "(id <> 0)"}, } require.Equal(t.Columns, columns) require.Equal(t.ForeignKeys, fks) require.Equal(t.Attrs[1:], checks) }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { db, m, err := sqlmock.New() require.NoError(t, err) mk := mock{m} mk.systemVars("3.36.0") drv, err := Open(db) require.NoError(t, err) tt.before(mk) s, err := 
drv.InspectSchema(context.Background(), "", &schema.InspectOptions{ Tables: []string{"users"}, }) require.NoError(t, err) tt.expect(require.New(t), s.Tables[0], err) }) } } func TestRegex_TableFK(t *testing.T) { tests := []struct { input string matches []string }{ { input: `CREATE TABLE pets (id int NOT NULL, owner_id int, CONSTRAINT "owner_fk" FOREIGN KEY(owner_id) REFERENCES users(id))`, matches: []string{"owner_fk", "owner_id", "users", "id"}, }, { input: `CREATE TABLE pets (id int NOT NULL, owner_id int, CONSTRAINT "owner_fk" FOREIGN KEY (owner_id) REFERENCES users(id))`, matches: []string{"owner_fk", "owner_id", "users", "id"}, }, { input: ` CREATE TABLE pets ( id int NOT NULL, owner_id int, CONSTRAINT owner_fk FOREIGN KEY ("owner_id") REFERENCES "users" (id) )`, matches: []string{"owner_fk", `"owner_id"`, "users", "id"}, }, { input: ` CREATE TABLE pets ( id int NOT NULL, c int, d int, CONSTRAINT "c_d_fk" FOREIGN KEY (c, d) REFERENCES "users" (a, b) )`, matches: []string{"c_d_fk", "c, d", "users", "a, b"}, }, { input: `CREATE TABLE pets (id int NOT NULL,c int,d int,CONSTRAINT "c_d_fk" FOREIGN KEY (c, "d") REFERENCES "users" (a, "b"))`, matches: []string{"c_d_fk", `c, "d"`, "users", `a, "b"`}, }, { input: `CREATE TABLE pets (id int NOT NULL,c int,d int,CONSTRAINT FOREIGN KEY (c, "d") REFERENCES "users" (a, "b"))`, }, { input: `CREATE TABLE pets (id int NOT NULL,c int,d int,CONSTRAINT name FOREIGN KEY c REFERENCES "users" (a, "b"))`, }, { input: `CREATE TABLE pets (id int NOT NULL,c int,d int,CONSTRAINT name FOREIGN KEY c REFERENCES (a, "b"))`, }, } for _, tt := range tests { m := reFKT.FindStringSubmatch(tt.input) require.Equal(t, len(m) != 0, len(tt.matches) != 0) if len(m) > 0 { require.Equal(t, tt.matches, m[1:]) } } } func TestRegex_ColumnFK(t *testing.T) { tests := []struct { input string matches []string }{ { input: `CREATE TABLE pets (id int NOT NULL, owner_id int CONSTRAINT "owner_fk" REFERENCES users(id))`, matches: []string{"owner_id", "owner_fk", 
"users", "id"}, }, { input: `CREATE TABLE pets (id int NOT NULL, owner_id int CONSTRAINT "owner_fk" REFERENCES users(id))`, matches: []string{"owner_id", "owner_fk", "users", "id"}, }, { input: ` CREATE TABLE pets ( id int NOT NULL, c int REFERENCES users(id), d int CONSTRAINT "dfk" REFERENCES users(id) )`, matches: []string{"d", "dfk", "users", "id"}, }, { input: ` CREATE TABLE t1 ( c int REFERENCES users(id), d text CONSTRAINT "dfk" CHECK (d <> '') REFERENCES t2(d) )`, }, } for _, tt := range tests { m := reFKC.FindStringSubmatch(tt.input) require.Equal(t, len(m) != 0, len(tt.matches) != 0) if len(m) > 0 { require.Equal(t, tt.matches, m[1:]) } } } func TestRegex_Checks(t *testing.T) { tests := []struct { input string checks []*schema.Check }{ { input: `CREATE TABLE pets (id int NOT NULL, owner_id int CONSTRAINT "ck1" CHECK (owner_id <> 0))`, checks: []*schema.Check{ {Name: "ck1", Expr: "(owner_id <> 0)"}, }, }, { input: `CREATE TABLE pets (id int NOT NULL, owner_id int CHECK (owner_id <> 0) CONSTRAINT "ck1")`, checks: []*schema.Check{ {Expr: "(owner_id <> 0)"}, }, }, { input: `CREATE TABLE pets (id int NOT NULL CHECK ("id" <> 0), owner_id int CONSTRAINT "ck1" CHECK ((owner_id) <> 0))`, checks: []*schema.Check{ {Expr: `("id" <> 0)`}, {Name: "ck1", Expr: "((owner_id) <> 0)"}, }, }, { input: `CREATE TABLE pets (id int NOT NULL CHECK ("(" <> ')'), owner_id int CONSTRAINT "ck1" CHECK ((owner_id) <> 0))`, checks: []*schema.Check{ {Expr: `("(" <> ')')`}, {Name: "ck1", Expr: "((owner_id) <> 0)"}, }, }, { input: "CREATE TABLE pets (\n\tid int NOT NULL CHECK (id <> 0) CHECK ((id % 2) = 0)\n,\n\towner_id int CHECK ((owner_id) <> 0)\n)", checks: []*schema.Check{ {Expr: "(id <> 0)"}, {Expr: "((id % 2) = 0)"}, {Expr: "((owner_id) <> 0)"}, }, }, { input: `CREATE TABLE t1( x INTEGER CHECK( x<5 ), y REAL CHECK( y>x ))`, checks: []*schema.Check{ {Expr: "( x<5 )"}, {Expr: "( y>x )"}, }, }, { input: `CREATE TABLE t( x INTEGER CONSTRAINT one CHECK( typeof(coalesce(x,0))=="integer" ), 
y NUMERIC CONSTRAINT two CHECK( typeof(coalesce(y,0.1))=='real' ), z TEXT CONSTRAINT three CHECK( typeof(coalesce(z,''))=='text' ) )`, checks: []*schema.Check{ {Name: "one", Expr: `( typeof(coalesce(x,0))=="integer" )`}, {Name: "two", Expr: `( typeof(coalesce(y,0.1))=='real' )`}, {Name: "three", Expr: `( typeof(coalesce(z,''))=='text' )`}, }, }, { input: `CREATE TABLE t( x char check ('foo''(' <> 1) )`, checks: []*schema.Check{ {Expr: `('foo''(' <> 1)`}, }, }, // Invalid inputs. { input: "CREATE TABLE t(x char check)", }, { input: "CREATE TABLE t(x char constraint x check)", }, } for _, tt := range tests { const name = "users" db, m, err := sqlmock.New() require.NoError(t, err) mk := mock{m} mk.systemVars("3.36.0") mk.tableExists(name, true, tt.input) mk.noColumns(name) mk.noIndexes(name) mk.noFKs(name) drv, err := Open(db) require.NoError(t, err) s, err := drv.InspectSchema(context.Background(), "", &schema.InspectOptions{ Tables: []string{name}, }) require.NoError(t, err) table := s.Tables[0] require.Equal(t, len(table.Attrs[1:]), len(tt.checks)) for i := range tt.checks { require.Equal(t, tt.checks[i], table.Attrs[i+1]) } } } func TestRegex_GeneratedExpr(t *testing.T) { tests := []struct { input string column *schema.Column }{ { input: "CREATE TABLE t1( a NOT NULL DEFAULT 'aaa', b AS(c) NOT NULL, c NOT NULL DEFAULT 'ccc');", column: schema.NewColumn("b"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "(c)", Type: "VIRTUAL"}), }, { input: "CREATE TABLE t4(a NOT NULL DEFAULT 123, b AS(a*10+4) STORED UNIQUE);", column: schema.NewColumn("b"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "(a*10+4)", Type: "VIRTUAL"}), }, { input: "CREATE TABLE t1(aa , bb AS (17) UNIQUE);", column: schema.NewColumn("bb"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "(17)", Type: "VIRTUAL"}), }, { input: "CREATE TABLE t1( a integer primary key, b int generated always as (a+5), c text GENERATED ALWAYS as (printf('%08x',a)));", column: schema.NewColumn("c"). 
SetGeneratedExpr(&schema.GeneratedExpr{Expr: "(printf('%08x',a))", Type: "VIRTUAL"}), }, { input: "CREATE TABLE t1( a integer primary key, b int generated always as (a+5), c text GENERATED ALWAYS as (printf('%08x',a)), d Generated\nAlways\nAS\n('xy\\()zzy'));", column: schema.NewColumn("d"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "('xy\\()zzy')", Type: "VIRTUAL"}), }, { input: "CREATE TABLE t0( c0 AS (('a', 9) < ('b', c1)), c1 AS (1), c2 CHECK (1 = c1) );", column: schema.NewColumn("c0"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "(('a', 9) < ('b', c1))", Type: "VIRTUAL"}), }, } for _, tt := range tests { const name = "users" db, m, err := sqlmock.New() require.NoError(t, err) mk := mock{m} mk.systemVars("3.36.0") mk.tableExists(name, true, tt.input) m.ExpectQuery(sqltest.Escape(fmt.Sprintf(columnsQuery, name))). WillReturnRows(sqltest.Rows(fmt.Sprintf(` name | type | nullable | dflt_value | primary | hidden ------+--------------+----------+ ------------+----------+---------- %s | int | 1 | a | 0 | 2 `, tt.column.Name))) mk.noIndexes(name) mk.noFKs(name) drv, err := Open(db) require.NoError(t, err) s, err := drv.InspectSchema(context.Background(), "", &schema.InspectOptions{ Tables: []string{name}, }) require.NoError(t, err) require.Equal(t, tt.column.Attrs, s.Tables[0].Columns[0].Attrs) } } type mock struct { sqlmock.Sqlmock } func (m mock) systemVars(version string) { m.ExpectQuery(sqltest.Escape("SELECT sqlite_version()")). WillReturnRows(sqltest.Rows(` version ---------------- ` + version + ` `)) m.ExpectQuery(sqltest.Escape("SELECT name FROM pragma_collation_list()")). WillReturnRows(sqltest.Rows(` pragma_collation_list ------------------------ decimal uint RTRIM NOCASE BINARY `)) } func (m mock) tableExists(table string, exists bool, stmt ...string) { m.ExpectQuery(sqltest.Escape(fmt.Sprintf(databasesQueryArgs, "?"))). WithArgs("main"). 
WillReturnRows(sqltest.Rows(` name | file ------+----------- main | `)) rows := sqlmock.NewRows([]string{"name", "sql"}) if exists { rows.AddRow(table, stmt[0]) } m.ExpectQuery(sqltest.Escape(tablesQuery + " AND name IN (?)")). WithArgs(table). WillReturnRows(rows) } func (m mock) noColumns(table string) { m.ExpectQuery(sqltest.Escape(fmt.Sprintf(columnsQuery, table))). WillReturnRows(sqlmock.NewRows([]string{"name", "type", "nullable", "dflt_value", "primary"})) } func (m mock) noIndexes(table string) { m.ExpectQuery(sqltest.Escape(fmt.Sprintf(indexesQuery, table))). WillReturnRows(sqlmock.NewRows([]string{"name", "unique", "origin", "partial", "sql"})) } func (m mock) noFKs(table string) { m.ExpectQuery(sqltest.Escape(fmt.Sprintf(fksQuery, table))). WillReturnRows(sqlmock.NewRows([]string{"id", "from", "to", "table", "on_update", "on_delete"})) } atlas-0.7.2/sql/sqlite/migrate.go000066400000000000000000000423211431455511600167120ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqlite import ( "context" "fmt" "strings" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" ) // A planApply provides migration capabilities for schema elements. type planApply struct{ conn } // PlanChanges returns a migration plan for the given schema changes. func (p *planApply) PlanChanges(ctx context.Context, name string, changes []schema.Change, opts ...migrate.PlanOption) (*migrate.Plan, error) { s := &state{ conn: p.conn, Plan: migrate.Plan{ Name: name, Reversible: true, Transactional: true, }, PlanOptions: migrate.PlanOptions{ // Currently, the driver does not support attached // schemas and assumed the connected schema is "main". 
			SchemaQualifier: new(string),
		},
	}
	for _, o := range opts {
		o(&s.PlanOptions)
	}
	if err := s.plan(ctx, changes); err != nil {
		return nil, err
	}
	// A plan is reversible only if every change in it carries a reverse statement.
	for _, c := range s.Changes {
		if c.Reverse == "" {
			s.Reversible = false
		}
	}
	return &s.Plan, nil
}

// ApplyChanges applies the changes on the database. An error is returned
// if the driver is unable to produce a plan to it, or one of the statements
// is failed or unsupported.
func (p *planApply) ApplyChanges(ctx context.Context, changes []schema.Change, opts ...migrate.PlanOption) error {
	return sqlx.ApplyChanges(ctx, changes, p, opts...)
}

// state represents the state of a planning. It's not part of
// planApply so that multiple planning/applying can be called
// in parallel.
type state struct {
	conn
	migrate.Plan
	migrate.PlanOptions
	// skipFKs records that one of the planned changes requires disabling
	// foreign-key enforcement for the duration of the migration.
	skipFKs bool
}

// Exec executes the changes on the database. An error is returned
// if one of the operations fail, or a change is not supported.
func (s *state) plan(ctx context.Context, changes []schema.Change) (err error) {
	for _, c := range changes {
		switch c := c.(type) {
		case *schema.AddTable:
			err = s.addTable(ctx, c)
		case *schema.DropTable:
			err = s.dropTable(c)
		case *schema.ModifyTable:
			err = s.modifyTable(ctx, c)
		case *schema.RenameTable:
			s.renameTable(c)
		default:
			err = fmt.Errorf("unsupported change %T", c)
		}
		if err != nil {
			return err
		}
	}
	// Disable foreign-keys enforcement if it is required
	// by one of the changes in the plan.
	if s.skipFKs {
		// Callers should note that these 2 pragmas are no-op in transactions,
		// See: https://sqlite.org/pragma.html#pragma_foreign_keys.
		s.Changes = append([]*migrate.Change{{Cmd: "PRAGMA foreign_keys = off", Comment: "disable the enforcement of foreign-keys constraints"}}, s.Changes...)
		s.append(&migrate.Change{Cmd: "PRAGMA foreign_keys = on", Comment: "enable back the enforcement of foreign-keys constraints"})
	}
	return nil
}

// addTable builds and executes the query for creating a table in a schema.
func (s *state) addTable(ctx context.Context, add *schema.AddTable) error { var ( errs []string b = s.Build("CREATE TABLE").Table(add.T) ) if sqlx.Has(add.Extra, &schema.IfNotExists{}) { b.P("IF NOT EXISTS") } b.Wrap(func(b *sqlx.Builder) { b.MapComma(add.T.Columns, func(i int, b *sqlx.Builder) { if err := s.column(b, add.T.Columns[i]); err != nil { errs = append(errs, err.Error()) } }) // Primary keys with auto-increment are inlined on the column definition. if pk := add.T.PrimaryKey; pk != nil && !autoincPK(pk) { b.Comma().P("PRIMARY KEY") s.indexParts(b, pk.Parts) } if len(add.T.ForeignKeys) > 0 { b.Comma() s.fks(b, add.T.ForeignKeys...) } for _, attr := range add.T.Attrs { if c, ok := attr.(*schema.Check); ok { b.Comma() check(b, c) } } }) if len(errs) > 0 { return fmt.Errorf("create table %q: %s", add.T.Name, strings.Join(errs, ", ")) } if p := (WithoutRowID{}); sqlx.Has(add.T.Attrs, &p) { b.P("WITHOUT ROWID") } s.append(&migrate.Change{ Cmd: b.String(), Source: add, Reverse: s.Build("DROP TABLE").Table(add.T).String(), Comment: fmt.Sprintf("create %q table", add.T.Name), }) if err := s.tableSeq(ctx, add); err != nil { return err } return s.addIndexes(add.T, add.T.Indexes...) } // dropTable builds and executes the query for dropping a table from a schema. func (s *state) dropTable(drop *schema.DropTable) error { s.skipFKs = true b := s.Build("DROP TABLE").Ident(drop.T.Name) if sqlx.Has(drop.Extra, &schema.IfExists{}) { b.P("IF EXISTS") } s.append(&migrate.Change{ Cmd: b.String(), Source: drop, Comment: fmt.Sprintf("drop %q table", drop.T.Name), }) return nil } // modifyTable builds and executes the queries for bringing the table into its modified state. // If the modification contains changes that are not index creation/deletion or a simple column // addition, the changes are applied using a temporary table following the procedure mentioned // in: https://www.sqlite.org/lang_altertable.html#making_other_kinds_of_table_schema_changes. 
func (s *state) modifyTable(ctx context.Context, modify *schema.ModifyTable) error { if alterable(modify) { return s.alterTable(modify) } s.skipFKs = true newT := *modify.T indexes := newT.Indexes newT.Indexes = nil newT.Name = "new_" + newT.Name // Create a new table with a temporary name, and copy the existing rows to it. if err := s.addTable(ctx, &schema.AddTable{T: &newT}); err != nil { return err } copied, err := s.copyRows(modify.T, &newT, modify.Changes) if err != nil { return err } // Drop the current table, and rename the new one to its real name. s.append(&migrate.Change{ Cmd: s.Build("DROP TABLE").Ident(modify.T.Name).String(), Source: modify, Comment: fmt.Sprintf("drop %q table %s", modify.T.Name, func() string { if copied { return "after copying rows" } return "without copying rows (no columns)" }()), }) s.append(&migrate.Change{ Cmd: s.Build("ALTER TABLE").Ident(newT.Name).P("RENAME TO").Ident(modify.T.Name).String(), Source: modify, Comment: fmt.Sprintf("rename temporary table %q to %q", newT.Name, modify.T.Name), }) return s.addIndexes(modify.T, indexes...) 
} func (s *state) renameTable(c *schema.RenameTable) { s.append(&migrate.Change{ Source: c, Comment: fmt.Sprintf("rename a table from %q to %q", c.From.Name, c.To.Name), Cmd: s.Build("ALTER TABLE").Table(c.From).P("RENAME TO").Table(c.To).String(), Reverse: s.Build("ALTER TABLE").Table(c.To).P("RENAME TO").Table(c.From).String(), }) } func (s *state) column(b *sqlx.Builder, c *schema.Column) error { t, err := FormatType(c.Type.Type) if err != nil { return err } b.Ident(c.Name).P(t) if !c.Type.Null { b.P("NOT") } b.P("NULL") if c.Default != nil { x, err := defaultValue(c) if err != nil { return err } b.P("DEFAULT", x) } switch hasA, hasX := sqlx.Has(c.Attrs, &AutoIncrement{}), sqlx.Has(c.Attrs, &schema.GeneratedExpr{}); { case hasA && hasX: return fmt.Errorf("both autoincrement and generation expression specified for column %q", c.Name) case hasA: b.P("PRIMARY KEY AUTOINCREMENT") case hasX: x := &schema.GeneratedExpr{} sqlx.Has(c.Attrs, x) b.P("AS", sqlx.MayWrap(x.Expr), x.Type) } return nil } func (s *state) dropIndexes(t *schema.Table, indexes ...*schema.Index) error { rs := &state{conn: s.conn} if err := rs.addIndexes(t, indexes...); err != nil { return err } for i := range rs.Changes { s.append(&migrate.Change{ Cmd: rs.Changes[i].Reverse, Reverse: rs.Changes[i].Cmd, Comment: fmt.Sprintf("drop index %q from table: %q", indexes[i].Name, t.Name), }) } return nil } func (s *state) addIndexes(t *schema.Table, indexes ...*schema.Index) error { for _, idx := range indexes { // PRIMARY KEY or UNIQUE columns automatically create indexes with the generated name. // See: sqlite/build.c#sqlite3CreateIndex. Therefore, we ignore such PKs, but create // the inlined UNIQUE constraints manually with custom name, because SQLite does not // allow creating indexes with such names manually. Note, this case is possible if // "apply" schema that was inspected from the database as-is. 
if strings.HasPrefix(idx.Name, "sqlite_autoindex") { if i := (IndexOrigin{}); sqlx.Has(idx.Attrs, &i) && i.O == "p" { continue } // Use the following format:
_. names := make([]string, len(idx.Parts)+1) names[0] = t.Name for i, p := range idx.Parts { if p.C == nil { return fmt.Errorf("unexpected index part %s (%d)", idx.Name, i) } names[i+1] = p.C.Name } idx.Name = strings.Join(names, "_") } b := s.Build("CREATE") if idx.Unique { b.P("UNIQUE") } b.P("INDEX") if idx.Name != "" { b.Ident(idx.Name) } b.P("ON").Ident(t.Name) s.indexParts(b, idx.Parts) if p := (IndexPredicate{}); sqlx.Has(idx.Attrs, &p) { b.P("WHERE").P(p.P) } s.append(&migrate.Change{ Cmd: b.String(), Source: &schema.AddIndex{I: idx}, Reverse: s.Build("DROP INDEX").Ident(idx.Name).String(), Comment: fmt.Sprintf("create index %q to table: %q", idx.Name, t.Name), }) } return nil } func (s *state) indexParts(b *sqlx.Builder, parts []*schema.IndexPart) { b.Wrap(func(b *sqlx.Builder) { b.MapComma(parts, func(i int, b *sqlx.Builder) { switch part := parts[i]; { case part.C != nil: b.Ident(part.C.Name) case part.X != nil: b.WriteString(sqlx.MayWrap(part.X.(*schema.RawExpr).X)) } if parts[i].Desc { b.P("DESC") } }) }) } func (s *state) fks(b *sqlx.Builder, fks ...*schema.ForeignKey) { b.MapComma(fks, func(i int, b *sqlx.Builder) { fk := fks[i] if fk.Symbol != "" { b.P("CONSTRAINT").Ident(fk.Symbol) } b.P("FOREIGN KEY") b.Wrap(func(b *sqlx.Builder) { b.MapComma(fk.Columns, func(i int, b *sqlx.Builder) { b.Ident(fk.Columns[i].Name) }) }) b.P("REFERENCES").Ident(fk.RefTable.Name) b.Wrap(func(b *sqlx.Builder) { b.MapComma(fk.RefColumns, func(i int, b *sqlx.Builder) { b.Ident(fk.RefColumns[i].Name) }) }) if fk.OnUpdate != "" { b.P("ON UPDATE", string(fk.OnUpdate)) } if fk.OnDelete != "" { b.P("ON DELETE", string(fk.OnDelete)) } }) } func (s *state) copyRows(from *schema.Table, to *schema.Table, changes []schema.Change) (bool, error) { var fromC, toC []string for _, column := range to.Columns { // Skip generated columns in INSERT as they are computed. 
if sqlx.Has(column.Attrs, &schema.GeneratedExpr{}) { continue } // Find a change that associated with this column, if exists. var change schema.Change for i := range changes { switch c := changes[i].(type) { case *schema.AddColumn: if c.C.Name != column.Name { break } if change != nil { return false, fmt.Errorf("duplicate changes for column: %q: %T, %T", column.Name, change, c) } change = changes[i] case *schema.ModifyColumn: if c.To.Name != column.Name { break } if change != nil { return false, fmt.Errorf("duplicate changes for column: %q: %T, %T", column.Name, change, c) } change = changes[i] case *schema.DropColumn: if c.C.Name == column.Name { return false, fmt.Errorf("unexpected drop column: %q", column.Name) } } } switch change := change.(type) { // We expect that new columns are added with DEFAULT/GENERATED // values or defined as nullable if the table is not empty. case *schema.AddColumn: // Column modification requires special handling if it was // converted from nullable to non-nullable with default value. case *schema.ModifyColumn: toC = append(toC, column.Name) if !column.Type.Null && column.Default != nil && change.Change.Is(schema.ChangeNull|schema.ChangeDefault) { x, err := defaultValue(column) if err != nil { return false, err } fromC = append(fromC, fmt.Sprintf("IFNULL(`%[1]s`, %s) AS `%[1]s`", column.Name, x)) } else { fromC = append(fromC, column.Name) } // Columns without changes should be transferred as-is. case nil: toC = append(toC, column.Name) fromC = append(fromC, column.Name) } } insert := len(toC) > 0 if insert { s.append(&migrate.Change{ Cmd: fmt.Sprintf( "INSERT INTO `%s` (%s) SELECT %s FROM `%s`", to.Name, identComma(toC), identComma(fromC), from.Name, ), Comment: fmt.Sprintf("copy rows from old table %q to new temporary table %q", from.Name, to.Name), }) } return insert, nil } // alterTable alters the table with the given changes. Assuming the changes are "alterable". 
func (s *state) alterTable(modify *schema.ModifyTable) error { for _, change := range modify.Changes { switch change := change.(type) { case *schema.AddIndex: if err := s.addIndexes(modify.T, change.I); err != nil { return err } case *schema.DropIndex: if err := s.dropIndexes(modify.T, change.I); err != nil { return err } case *schema.RenameIndex: if err := s.addIndexes(modify.T, change.To); err != nil { return err } if err := s.dropIndexes(modify.T, change.From); err != nil { return err } case *schema.AddColumn: b := s.Build("ALTER TABLE").Ident(modify.T.Name) r := b.Clone() if err := s.column(b.P("ADD COLUMN"), change.C); err != nil { return err } s.append(&migrate.Change{ Source: change, Cmd: b.String(), Reverse: r.P("DROP COLUMN").Ident(change.C.Name).String(), Comment: fmt.Sprintf("add column %q to table: %q", change.C.Name, modify.T.Name), }) case *schema.RenameColumn: b := s.Build("ALTER TABLE").Ident(modify.T.Name).P("RENAME COLUMN") r := b.Clone() s.append(&migrate.Change{ Source: change, Cmd: b.Ident(change.From.Name).P("TO").Ident(change.To.Name).String(), Reverse: r.Ident(change.To.Name).P("TO").Ident(change.From.Name).String(), Comment: fmt.Sprintf("rename a column from %q to %q", change.From.Name, change.To.Name), }) default: return fmt.Errorf("unexpected change in alter table: %T", change) } } return nil } // tableSeq sets the sequence value of the table if it was provided by // the user on table creation. func (s *state) tableSeq(ctx context.Context, add *schema.AddTable) error { var inc AutoIncrement switch pk := add.T.PrimaryKey; { // Sequence was set on the table. case sqlx.Has(add.T.Attrs, &inc) && inc.Seq > 0: // Sequence was set on table primary-key (a single column PK). 
case pk != nil && len(pk.Parts) == 1 && pk.Parts[0].C != nil && sqlx.Has(pk.Parts[0].C.Attrs, &inc) && inc.Seq > 0: default: return nil } // SQLite tracks the AUTOINCREMENT in the "sqlite_sequence" table that is created and initialized automatically // whenever the first "PRIMARY KEY AUTOINCREMENT" is created. However, rows in this table are populated after the // first insertion to the associated table (name, seq). Therefore, we check if the sequence table and the row exist, // and in case they are not, we insert a new non-zero sequence to it. rows, err := s.QueryContext(ctx, "SELECT seq FROM sqlite_sequence WHERE name = ?", add.T.Name) if err != nil || !rows.Next() { s.append(&migrate.Change{ Cmd: fmt.Sprintf("INSERT INTO sqlite_sequence (name, seq) VALUES (%q, %d)", add.T.Name, inc.Seq), Source: add, Reverse: fmt.Sprintf("UPDATE sqlite_sequence SET seq = 0 WHERE name = %q", add.T.Name), Comment: fmt.Sprintf("set sequence for %q table", add.T.Name), }) } if rows != nil { return rows.Close() } return nil } func (s *state) append(c *migrate.Change) { s.Changes = append(s.Changes, c) } func alterable(modify *schema.ModifyTable) bool { for _, change := range modify.Changes { switch change := change.(type) { case *schema.RenameColumn, *schema.RenameIndex, *schema.DropIndex, *schema.AddIndex: case *schema.AddColumn: if len(change.C.Indexes) > 0 || len(change.C.ForeignKeys) > 0 || change.C.Default != nil { return false } // Only VIRTUAL generated columns can be added using ALTER TABLE. if x := (schema.GeneratedExpr{}); sqlx.Has(change.C.Attrs, &x) && storedOrVirtual(x.Type) == stored { return false } default: return false } } return true } // checks writes the CHECK constraint to the builder. func check(b *sqlx.Builder, c *schema.Check) { expr := c.Expr // Expressions should be wrapped with parens. 
if t := strings.TrimSpace(expr); !strings.HasPrefix(t, "(") || !strings.HasSuffix(t, ")") { expr = "(" + t + ")" } if c.Name != "" { b.P("CONSTRAINT").Ident(c.Name) } b.P("CHECK", expr) } func autoincPK(pk *schema.Index) bool { return sqlx.Has(pk.Attrs, &AutoIncrement{}) || len(pk.Parts) == 1 && pk.Parts[0].C != nil && sqlx.Has(pk.Parts[0].C.Attrs, &AutoIncrement{}) } // Build instantiates a new builder and writes the given phrase to it. func (s *state) Build(phrases ...string) *sqlx.Builder { b := &sqlx.Builder{QuoteChar: '`', Schema: s.SchemaQualifier} return b.P(phrases...) } func defaultValue(c *schema.Column) (string, error) { switch x := c.Default.(type) { case *schema.Literal: switch c.Type.Type.(type) { case *schema.BoolType, *schema.DecimalType, *schema.IntegerType, *schema.FloatType: return x.V, nil default: return sqlx.SingleQuote(x.V) } case *schema.RawExpr: return x.X, nil default: return "", fmt.Errorf("unexpected default value type: %T", x) } } func identComma(c []string) string { b := &sqlx.Builder{QuoteChar: '`'} b.MapComma(c, func(i int, b *sqlx.Builder) { if strings.ContainsRune(c[i], '`') { b.WriteString(c[i]) } else { b.Ident(c[i]) } }) return b.String() } atlas-0.7.2/sql/sqlite/migrate_test.go000066400000000000000000000266401431455511600177570ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package sqlite import ( "context" "database/sql" "strconv" "testing" "ariga.io/atlas/sql/internal/sqltest" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "github.com/DATA-DOG/go-sqlmock" "github.com/stretchr/testify/require" ) func TestPlanChanges(t *testing.T) { tests := []struct { changes []schema.Change options []migrate.PlanOption mock func(mock) plan *migrate.Plan }{ { changes: []schema.Change{ &schema.AddTable{ T: &schema.Table{ Name: "posts", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}}, {Name: "text", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}, Null: true}}, }, Attrs: []schema.Attr{ &schema.Check{Expr: "(text <> '')"}, &schema.Check{Name: "positive_id", Expr: "(id <> 0)"}, }, }, }, }, plan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{{Cmd: "CREATE TABLE `posts` (`id` integer NOT NULL, `text` text NULL, CHECK (text <> ''), CONSTRAINT `positive_id` CHECK (id <> 0))", Reverse: "DROP TABLE `posts`"}}, }, }, { changes: []schema.Change{ &schema.AddTable{ T: &schema.Table{ Name: "posts", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}, Attrs: []schema.Attr{&AutoIncrement{Seq: 1024}}}, {Name: "text", Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}, Null: true}}, }, Attrs: []schema.Attr{ &schema.Check{Expr: "(text <> '')"}, &schema.Check{Name: "positive_id", Expr: "(id <> 0)"}, }, }, }, }, mock: func(m mock) { m.ExpectQuery(sqltest.Escape("SELECT seq FROM sqlite_sequence WHERE name = ?")). WithArgs("posts"). 
WillReturnError(sql.ErrNoRows) }, plan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ {Cmd: "CREATE TABLE `posts` (`id` integer NOT NULL PRIMARY KEY AUTOINCREMENT, `text` text NULL, CHECK (text <> ''), CONSTRAINT `positive_id` CHECK (id <> 0))", Reverse: "DROP TABLE `posts`"}, {Cmd: `INSERT INTO sqlite_sequence (name, seq) VALUES ("posts", 1024)`, Reverse: `UPDATE sqlite_sequence SET seq = 0 WHERE name = "posts"`}, }, }, }, { changes: []schema.Change{ &schema.DropTable{T: &schema.Table{Name: "posts"}}, }, plan: &migrate.Plan{ Transactional: true, Changes: []*migrate.Change{ {Cmd: "PRAGMA foreign_keys = off"}, {Cmd: "DROP TABLE `posts`"}, {Cmd: "PRAGMA foreign_keys = on"}, }, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.AddColumn{ C: &schema.Column{Name: "name", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(255)"}}}, }, &schema.AddIndex{ I: &schema.Index{ Name: "id_key", Parts: []*schema.IndexPart{ {C: users.Columns[0]}, }, Attrs: []schema.Attr{ &schema.Comment{Text: "comment"}, }, }, }, }, } }(), }, plan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ {Cmd: "ALTER TABLE `users` ADD COLUMN `name` varchar(255) NOT NULL", Reverse: "ALTER TABLE `users` DROP COLUMN `name`"}, {Cmd: "CREATE INDEX `id_key` ON `users` (`id`)", Reverse: "DROP INDEX `id_key`"}, }, }, }, // Add VIRTUAL column. { changes: []schema.Change{ func() schema.Change { users := schema.NewTable("users"). AddColumns(schema.NewIntColumn("id", "bigint")) return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.AddColumn{ C: schema.NewIntColumn("nid", "bigint"). 
SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1"}), }, }, } }(), }, plan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ {Cmd: "ALTER TABLE `users` ADD COLUMN `nid` bigint NOT NULL AS (1)", Reverse: "ALTER TABLE `users` DROP COLUMN `nid`"}, }, }, }, // Add STORED column. { changes: []schema.Change{ func() schema.Change { users := schema.NewTable("users"). AddColumns( schema.NewIntColumn("id", "bigint"), schema.NewIntColumn("nid", "bigint"). SetGeneratedExpr(&schema.GeneratedExpr{Expr: "1", Type: "STORED"}), ) return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.AddColumn{ C: users.Columns[1], }, }, } }(), }, plan: &migrate.Plan{ Transactional: true, Changes: []*migrate.Change{ {Cmd: "PRAGMA foreign_keys = off"}, {Cmd: "CREATE TABLE `new_users` (`id` bigint NOT NULL, `nid` bigint NOT NULL AS (1) STORED)", Reverse: "DROP TABLE `new_users`"}, {Cmd: "INSERT INTO `new_users` (`id`) SELECT `id` FROM `users`"}, {Cmd: "DROP TABLE `users`"}, {Cmd: "ALTER TABLE `new_users` RENAME TO `users`"}, {Cmd: "PRAGMA foreign_keys = on"}, }, }, }, { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ schema.NewIntColumn("id", "bigint"), schema.NewIntColumn("rank", "int").SetDefault(&schema.Literal{V: "1"}), schema.NewStringColumn("nick", "text").SetDefault(&schema.Literal{V: "a8m"}), }, Attrs: []schema.Attr{ &schema.Check{Expr: "(id <> 0)"}, }, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.ModifyColumn{ From: schema.NewNullIntColumn("id", "bigint"), To: users.Columns[1], Change: schema.ChangeNull | schema.ChangeDefault, }, &schema.ModifyColumn{ From: schema.NewNullStringColumn("nick", "text"), To: users.Columns[2], Change: schema.ChangeNull | schema.ChangeDefault, }, &schema.DropColumn{ C: &schema.Column{Name: "name", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(255)"}}}, }, &schema.AddCheck{ C: &schema.Check{Expr: "(id <> 
0)"}, }, }, } }(), }, plan: &migrate.Plan{ Transactional: true, Changes: []*migrate.Change{ {Cmd: "PRAGMA foreign_keys = off"}, {Cmd: "CREATE TABLE `new_users` (`id` bigint NOT NULL, `rank` int NOT NULL DEFAULT 1, `nick` text NOT NULL DEFAULT 'a8m', CHECK (id <> 0))", Reverse: "DROP TABLE `new_users`"}, {Cmd: "INSERT INTO `new_users` (`id`, `rank`, `nick`) SELECT `id`, IFNULL(`rank`, 1) AS `rank`, IFNULL(`nick`, 'a8m') AS `nick` FROM `users`"}, {Cmd: "DROP TABLE `users`"}, {Cmd: "ALTER TABLE `new_users` RENAME TO `users`"}, {Cmd: "PRAGMA foreign_keys = on"}, }, }, }, // Nothing to INSERT. { changes: []schema.Change{ func() schema.Change { users := &schema.Table{ Name: "users", Columns: []*schema.Column{ {Name: "c2", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, } return &schema.ModifyTable{ T: users, Changes: []schema.Change{ &schema.DropColumn{ C: &schema.Column{Name: "c1", Type: &schema.ColumnType{Type: &schema.StringType{T: "varchar(255)"}}}, }, &schema.AddColumn{ C: &schema.Column{Name: "c2", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "bigint"}}}, }, }, } }(), }, plan: &migrate.Plan{ Transactional: true, Changes: []*migrate.Change{ {Cmd: "PRAGMA foreign_keys = off"}, {Cmd: "CREATE TABLE `new_users` (`c2` bigint NOT NULL)", Reverse: "DROP TABLE `new_users`"}, /* Nothing to INSERT from `users` as `c1` was dropped. 
*/ {Cmd: "DROP TABLE `users`"}, {Cmd: "ALTER TABLE `new_users` RENAME TO `users`"}, {Cmd: "PRAGMA foreign_keys = on"}, }, }, }, { changes: []schema.Change{ &schema.RenameTable{ From: schema.NewTable("t1"), To: schema.NewTable("t2"), }, }, plan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `t1` RENAME TO `t2`", Reverse: "ALTER TABLE `t2` RENAME TO `t1`", }, }, }, }, { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("t1"), Changes: []schema.Change{ &schema.RenameColumn{ From: schema.NewColumn("a"), To: schema.NewColumn("b"), }, }, }, }, plan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: "ALTER TABLE `t1` RENAME COLUMN `a` TO `b`", Reverse: "ALTER TABLE `t1` RENAME COLUMN `b` TO `a`", }, }, }, }, { changes: []schema.Change{ &schema.ModifyTable{ T: schema.NewTable("t1"), Changes: []schema.Change{ &schema.RenameIndex{ From: schema.NewIndex("a").AddColumns(schema.NewColumn("a")), To: schema.NewIndex("b").AddColumns(schema.NewColumn("a")), }, }, }, }, plan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: "CREATE INDEX `b` ON `t1` (`a`)", Reverse: "DROP INDEX `b`", }, { Cmd: "DROP INDEX `a`", Reverse: "CREATE INDEX `a` ON `t1` (`a`)", }, }, }, }, // The default is no qualifier. { changes: []schema.Change{ &schema.AddTable{T: schema.NewTable("t").SetSchema(schema.New("main")).AddColumns(schema.NewIntColumn("a", "int"))}, }, plan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: "CREATE TABLE `t` (`a` int NOT NULL)", Reverse: "DROP TABLE `t`", }, }, }, }, // Custom qualifier. 
{ changes: []schema.Change{ &schema.AddTable{T: schema.NewTable("t").SetSchema(schema.New("d")).AddColumns(schema.NewIntColumn("a", "int"))}, }, options: []migrate.PlanOption{ func(o *migrate.PlanOptions) { s := "other" o.SchemaQualifier = &s }, }, plan: &migrate.Plan{ Reversible: true, Transactional: true, Changes: []*migrate.Change{ { Cmd: "CREATE TABLE `other`.`t` (`a` int NOT NULL)", Reverse: "DROP TABLE `other`.`t`", }, }, }, }, } for i, tt := range tests { t.Run(strconv.Itoa(i), func(t *testing.T) { db, mk, err := sqlmock.New() require.NoError(t, err) m := mock{mk} m.systemVars("3.36.0") if tt.mock != nil { tt.mock(m) } drv, err := Open(db) require.NoError(t, err) plan, err := drv.PlanChanges(context.Background(), "plan", tt.changes, tt.options...) require.NoError(t, err) require.Equal(t, tt.plan.Reversible, plan.Reversible) require.Equal(t, tt.plan.Transactional, plan.Transactional) for i, c := range plan.Changes { require.Equal(t, tt.plan.Changes[i].Cmd, c.Cmd) require.Equal(t, tt.plan.Changes[i].Reverse, c.Reverse) } }) } } atlas-0.7.2/sql/sqlite/sqlitecheck/000077500000000000000000000000001431455511600172305ustar00rootroot00000000000000atlas-0.7.2/sql/sqlite/sqlitecheck/sqlitecheck.go000066400000000000000000000100411431455511600220520ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqlitecheck import ( "context" "fmt" "strings" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlcheck" "ariga.io/atlas/sql/sqlcheck/datadepend" "ariga.io/atlas/sql/sqlcheck/destructive" "ariga.io/atlas/sql/sqlite" ) // codeModNotNullC is an SQLite specific code for reporting modifying nullable columns to non-nullable. 
var codeModNotNullC = sqlcheck.Code("LT101") func addNotNull(p *datadepend.ColumnPass) (diags []sqlcheck.Diagnostic, err error) { tt, err := sqlite.FormatType(p.Column.Type.Type) if err != nil { return nil, err } return []sqlcheck.Diagnostic{ { Pos: p.Change.Stmt.Pos, Text: fmt.Sprintf( "Adding a non-nullable %q column %q will fail in case table %q is not empty", tt, p.Column.Name, p.Table.Name, ), }, }, nil } func modifyNotNull(p *datadepend.ColumnPass) (diags []sqlcheck.Diagnostic, err error) { if p.Column.Default != nil || datadepend.ColumnFilled(p.File, p.Table, p.Column, p.Change.Stmt.Pos) { return nil, nil } return []sqlcheck.Diagnostic{ { Pos: p.Change.Stmt.Pos, Code: codeModNotNullC, Text: fmt.Sprintf("Modifying nullable column %q to non-nullable without default value might fail in case it contains NULL values", p.Column.Name), }, }, nil } func init() { sqlcheck.Register(sqlite.DriverName, func(r *schemahcl.Resource) ([]sqlcheck.Analyzer, error) { ds, err := destructive.New(r) if err != nil { return nil, err } dd, err := datadepend.New(r, datadepend.Handler{ AddNotNull: addNotNull, ModifyNotNull: modifyNotNull, }) if err != nil { return nil, err } return []sqlcheck.Analyzer{ sqlcheck.AnalyzerFunc(func(ctx context.Context, p *sqlcheck.Pass) error { var changes []*sqlcheck.Change // Detect sequence of changes using temporary table and transform them to one ModifyTable change. // See: https://www.sqlite.org/lang_altertable.html#making_other_kinds_of_table_schema_changes. 
for i := 0; i < len(p.File.Changes); i++ { if i+3 >= len(p.File.Changes) || !modifyUsingTemp(p.File.Changes[i], p.File.Changes[i+2], p.File.Changes[i+3]) { changes = append(changes, p.File.Changes[i]) continue } prevT, currT := p.File.Changes[i+2].Changes[0].(*schema.DropTable).T, p.File.Changes[i+3].Changes[1].(*schema.AddTable).T diff, err := p.Dev.Driver.TableDiff(prevT, currT) if err != nil { return nil } changes = append(changes, &sqlcheck.Change{ Stmt: &migrate.Stmt{ // Use the position of the first statement. Pos: p.File.Changes[i].Stmt.Pos, // A combined statement. Text: strings.Join([]string{ p.File.Changes[i].Stmt.Text, p.File.Changes[i+2].Stmt.Text, p.File.Changes[i+3].Stmt.Text, }, "\n"), }, Changes: schema.Changes{ &schema.ModifyTable{ T: currT, Changes: diff, }, }, }) i += 3 } p.File.Changes = changes return nil }), ds, dd, }, nil }) } // modifyUsingTemp indicates if the 3 changes represents a table modification using // the pattern mentioned in the link below: "CREATE", "INSERT", "DROP" and "RENAME". func modifyUsingTemp(c1, c2, c3 *sqlcheck.Change) bool { if len(c1.Changes) != 1 || !isAddT(c1.Changes[0], "new_") || len(c2.Changes) != 1 || len(c3.Changes) != 2 { return false } add := c1.Changes[0].(*schema.AddTable) name := strings.TrimPrefix(add.T.Name, "new_") // "DROP T" and "RENAME new_T to T". return isDropT(c2.Changes[0], name) && isDropT(c3.Changes[0], add.T.Name) && isAddT(c3.Changes[1], name) } func isAddT(c schema.Change, prefix string) bool { a, ok := c.(*schema.AddTable) return ok && strings.HasPrefix(a.T.Name, prefix) } func isDropT(c schema.Change, prefix string) bool { d, ok := c.(*schema.DropTable) return ok && strings.HasPrefix(d.T.Name, prefix) } atlas-0.7.2/sql/sqlite/sqlitecheck/sqlitecheck_test.go000066400000000000000000000101241431455511600231130ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. 
// This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqlitecheck_test import ( "context" "testing" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/migrate" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlcheck" "ariga.io/atlas/sql/sqlclient" "ariga.io/atlas/sql/sqlite" _ "ariga.io/atlas/sql/sqlite/sqlitecheck" "github.com/stretchr/testify/require" ) func TestDetectModifyTable(t *testing.T) { var ( report *sqlcheck.Report pass = &sqlcheck.Pass{ Dev: &sqlclient.Client{ Driver: func() migrate.Driver { drv := &sqlite.Driver{} drv.Differ = &sqlx.Diff{DiffDriver: &sqlite.Diff{}} return drv }(), }, File: &sqlcheck.File{ File: testFile{name: "1.sql"}, Changes: []*sqlcheck.Change{ // A real drop. { Stmt: &migrate.Stmt{ Text: "DROP TABLE `users`", }, Changes: schema.Changes{ &schema.DropTable{ T: schema.NewTable("users"). SetSchema(schema.New("main")), }, }, }, // Table modification using a temporary table. { Stmt: &migrate.Stmt{ Text: "PRAGMA foreign_keys = off;", }, }, { Stmt: &migrate.Stmt{ Text: "CREATE TABLE `new_posts` (`text` text NOT NULL);", }, Changes: schema.Changes{ &schema.AddTable{ T: schema.NewTable("new_posts"). SetSchema(schema.New("main")). AddColumns(schema.NewStringColumn("text", "text")), }, }, }, { Stmt: &migrate.Stmt{ Text: "INSERT INTO `new_posts` (`text`) SELECT `text` FROM `posts`;", }, }, { Stmt: &migrate.Stmt{ Text: "DROP TABLE `posts`", }, Changes: schema.Changes{ &schema.DropTable{ T: schema.NewTable("posts"). SetSchema(schema.New("main")). AddColumns( schema.NewNullStringColumn("text", "text"), schema.NewTimeColumn("posted_at", "timestamp"), ), }, }, }, { Stmt: &migrate.Stmt{ Text: "ALTER TABLE `new_posts` RENAME TO `posts`;", }, Changes: schema.Changes{ &schema.DropTable{ T: schema.NewTable("new_posts"). SetSchema(schema.New("main")). AddColumns(schema.NewStringColumn("text", "text")), }, &schema.AddTable{ T: schema.NewTable("posts"). 
SetSchema(schema.New("main")). AddColumns(schema.NewStringColumn("text", "text")), }, }, }, { Stmt: &migrate.Stmt{ Text: "PRAGMA foreign_keys = on;", }, }, // Another real drop. { Stmt: &migrate.Stmt{ Text: "DROP TABLE `pets`", }, Changes: schema.Changes{ &schema.DropTable{ T: schema.NewTable("pets"). SetSchema(schema.New("main")), }, }, }, }, }, Reporter: sqlcheck.ReportWriterFunc(func(r sqlcheck.Report) { report = &r }), } ) azs, err := sqlcheck.AnalyzerFor(sqlite.DriverName, nil) require.NoError(t, err) require.Len(t, azs, 3) require.NoError(t, azs[0].Analyze(context.Background(), pass)) err = azs[1].Analyze(context.Background(), pass) require.EqualError(t, err, "destructive changes detected") require.Equal(t, report.Text, "destructive changes detected") require.Len(t, report.Diagnostics, 3) require.Equal(t, report.Diagnostics[0].Text, `Dropping table "users"`) require.Equal(t, report.Diagnostics[1].Text, `Dropping non-virtual column "posted_at"`) require.Equal(t, report.Diagnostics[2].Text, `Dropping table "pets"`) require.NoError(t, azs[2].Analyze(context.Background(), pass)) require.Equal(t, report.Text, "data dependent changes detected") require.Len(t, report.Diagnostics, 1) require.Equal(t, report.Diagnostics[0].Text, `Modifying nullable column "text" to non-nullable without default value might fail in case it contains NULL values`) } type testFile struct { name string migrate.File } func (t testFile) Name() string { return t.name } atlas-0.7.2/sql/sqlite/sqlspec.go000066400000000000000000000214411431455511600167340ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package sqlite import ( "fmt" "reflect" "strconv" "strings" "ariga.io/atlas/schemahcl" "ariga.io/atlas/sql/internal/specutil" "ariga.io/atlas/sql/internal/sqlx" "ariga.io/atlas/sql/schema" "ariga.io/atlas/sql/sqlspec" "github.com/hashicorp/hcl/v2/hclparse" ) // evalSpec evaluates an Atlas DDL document using an unmarshaler into v by using the input. func evalSpec(p *hclparse.Parser, v any, input map[string]string) error { var d doc if err := hclState.Eval(p, &d, input); err != nil { return err } switch v := v.(type) { case *schema.Realm: err := specutil.Scan(v, d.Schemas, d.Tables, convertTable) if err != nil { return fmt.Errorf("specutil: failed converting to *schema.Realm: %w", err) } case *schema.Schema: if len(d.Schemas) != 1 { return fmt.Errorf("specutil: expecting document to contain a single schema, got %d", len(d.Schemas)) } var r schema.Realm if err := specutil.Scan(&r, d.Schemas, d.Tables, convertTable); err != nil { return err } r.Schemas[0].Realm = nil *v = *r.Schemas[0] default: return fmt.Errorf("specutil: failed unmarshaling spec. %T is not supported", v) } return nil } // MarshalSpec marshals v into an Atlas DDL document using a schemahcl.Marshaler. func MarshalSpec(v any, marshaler schemahcl.Marshaler) ([]byte, error) { return specutil.Marshal(v, marshaler, schemaSpec) } // convertTable converts a sqlspec.Table to a schema.Table. Table conversion is done without converting // ForeignKeySpecs into ForeignKeys, as the target tables do not necessarily exist in the schema // at this point. Instead, the linking is done by the convertSchema function. func convertTable(spec *sqlspec.Table, parent *schema.Schema) (*schema.Table, error) { return specutil.Table(spec, parent, convertColumn, specutil.PrimaryKey, convertIndex, specutil.Check) } // convertIndex converts a sqlspec.Index into a schema.Index. 
func convertIndex(spec *sqlspec.Index, t *schema.Table) (*schema.Index, error) { idx, err := specutil.Index(spec, t) if err != nil { return nil, err } if attr, ok := spec.Attr("where"); ok { p, err := attr.String() if err != nil { return nil, err } idx.Attrs = append(idx.Attrs, &IndexPredicate{P: p}) } return idx, nil } // convertColumn converts a sqlspec.Column into a schema.Column. func convertColumn(spec *sqlspec.Column, _ *schema.Table) (*schema.Column, error) { c, err := specutil.Column(spec, convertColumnType) if err != nil { return nil, err } if attr, ok := spec.Attr("auto_increment"); ok { b, err := attr.Bool() if err != nil { return nil, err } if b { c.AddAttrs(&AutoIncrement{}) } } if err := specutil.ConvertGenExpr(spec.Remain(), c, storedOrVirtual); err != nil { return nil, err } return c, nil } // convertColumnType converts a sqlspec.Column into a concrete SQLite schema.Type. func convertColumnType(spec *sqlspec.Column) (schema.Type, error) { return TypeRegistry.Type(spec.Type, spec.Extra.Attrs) } // schemaSpec converts from a concrete SQLite schema to Atlas specification. func schemaSpec(schem *schema.Schema) (*sqlspec.Schema, []*sqlspec.Table, error) { return specutil.FromSchema(schem, tableSpec) } // tableSpec converts from a concrete SQLite sqlspec.Table to a schema.Table. func tableSpec(tab *schema.Table) (*sqlspec.Table, error) { return specutil.FromTable( tab, columnSpec, specutil.FromPrimaryKey, indexSpec, specutil.FromForeignKey, specutil.FromCheck, ) } func indexSpec(idx *schema.Index) (*sqlspec.Index, error) { spec, err := specutil.FromIndex(idx) if err != nil { return nil, err } if i := (IndexPredicate{}); sqlx.Has(idx.Attrs, &i) && i.P != "" { spec.Extra.Attrs = append(spec.Extra.Attrs, specutil.VarAttr("where", strconv.Quote(i.P))) } return spec, nil } // columnSpec converts from a concrete SQLite schema.Column into a sqlspec.Column. 
func columnSpec(c *schema.Column, _ *schema.Table) (*sqlspec.Column, error) { s, err := specutil.FromColumn(c, columnTypeSpec) if err != nil { return nil, err } if sqlx.Has(c.Attrs, &AutoIncrement{}) { s.Extra.Attrs = append(s.Extra.Attrs, specutil.BoolAttr("auto_increment", true)) } if x := (schema.GeneratedExpr{}); sqlx.Has(c.Attrs, &x) { s.Extra.Children = append(s.Extra.Children, specutil.FromGenExpr(x, storedOrVirtual)) } return s, nil } // columnTypeSpec converts from a concrete MySQL schema.Type into sqlspec.Column Type. func columnTypeSpec(t schema.Type) (*sqlspec.Column, error) { st, err := TypeRegistry.Convert(t) if err != nil { return nil, err } return &sqlspec.Column{Type: st}, nil } // TypeRegistry contains the supported TypeSpecs for the sqlite driver. var TypeRegistry = schemahcl.NewRegistry( schemahcl.WithFormatter(FormatType), schemahcl.WithParser(ParseType), schemahcl.WithSpecs( schemahcl.NewTypeSpec(TypeReal, schemahcl.WithAttributes(&schemahcl.TypeAttr{Name: "precision", Kind: reflect.Int, Required: false}, &schemahcl.TypeAttr{Name: "scale", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec(TypeBlob, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeText, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec(TypeInteger, schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("int", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("tinyint", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("smallint", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("mediumint", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("bigint", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.AliasTypeSpec("unsigned_big_int", "unsigned big int", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("int2", 
schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("int8", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("double", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.AliasTypeSpec("double_precision", "double precision", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("float", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("character", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("varchar", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.AliasTypeSpec("varying_character", "varying character", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("nchar", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.AliasTypeSpec("native_character", "native character", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("nvarchar", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("clob", schemahcl.WithAttributes(schemahcl.SizeTypeAttr(false))), schemahcl.NewTypeSpec("numeric", schemahcl.WithAttributes(&schemahcl.TypeAttr{Name: "precision", Kind: reflect.Int, Required: false}, &schemahcl.TypeAttr{Name: "scale", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec("decimal", schemahcl.WithAttributes(&schemahcl.TypeAttr{Name: "precision", Kind: reflect.Int, Required: false}, &schemahcl.TypeAttr{Name: "scale", Kind: reflect.Int, Required: false})), schemahcl.NewTypeSpec("bool"), schemahcl.NewTypeSpec("boolean"), schemahcl.NewTypeSpec("date"), schemahcl.NewTypeSpec("datetime"), schemahcl.NewTypeSpec("json"), schemahcl.NewTypeSpec("uuid"), ), ) var ( hclState = schemahcl.New( schemahcl.WithTypes(TypeRegistry.Specs()), schemahcl.WithScopedEnums("table.column.as.type", stored, virtual), schemahcl.WithScopedEnums("table.foreign_key.on_update", specutil.ReferenceVars...), 
schemahcl.WithScopedEnums("table.foreign_key.on_delete", specutil.ReferenceVars...), ) // MarshalHCL marshals v into an Atlas HCL DDL document. MarshalHCL = schemahcl.MarshalerFunc(func(v any) ([]byte, error) { return MarshalSpec(v, hclState) }) // EvalHCL implements the schemahcl.Evaluator interface. EvalHCL = schemahcl.EvalFunc(evalSpec) // EvalHCLBytes is a helper that evaluates an HCL document from a byte slice instead // of from an hclparse.Parser instance. EvalHCLBytes = specutil.HCLBytesFunc(EvalHCL) ) // storedOrVirtual returns a STORED or VIRTUAL // generated type option based on the given string. func storedOrVirtual(s string) string { if s = strings.ToUpper(s); s == "" { return virtual } return s } type doc struct { Tables []*sqlspec.Table `spec:"table"` Schemas []*sqlspec.Schema `spec:"schema"` } atlas-0.7.2/sql/sqlite/sqlspec_test.go000066400000000000000000000204301431455511600177700ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package sqlite import ( "fmt" "testing" "ariga.io/atlas/sql/internal/spectest" "ariga.io/atlas/sql/schema" "github.com/stretchr/testify/require" ) func TestSQLSpec(t *testing.T) { f := ` schema "schema" { } table "table" { schema = schema.schema column "id" { type = integer auto_increment = true } column "age" { type = integer } column "price" { type = integer } column "account_name" { type = varchar(32) } primary_key { columns = [table.table.column.id] } index "index" { unique = true columns = [ table.table.column.id, table.table.column.age, ] where = "age <> 0" } foreign_key "accounts" { columns = [ table.table.column.account_name, ] ref_columns = [ table.accounts.column.name, ] on_delete = SET_NULL } check "positive price" { expr = "price > 0" } } table "accounts" { schema = schema.schema column "name" { type = varchar(32) } primary_key { columns = [table.accounts.column.name] } } ` exp := &schema.Schema{ Name: "schema", } exp.Tables = []*schema.Table{ { Name: "table", Schema: exp, Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{ Type: &schema.IntegerType{ T: "integer", }, }, Attrs: []schema.Attr{ &AutoIncrement{}, }, }, { Name: "age", Type: &schema.ColumnType{ Type: &schema.IntegerType{ T: "integer", }, }, }, { Name: "price", Type: &schema.ColumnType{ Type: &schema.IntegerType{ T: "integer", }, }, }, { Name: "account_name", Type: &schema.ColumnType{ Type: &schema.StringType{ T: "varchar", Size: 32, }, }, }, }, Attrs: []schema.Attr{ &schema.Check{ Name: "positive price", Expr: "price > 0", }, }, }, { Name: "accounts", Schema: exp, Columns: []*schema.Column{ { Name: "name", Type: &schema.ColumnType{ Type: &schema.StringType{ T: "varchar", Size: 32, }, }, }, }, }, } exp.Tables[0].PrimaryKey = &schema.Index{ Table: exp.Tables[0], Parts: []*schema.IndexPart{ {SeqNo: 0, C: exp.Tables[0].Columns[0]}, }, } exp.Tables[0].Indexes = []*schema.Index{ { Name: "index", Table: exp.Tables[0], Unique: true, Parts: []*schema.IndexPart{ {SeqNo: 0, C: 
exp.Tables[0].Columns[0]}, {SeqNo: 1, C: exp.Tables[0].Columns[1]}, }, Attrs: []schema.Attr{ &IndexPredicate{P: "age <> 0"}, }, }, } exp.Tables[0].ForeignKeys = []*schema.ForeignKey{ { Symbol: "accounts", Table: exp.Tables[0], Columns: []*schema.Column{exp.Tables[0].Columns[3]}, RefTable: exp.Tables[1], RefColumns: []*schema.Column{exp.Tables[1].Columns[0]}, OnDelete: schema.SetNull, }, } exp.Tables[1].PrimaryKey = &schema.Index{ Table: exp.Tables[1], Parts: []*schema.IndexPart{ {SeqNo: 0, C: exp.Tables[1].Columns[0]}, }, } var s schema.Schema err := EvalHCLBytes([]byte(f), &s, nil) require.NoError(t, err) require.EqualValues(t, exp, &s) } func TestMarshalSpec_AutoIncrement(t *testing.T) { s := &schema.Schema{ Name: "test", Tables: []*schema.Table{ { Name: "users", Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}, Attrs: []schema.Attr{ &AutoIncrement{}, }, }, }, }, }, } s.Tables[0].Schema = s buf, err := MarshalSpec(s, hclState) require.NoError(t, err) const expected = `table "users" { schema = schema.test column "id" { null = false type = int auto_increment = true } } schema "test" { } ` require.EqualValues(t, expected, string(buf)) } func TestMarshalSpec_IndexPredicate(t *testing.T) { s := &schema.Schema{ Name: "test", Tables: []*schema.Table{ { Name: "users", Columns: []*schema.Column{ { Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}, Attrs: []schema.Attr{ &AutoIncrement{}, }, }, }, }, }, } s.Tables[0].Schema = s s.Tables[0].Schema = s s.Tables[0].Indexes = []*schema.Index{ { Name: "index", Table: s.Tables[0], Unique: true, Parts: []*schema.IndexPart{ {SeqNo: 0, C: s.Tables[0].Columns[0]}, }, Attrs: []schema.Attr{ &IndexPredicate{P: "id <> 0"}, }, }, } buf, err := MarshalSpec(s, hclState) require.NoError(t, err) const expected = `table "users" { schema = schema.test column "id" { null = false type = int auto_increment = true } index "index" { unique = true columns = [column.id] 
where = "id <> 0" } } schema "test" { } ` require.EqualValues(t, expected, string(buf)) } func TestTypes(t *testing.T) { for _, tt := range []struct { typeExpr string expected schema.Type }{ { typeExpr: "integer(10)", expected: &schema.IntegerType{T: "integer"}, }, { typeExpr: "int(10)", expected: &schema.IntegerType{T: "int"}, }, { typeExpr: `sql("custom")`, expected: &schema.UnsupportedType{T: "custom"}, }, { typeExpr: "tinyint(10)", expected: &schema.IntegerType{T: "tinyint"}, }, { typeExpr: "smallint(10)", expected: &schema.IntegerType{T: "smallint"}, }, { typeExpr: "mediumint(10)", expected: &schema.IntegerType{T: "mediumint"}, }, { typeExpr: "bigint(10)", expected: &schema.IntegerType{T: "bigint"}, }, { typeExpr: "unsigned_big_int(10)", expected: &schema.IntegerType{T: "unsigned big int"}, }, { typeExpr: "int2(10)", expected: &schema.IntegerType{T: "int2"}, }, { typeExpr: "int8(10)", expected: &schema.IntegerType{T: "int8"}, }, { typeExpr: "real", expected: &schema.FloatType{T: "real"}, }, { typeExpr: "double", expected: &schema.FloatType{T: "double"}, }, { typeExpr: "double_precision", expected: &schema.FloatType{T: "double precision"}, }, { typeExpr: "float(10)", expected: &schema.FloatType{T: "float"}, }, { typeExpr: "text(10)", expected: &schema.StringType{T: "text", Size: 10}, }, { typeExpr: "character(10)", expected: &schema.StringType{T: "character", Size: 10}, }, { typeExpr: "varchar(10)", expected: &schema.StringType{T: "varchar", Size: 10}, }, { typeExpr: "varying_character", expected: &schema.StringType{T: "varying character"}, }, { typeExpr: "nchar(10)", expected: &schema.StringType{T: "nchar", Size: 10}, }, { typeExpr: "native_character", expected: &schema.StringType{T: "native character"}, }, { typeExpr: "nvarchar(10)", expected: &schema.StringType{T: "nvarchar", Size: 10}, }, { typeExpr: "clob(10)", expected: &schema.StringType{T: "clob", Size: 10}, }, { typeExpr: "blob(10)", expected: &schema.BinaryType{T: "blob"}, }, { typeExpr: 
"numeric(10)", expected: &schema.DecimalType{T: "numeric", Precision: 10}, }, { typeExpr: "decimal(10,5)", expected: &schema.DecimalType{T: "decimal", Precision: 10, Scale: 5}, }, { typeExpr: "boolean", expected: &schema.BoolType{T: "boolean"}, }, { typeExpr: "date", expected: &schema.TimeType{T: "date"}, }, { typeExpr: "datetime", expected: &schema.TimeType{T: "datetime"}, }, { typeExpr: "json", expected: &schema.JSONType{T: "json"}, }, { typeExpr: "uuid", expected: &UUIDType{T: "uuid"}, }, } { t.Run(tt.typeExpr, func(t *testing.T) { var test schema.Schema doc := fmt.Sprintf(`table "test" { schema = schema.test column "test" { null = false type = %s } } schema "test" { } `, tt.typeExpr) err := EvalHCLBytes([]byte(doc), &test, nil) require.NoError(t, err) colspec := test.Tables[0].Columns[0] require.EqualValues(t, tt.expected, colspec.Type.Type) spec, err := MarshalSpec(&test, hclState) require.NoError(t, err) var after schema.Schema err = EvalHCLBytes(spec, &after, nil) require.NoError(t, err) require.EqualValues(t, tt.expected, after.Tables[0].Columns[0].Type.Type) }) } } func TestInputVars(t *testing.T) { spectest.TestInputVars(t, EvalHCL) } atlas-0.7.2/sql/sqlspec/000077500000000000000000000000001431455511600151025ustar00rootroot00000000000000atlas-0.7.2/sql/sqlspec/sqlspec.go000066400000000000000000000047641431455511600171160ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqlspec import ( "ariga.io/atlas/schemahcl" ) type ( // Schema holds a specification for a Schema. Schema struct { Name string `spec:"name,name"` schemahcl.DefaultExtension } // Table holds a specification for an SQL table. 
Table struct { Name string `spec:",name"` Qualifier string `spec:",qualifier"` Schema *schemahcl.Ref `spec:"schema"` Columns []*Column `spec:"column"` PrimaryKey *PrimaryKey `spec:"primary_key"` ForeignKeys []*ForeignKey `spec:"foreign_key"` Indexes []*Index `spec:"index"` Checks []*Check `spec:"check"` schemahcl.DefaultExtension } // Column holds a specification for a column in an SQL table. Column struct { Name string `spec:",name"` Null bool `spec:"null"` Type *schemahcl.Type `spec:"type"` Default schemahcl.Value `spec:"default"` schemahcl.DefaultExtension } // PrimaryKey holds a specification for the primary key of a table. PrimaryKey struct { Columns []*schemahcl.Ref `spec:"columns"` schemahcl.DefaultExtension } // Index holds a specification for the index key of a table. Index struct { Name string `spec:",name"` Unique bool `spec:"unique,omitempty"` Parts []*IndexPart `spec:"on"` Columns []*schemahcl.Ref `spec:"columns"` schemahcl.DefaultExtension } // IndexPart holds a specification for the index key part. IndexPart struct { Desc bool `spec:"desc,omitempty"` Column *schemahcl.Ref `spec:"column"` Expr string `spec:"expr,omitempty"` schemahcl.DefaultExtension } // Check holds a specification for a check constraint on a table. Check struct { Name string `spec:",name"` Expr string `spec:"expr"` schemahcl.DefaultExtension } // ForeignKey holds a specification for the Foreign key of a table. ForeignKey struct { Symbol string `spec:",name"` Columns []*schemahcl.Ref `spec:"columns"` RefColumns []*schemahcl.Ref `spec:"ref_columns"` OnUpdate *schemahcl.Ref `spec:"on_update"` OnDelete *schemahcl.Ref `spec:"on_delete"` schemahcl.DefaultExtension } // Type represents a database agnostic column type. 
Type string ) func init() { schemahcl.Register("table", &Table{}) schemahcl.Register("schema", &Schema{}) } atlas-0.7.2/sql/sqltool/000077500000000000000000000000001431455511600151255ustar00rootroot00000000000000atlas-0.7.2/sql/sqltool/doc.go000066400000000000000000000005031431455511600162170ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. // Package sqltool contains logic to integrate existing tools like Flyway or Liquibase with the Atlas CLI. package sqltool atlas-0.7.2/sql/sqltool/hidden.go000066400000000000000000000005341431455511600167110ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. //go:build !windows package sqltool import "path/filepath" func hidden(path string) (bool, error) { return filepath.Base(path)[0] == '.', nil } atlas-0.7.2/sql/sqltool/hidden_windows.go000066400000000000000000000011171431455511600204610ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. // This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. 
package sqltool import ( "path/filepath" "syscall" ) func hidden(path string) (bool, error) { abs, err := filepath.Abs(path) if err != nil { return false, err } p, err := syscall.UTF16PtrFromString(abs) if err != nil { return false, err } attr, err := syscall.GetFileAttributes(p) if err != nil { return false, err } return attr&syscall.FILE_ATTRIBUTE_HIDDEN != 0, nil } atlas-0.7.2/sql/sqltool/testdata/000077500000000000000000000000001431455511600167365ustar00rootroot00000000000000atlas-0.7.2/sql/sqltool/testdata/dbmate/000077500000000000000000000000001431455511600201725ustar00rootroot00000000000000atlas-0.7.2/sql/sqltool/testdata/dbmate/1_initial.sql000066400000000000000000000004531431455511600225660ustar00rootroot00000000000000-- migrate:up CREATE TABLE post ( id int NOT NULL, title text, body text, PRIMARY KEY (id) ); /* Multiline comment ... */ ALTER TABLE post ADD created_at TIMESTAMP NOT NULL; INSERT INTO post (title) VALUES ( 'This is my multiline value'); -- migrate:down DROP TABLE post;atlas-0.7.2/sql/sqltool/testdata/dbmate/2_second_migration.sql000066400000000000000000000000621431455511600244560ustar00rootroot00000000000000 -- migrate:up CREATE TABLE tbl_2 (col INT);atlas-0.7.2/sql/sqltool/testdata/flyway/000077500000000000000000000000001431455511600202515ustar00rootroot00000000000000atlas-0.7.2/sql/sqltool/testdata/flyway/B2__baseline.sql000066400000000000000000000003331431455511600232350ustar00rootroot00000000000000CREATE TABLE post ( id int NOT NULL, title text, body text, created_at TIMESTAMP NOT NULL PRIMARY KEY (id) ); INSERT INTO post (title, created_at) VALUES ( 'This is my multiline value', NOW());atlas-0.7.2/sql/sqltool/testdata/flyway/R__views.sql000066400000000000000000000000561431455511600225500ustar00rootroot00000000000000CREATE VIEW `my_view` AS SELECT * FROM `post`;atlas-0.7.2/sql/sqltool/testdata/flyway/U1__initial.sql000066400000000000000000000000171431455511600231250ustar00rootroot00000000000000DROP TABLE 
tbl;atlas-0.7.2/sql/sqltool/testdata/flyway/V1__initial.sql000066400000000000000000000003721431455511600231320ustar00rootroot00000000000000-- comment CREATE TABLE post ( id int NOT NULL, title text, body text, PRIMARY KEY (id) ); ALTER TABLE post ADD created_at TIMESTAMP NOT NULL; INSERT INTO post (title, created_at) VALUES ( 'This is my multiline value', NOW()); atlas-0.7.2/sql/sqltool/testdata/flyway/V2__second_migration.sql000066400000000000000000000000621431455511600250220ustar00rootroot00000000000000 -- migrate:up CREATE TABLE tbl_2 (col INT);atlas-0.7.2/sql/sqltool/testdata/flyway/V3__third_migration.sql000066400000000000000000000000551431455511600246640ustar00rootroot00000000000000ALTER TABLE tbl_2 ADD col_1 INTEGER NOT NULL;atlas-0.7.2/sql/sqltool/testdata/golang-migrate/000077500000000000000000000000001431455511600216335ustar00rootroot00000000000000atlas-0.7.2/sql/sqltool/testdata/golang-migrate/1_initial.down.sql000066400000000000000000000000171431455511600251710ustar00rootroot00000000000000DROP TABLE tbl;atlas-0.7.2/sql/sqltool/testdata/golang-migrate/1_initial.up.sql000066400000000000000000000000411431455511600246430ustar00rootroot00000000000000CREATE TABLE tbl ( col INT );atlas-0.7.2/sql/sqltool/testdata/golang-migrate/2_second_migration.down.sql000066400000000000000000000000211431455511600270600ustar00rootroot00000000000000DROP TABLE tbl_2;atlas-0.7.2/sql/sqltool/testdata/golang-migrate/2_second_migration.up.sql000066400000000000000000000000351431455511600265420ustar00rootroot00000000000000CREATE TABLE tbl_2 (col INT);atlas-0.7.2/sql/sqltool/testdata/goose/000077500000000000000000000000001431455511600200525ustar00rootroot00000000000000atlas-0.7.2/sql/sqltool/testdata/goose/1_initial.sql000066400000000000000000000004111431455511600224400ustar00rootroot00000000000000-- +goose Up CREATE TABLE post ( id int NOT NULL, title text, body text, PRIMARY KEY (id) ); ALTER TABLE post ADD created_at TIMESTAMP NOT NULL; INSERT INTO post (title) VALUES ( 'This is my 
multiline value'); -- +goose Down DROP TABLE post;atlas-0.7.2/sql/sqltool/testdata/goose/2_second_migration.sql000066400000000000000000000014371431455511600243450ustar00rootroot00000000000000 -- +goose Up ALTER TABLE post ADD updated_at TIMESTAMP NOT NULL; -- +goose StatementBegin -- Comment for the function declaration. CREATE OR REPLACE FUNCTION histories_partition_creation( DATE, DATE ) returns void AS $$ DECLARE create_query text; BEGIN FOR create_query IN SELECT 'CREATE TABLE IF NOT EXISTS histories_' || TO_CHAR(d, 'YYYY_MM') || ' ( CHECK( created_at >= timestamp ''' || TO_CHAR(d, 'YYYY-MM-DD 00:00:00') || ''' AND created_at < timestamp ''' || TO_CHAR(d + INTERVAL '1 month', 'YYYY-MM-DD 00:00:00') || ''' ) ) inherits ( histories );' FROM generate_series($1, $2, '1 month') AS d LOOP EXECUTE create_query; END LOOP; -- LOOP END END; -- FUNCTION END $$ language plpgsql; -- +goose StatementEndatlas-0.7.2/sql/sqltool/testdata/liquibase/000077500000000000000000000000001431455511600207145ustar00rootroot00000000000000atlas-0.7.2/sql/sqltool/testdata/liquibase/1_initial.sql000066400000000000000000000006041431455511600233060ustar00rootroot00000000000000--liquibase formatted sql --changeset atlas:1-1 CREATE TABLE post ( id int NOT NULL, title text, body text, PRIMARY KEY (id) ); --rollback: DROP TABLE post; --changeset atlas:1-2 ALTER TABLE post ADD created_at TIMESTAMP NOT NULL; --rollback: ALTER TABLE post DROP created_at; --changeset atlas:1-3 INSERT INTO post (title) VALUES ( 'This is my multiline value'); atlas-0.7.2/sql/sqltool/testdata/liquibase/2_second_migration.sql000066400000000000000000000001541431455511600252020ustar00rootroot00000000000000--liquibase formatted sql --changeset atlas:2-1 CREATE TABLE tbl_2 (col INT); --rollback DROP TABLE tbl_2; atlas-0.7.2/sql/sqltool/tool.go000066400000000000000000000376601431455511600164450ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved. 
// This source code is licensed under the Apache 2.0 license found // in the LICENSE file in the root directory of this source tree. package sqltool import ( "bufio" "bytes" "fmt" "io/fs" "path/filepath" "regexp" "sort" "strings" "text/template" "time" "unicode" "ariga.io/atlas/sql/migrate" ) var ( // GolangMigrateFormatter returns migrate.Formatter compatible with golang-migrate/migrate. GolangMigrateFormatter = templateFormatter( "{{ now }}{{ with .Name }}_{{ . }}{{ end }}.up.sql", `{{ range .Changes }}{{ with .Comment }}-- {{ println . }}{{ end }}{{ printf "%s;\n" .Cmd }}{{ end }}`, "{{ now }}{{ with .Name }}_{{ . }}{{ end }}.down.sql", `{{ range rev .Changes }}{{ if .Reverse }}{{ with .Comment }}-- reverse: {{ println . }}{{ end }}{{ printf "%s;\n" .Reverse }}{{ end }}{{ end }}`, ) // GooseFormatter returns migrate.Formatter compatible with pressly/goose. GooseFormatter = templateFormatter( "{{ now }}{{ with .Name }}_{{ . }}{{ end }}.sql", `-- +goose Up {{ range .Changes }}{{ with .Comment }}-- {{ println . }}{{ end }}{{ printf "%s;\n" .Cmd }}{{ end }} -- +goose Down {{ range rev .Changes }}{{ if .Reverse }}{{ with .Comment }}-- reverse: {{ println . }}{{ end }}{{ printf "%s;\n" .Reverse }}{{ end }}{{ end }}`, ) // FlywayFormatter returns migrate.Formatter compatible with Flyway. FlywayFormatter = templateFormatter( "V{{ now }}{{ with .Name }}__{{ . }}{{ end }}.sql", `{{ range .Changes }}{{ with .Comment }}-- {{ println . }}{{ end }}{{ printf "%s;\n" .Cmd }}{{ end }}`, "U{{ now }}{{ with .Name }}__{{ . }}{{ end }}.sql", `{{ range rev .Changes }}{{ if .Reverse }}{{ with .Comment }}-- reverse: {{ println . }}{{ end }}{{ printf "%s;\n" .Reverse }}{{ end }}{{ end }}`, ) // LiquibaseFormatter returns migrate.Formatter compatible with Liquibase. LiquibaseFormatter = templateFormatter( "{{ now }}{{ with .Name }}_{{ . 
}}{{ end }}.sql", `{{- $now := now -}} --liquibase formatted sql {{- range $index, $change := .Changes }} --changeset atlas:{{ $now }}-{{ inc $index }} {{ with $change.Comment }}--comment: {{ . }}{{ end }} {{ $change.Cmd }}; {{ with $change.Reverse }}--rollback: {{ . }};{{ end }} {{ end }}`, ) // DBMateFormatter returns migrate.Formatter compatible with amacneil/dbmate. DBMateFormatter = templateFormatter( "{{ now }}{{ with .Name }}_{{ . }}{{ end }}.sql", `-- migrate:up {{ range .Changes }}{{ with .Comment }}-- {{ println . }}{{ end }}{{ printf "%s;\n" .Cmd }}{{ end }} -- migrate:down {{ range rev .Changes }}{{ if .Reverse }}{{ with .Comment }}-- reverse: {{ println . }}{{ end }}{{ printf "%s;\n" .Reverse }}{{ end }}{{ end }}`, ) // DbmateFormatter is the same as DBMateFormatter. // Deprecated: Use DBMateFormatter instead. DbmateFormatter = DBMateFormatter ) type ( // GolangMigrateDir wraps migrate.LocalDir and provides a migrate.Scanner implementation able to understand files // generated by the GolangMigrateFormatter for migration directory replaying. GolangMigrateDir struct{ *migrate.LocalDir } // GolangMigrateFile wraps migrate.LocalFile with custom description function. GolangMigrateFile struct{ *migrate.LocalFile } ) // NewGolangMigrateDir returns a new GolangMigrateDir. func NewGolangMigrateDir(path string) (*GolangMigrateDir, error) { dir, err := migrate.NewLocalDir(path) if err != nil { return nil, err } return &GolangMigrateDir{dir}, nil } // Files implements Scanner.Files. It looks for all files with up.sql suffix and orders them by filename. func (d *GolangMigrateDir) Files() ([]migrate.File, error) { names, err := fs.Glob(d, "*.up.sql") if err != nil { return nil, err } // Sort files lexicographically. 
sort.Slice(names, func(i, j int) bool { return names[i] < names[j] }) ret := make([]migrate.File, len(names)) for i, n := range names { b, err := fs.ReadFile(d, n) if err != nil { return nil, fmt.Errorf("sql/migrate: read file %q: %w", n, err) } ret[i] = &GolangMigrateFile{LocalFile: migrate.NewLocalFile(n, b)} } return ret, nil } // Desc implements File.Desc. func (f *GolangMigrateFile) Desc() string { return strings.TrimSuffix(f.LocalFile.Desc(), ".up") } type ( // GooseDir wraps migrate.LocalDir and provides a migrate.Scanner implementation able to understand files // generated by the GooseFormatter for migration directory replaying. GooseDir struct{ *migrate.LocalDir } // GooseFile wraps migrate.LocalFile with custom statements function. GooseFile struct{ *migrate.LocalFile } ) // NewGooseDir returns a new GooseDir. func NewGooseDir(path string) (*GooseDir, error) { dir, err := migrate.NewLocalDir(path) if err != nil { return nil, err } return &GooseDir{dir}, nil } // Files looks for all files with .sql suffix and orders them by filename. func (d *GooseDir) Files() ([]migrate.File, error) { files, err := d.LocalDir.Files() if err != nil { return nil, err } for i, f := range files { files[i] = &GooseFile{f.(*migrate.LocalFile)} } return files, nil } // StmtDecls understands the migration format used by pressly/goose sql migration files. func (f *GooseFile) StmtDecls() ([]*migrate.Stmt, error) { // Atlas custom delimiter is per file, goose has pragma do mark start and end of a delimiter. // In order to use the Atlas lexer, we define a custom delimiter for the source SQL and edit it to use the // custom delimiter. const delim = "-- ATLAS_DELIM_END" var ( state, lineCount int lines = []string{"-- atlas:delimiter " + delim, ""} sc = bufio.NewScanner(bytes.NewReader(f.Bytes())) ) Scan: for sc.Scan() { lineCount++ line := sc.Text() // Handle goose custom delimiters. 
if strings.HasPrefix(line, goosePragma) { switch strings.TrimSpace(strings.TrimPrefix(line, goosePragma)) { case "Up": switch state { case none: // found the "up" part of the file state = up default: return nil, unexpectedPragmaErr(f, lineCount, "Up") } case "Down": switch state { case up: // found the "down" part break Scan default: return nil, unexpectedPragmaErr(f, lineCount, "Down") } case "StatementBegin": switch state { case up: state = begin // begin of a statement default: return nil, unexpectedPragmaErr(f, lineCount, "StatementBegin") } case "StatementEnd": switch state { case begin: state = end // end of a statement default: return nil, unexpectedPragmaErr(f, lineCount, "StatementEnd") } } } // Write the line of the statement. if !reGoosePragma.MatchString(line) && state != end { // end of statement if line ends with semicolon line = strings.TrimRightFunc(line, unicode.IsSpace) lines = append(lines, line) if state == up && strings.HasSuffix(line, ";") && !strings.HasPrefix(line, "--") { lines = append(lines, delim) } } if state == end { state = up lines = append(lines, delim) } } return migrate.Stmts(strings.Join(lines, "\n")) } // Stmts understands the migration format used by pressly/goose sql migration files. func (f *GooseFile) Stmts() ([]string, error) { s, err := f.StmtDecls() if err != nil { return nil, err } stmts := make([]string, len(s)) for i := range s { stmts[i] = s[i].Text } return stmts, nil } type ( // DBMateDir wraps migrate.LocalDir and provides a migrate.Scanner implementation able to understand files // generated by the DBMateFormatter for migration directory replaying. DBMateDir struct{ *migrate.LocalDir } // DBMateFile wraps migrate.LocalFile with custom statements function. DBMateFile struct{ *migrate.LocalFile } ) // NewDBMateDir returns a new DBMateDir. 
func NewDBMateDir(path string) (*DBMateDir, error) { dir, err := migrate.NewLocalDir(path) if err != nil { return nil, err } return &DBMateDir{dir}, nil } // Files looks for all files with up.sql suffix and orders them by filename. func (d *DBMateDir) Files() ([]migrate.File, error) { files, err := d.LocalDir.Files() if err != nil { return nil, err } for i, f := range files { files[i] = &DBMateFile{f.(*migrate.LocalFile)} } return files, nil } // StmtDecls understands the migration format used by amacneil/dbmate sql migration files. func (f *DBMateFile) StmtDecls() ([]*migrate.Stmt, error) { var ( state, lineCount int lines []string sc = bufio.NewScanner(bytes.NewReader(f.Bytes())) ) Scan: for sc.Scan() { lineCount++ line := sc.Text() // Handle pragmas. if strings.HasPrefix(line, dbmatePragma) { switch strings.TrimSpace(strings.TrimPrefix(line, dbmatePragma)) { case "up": state = up case "down": break Scan } } // Write the line of the statement. if !reDBMatePragma.MatchString(line) && state == up { lines = append(lines, line) } } return migrate.Stmts(strings.Join(lines, "\n")) } // Stmts understands the migration format used by amacneil/dbmate sql migration files. func (f *DBMateFile) Stmts() ([]string, error) { s, err := f.StmtDecls() if err != nil { return nil, err } stmts := make([]string, len(s)) for i := range s { stmts[i] = s[i].Text } return stmts, nil } type ( // FlywayDir wraps migrate.LocalDir and provides a migrate.Scanner implementation able to understand files // generated by the FlywayFormatter for migration directory replaying. FlywayDir struct{ *migrate.LocalDir } // FlywayFile wraps migrate.LocalFile with custom statements function. FlywayFile struct{ *migrate.LocalFile } ) // NewFlywayDir returns a new FlywayDir. func NewFlywayDir(path string) (*FlywayDir, error) { dir, err := migrate.NewLocalDir(path) if err != nil { return nil, err } return &FlywayDir{dir}, nil } // Files implements Scanner.Files. It looks for all files with .sql suffix. 
The given directory is recursively scanned // for non-hidden subdirectories. All found files will be ordered by migration type (Baseline, Versioned, Repeatable) // and filename. func (d *FlywayDir) Files() ([]migrate.File, error) { var ff flywayFiles if err := fs.WalkDir(d, "", func(path string, e fs.DirEntry, err error) error { if err != nil { return err } if path != "" && e.IsDir() { h, err := hidden(filepath.Join(d.Path(), path)) if err != nil { return err } if h { return fs.SkipDir } return nil } var ( pfx = e.Name()[0] base = filepath.Base(e.Name()) ext = filepath.Ext(e.Name()) ) if ext != ".sql" || len(base) < 4 || (pfx != 'V' && pfx != 'B' && pfx != 'R') { return nil } return ff.add(path) }); err != nil { return nil, err } var ( names = ff.names() ret = make([]migrate.File, len(names)) ) for i, n := range names { b, err := fs.ReadFile(d, n) if err != nil { return nil, fmt.Errorf("sql/migrate: read file %q: %w", n, err) } ret[i] = &FlywayFile{migrate.NewLocalFile(n, b)} } return ret, nil } // Desc implements File.Desc. func (f FlywayFile) Desc() string { return flywayDesc(f.Name()) } // Version implements File.Version. func (f FlywayFile) Version() string { return flywayVersion(f.Name()) } // SetRepeatableVersion iterates over the migration files and assigns repeatable migrations a version number since // Atlas does not have the concept of repeatable migrations. Each repeatable migration file gets assigned the version // of the preceding migration file (or 0) followed by an 'R'. func SetRepeatableVersion(ff []migrate.File) { // First find the index of the first repeatable migration file (if any). var ( v string // last versioned migration version idx = func() int { for i, f := range ff { if f.Version() == "" { return i } } return -1 }() ) switch idx { case -1: // No repeatable migration does exist. return case 0: // There is no preceding migration. Use Version "0". 
v = "0" default: v = ff[idx-1].Version() } if v != "" { // Every migration file following the first repeatable found are repeatable as well. for i, f := range ff[idx:] { ff[idx+i] = &FlywayFile{migrate.NewLocalFile( fmt.Sprintf("V%sR__%s", v, f.Desc()), f.Bytes(), )} } } } // LiquibaseDir wraps migrate.LocalDir and provides a migrate.Scanner implementation able to understand files // generated by the LiquibaseFormatter for migration directory replaying. type LiquibaseDir struct{ *migrate.LocalDir } // NewLiquibaseDir returns a new LiquibaseDir. func NewLiquibaseDir(path string) (*LiquibaseDir, error) { d, err := migrate.NewLocalDir(path) if err != nil { return nil, err } return &LiquibaseDir{d}, nil } const ( none int = iota up begin end goosePragma = "-- +goose" dbmatePragma = "-- migrate:" ) var ( reGoosePragma = regexp.MustCompile(regexp.QuoteMeta(goosePragma) + " Up|Down|StatementBegin|StatementEnd") reDBMatePragma = regexp.MustCompile(dbmatePragma + "up|down") ) // flywayFiles retrieves flyway migration files by calls to add(). It will only keep the latest baseline and ignore // all versioned files that are included in that baseline. type flywayFiles struct { baseline string versioned []string repeatable []string } // add the given path to the migration files according to its type. The input directory is assumed to be valid // according to the Flyway documentation (no duplicate versions, etc.). func (ff *flywayFiles) add(path string) error { switch p := filepath.Base(path)[0]; p { case 'B': if ff.baseline != "" && flywayVersion(path) < flywayVersion(ff.baseline) { return nil } ff.baseline = path // In case we set a new baseline, remove all versioned files with a version smaller than the new baseline. 
var ( bv = flywayVersion(ff.baseline) vs []string ) for _, v := range ff.versioned { if v > bv { vs = append(vs, v) } } ff.versioned = vs return nil case 'V': v := flywayVersion(path) if ff.baseline == "" || flywayVersion(ff.baseline) < v { ff.versioned = append(ff.versioned, path) } return nil case 'R': ff.repeatable = append(ff.repeatable, path) return nil default: return fmt.Errorf("sql/sqltool: unexpected Flyway prefix %q", p) } } func (ff *flywayFiles) names() []string { var names []string if ff.baseline != "" { names = append(names, ff.baseline) } sort.Strings(ff.versioned) sort.Strings(ff.repeatable) names = append(names, ff.versioned...) names = append(names, ff.repeatable...) return names } func flywayDesc(path string) string { parts := strings.SplitN(path, "__", 2) if len(parts) == 1 { return "" } return strings.TrimSuffix(parts[1], ".sql") } func flywayVersion(path string) string { // Repeatable migrations don't have a version. if filepath.Base(path)[0] == 'R' { return "" } return strings.SplitN(strings.TrimSuffix(filepath.Base(path), ".sql"), "__", 2)[0][1:] } func unexpectedPragmaErr(f migrate.File, line int, pragma string) error { var tool string switch f := f.(type) { case *GooseFile: tool = "goose" case *DBMateFile: tool = "dbmate" default: return fmt.Errorf("sql/migrate: unexpected migration file type '%T'", f) } return fmt.Errorf( "sql/migrate: %s: %s:%d unexpected goosePragma '%s'", tool, f.Name(), line, pragma, ) } // funcs contains the template.FuncMap for the different formatters. var funcs = template.FuncMap{ "inc": func(x int) int { return x + 1 }, // now formats the current time in a lexicographically ascending order while maintaining human readability. "now": func() string { return time.Now().UTC().Format("20060102150405") }, "rev": reverse, } // templateFormatter parses the given templates and passes them on to the migrate.NewTemplateFormatter. 
func templateFormatter(templates ...string) migrate.Formatter {
	tpls := make([]*template.Template, len(templates))
	for i, t := range templates {
		tpls[i] = template.Must(template.New("").Funcs(funcs).Parse(t))
	}
	tf, err := migrate.NewTemplateFormatter(tpls...)
	if err != nil {
		// The templates are compile-time constants; a failure to build a
		// formatter from them is a programmer error, hence panic.
		panic(err)
	}
	return tf
}

// reverse changes for the down migration.
func reverse(changes []*migrate.Change) []*migrate.Change {
	// Returns a new slice with the changes in reverse order; for odd lengths
	// the middle element stays in place.
	n := len(changes)
	rev := make([]*migrate.Change, n)
	if n%2 == 1 {
		rev[n/2] = changes[n/2]
	}
	for i, j := 0, n-1; i < j; i, j = i+1, j-1 {
		rev[i], rev[j] = changes[j], changes[i]
	}
	return rev
}
atlas-0.7.2/sql/sqltool/tool_test.go000066400000000000000000000223241431455511600174730ustar00rootroot00000000000000// Copyright 2021-present The Atlas Authors. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.

package sqltool_test

import (
	"fmt"
	"io/fs"
	"testing"
	"time"

	"ariga.io/atlas/sql/migrate"
	"ariga.io/atlas/sql/sqltool"
	"github.com/stretchr/testify/require"
)

// plan is the shared migration plan written by every formatter under test.
var plan = &migrate.Plan{
	Name:       "tooling-plan",
	Reversible: true,
	Changes: []*migrate.Change{
		{Cmd: "CREATE TABLE t1(c int)", Reverse: "DROP TABLE t1 IF EXISTS", Comment: "create table t1"},
		{Cmd: "CREATE TABLE t2(c int)", Reverse: "DROP TABLE t2", Comment: "create table t2"},
	},
}

// TestFormatters verifies that each tool-specific formatter writes the plan
// using the file naming and layout conventions of the respective tool.
// NOTE(review): raw-string fixtures below were reconstructed from a
// whitespace-collapsed source; line breaks assumed one per separator.
func TestFormatters(t *testing.T) {
	v := time.Now().UTC().Format("20060102150405")
	for _, tt := range []struct {
		name     string
		fmt      migrate.Formatter
		expected map[string]string
	}{
		{
			"golang-migrate/migrate",
			sqltool.GolangMigrateFormatter,
			map[string]string{
				v + "_tooling-plan.up.sql": `-- create table t1
CREATE TABLE t1(c int);
-- create table t2
CREATE TABLE t2(c int);
`,
				v + "_tooling-plan.down.sql": `-- reverse: create table t2
DROP TABLE t2;
-- reverse: create table t1
DROP TABLE t1 IF EXISTS;
`,
			},
		},
		{
			"pressly/goose",
			sqltool.GooseFormatter,
			map[string]string{
				v + "_tooling-plan.sql": `-- +goose Up
-- create table t1
CREATE TABLE t1(c int);
-- create table t2
CREATE TABLE t2(c int);
-- +goose Down
-- reverse: create table t2
DROP TABLE t2;
-- reverse: create table t1
DROP TABLE t1 IF EXISTS;
`,
			},
		},
		{
			"flyway",
			sqltool.FlywayFormatter,
			map[string]string{
				"V" + v + "__tooling-plan.sql": `-- create table t1
CREATE TABLE t1(c int);
-- create table t2
CREATE TABLE t2(c int);
`,
				"U" + v + "__tooling-plan.sql": `-- reverse: create table t2
DROP TABLE t2;
-- reverse: create table t1
DROP TABLE t1 IF EXISTS;
`,
			},
		},
		{
			"liquibase",
			sqltool.LiquibaseFormatter,
			map[string]string{
				v + "_tooling-plan.sql": fmt.Sprintf(`--liquibase formatted sql
--changeset atlas:%s-1
--comment: create table t1
CREATE TABLE t1(c int);
--rollback: DROP TABLE t1 IF EXISTS;
--changeset atlas:%s-2
--comment: create table t2
CREATE TABLE t2(c int);
--rollback: DROP TABLE t2;
`, v, v),
			},
		},
		{
			"amacneil/dbmate",
			sqltool.DBMateFormatter,
			map[string]string{
				v + "_tooling-plan.sql": `-- migrate:up
-- create table t1
CREATE TABLE t1(c int);
-- create table t2
CREATE TABLE t2(c int);
-- migrate:down
-- reverse: create table t2
DROP TABLE t2;
-- reverse: create table t1
DROP TABLE t1 IF EXISTS;
`,
			},
		},
	} {
		t.Run(tt.name, func(t *testing.T) {
			// Write the plan into a fresh directory and compare the emitted
			// files with the expected contents (checksum file disabled).
			d := dir(t)
			pl := migrate.NewPlanner(nil, d, migrate.PlanFormat(tt.fmt), migrate.PlanWithChecksum(false))
			require.NotNil(t, pl)
			require.NoError(t, pl.WritePlan(plan))
			require.Equal(t, len(tt.expected), countFiles(t, d))
			for name, content := range tt.expected {
				requireFileEqual(t, d, name, content)
			}
		})
	}
}

// TestScanners verifies that each tool-specific directory implementation
// reports the expected versions, descriptions and statements from testdata.
func TestScanners(t *testing.T) {
	for _, tt := range []struct {
		name                   string
		dir                    migrate.Dir
		versions, descriptions []string
		stmts                  [][]string
	}{
		{
			name: "golang-migrate",
			dir: func() migrate.Dir {
				d, err := sqltool.NewGolangMigrateDir("testdata/golang-migrate")
				require.NoError(t, err)
				return d
			}(),
			versions:     []string{"1", "2"},
			descriptions: []string{"initial", "second_migration"},
			stmts: [][]string{
				{"CREATE TABLE tbl\n(\n col INT\n);"},
				{"CREATE TABLE tbl_2 (col INT);"},
			},
		},
		{
			name:
			"goose",
			dir: func() migrate.Dir {
				d, err := sqltool.NewGooseDir("testdata/goose")
				require.NoError(t, err)
				return d
			}(),
			versions:     []string{"1", "2"},
			descriptions: []string{"initial", "second_migration"},
			stmts: [][]string{
				{
					"CREATE TABLE post\n(\n id int NOT NULL,\n title text,\n body text,\n PRIMARY KEY (id)\n);",
					"ALTER TABLE post ADD created_at TIMESTAMP NOT NULL;",
					"INSERT INTO post (title) VALUES (\n'This is\nmy multiline\n\nvalue');",
				},
				{
					"ALTER TABLE post ADD updated_at TIMESTAMP NOT NULL;",
					"CREATE\nOR REPLACE FUNCTION histories_partition_creation( DATE, DATE )\nreturns void AS $$\nDECLARE\ncreate_query text;\nBEGIN\nFOR create_query IN\nSELECT 'CREATE TABLE IF NOT EXISTS histories_'\n || TO_CHAR(d, 'YYYY_MM')\n || ' ( CHECK( created_at >= timestamp '''\n || TO_CHAR(d, 'YYYY-MM-DD 00:00:00')\n || ''' AND created_at < timestamp '''\n || TO_CHAR(d + INTERVAL '1 month', 'YYYY-MM-DD 00:00:00')\n || ''' ) ) inherits ( histories );'\nFROM generate_series($1, $2, '1 month') AS d LOOP\n EXECUTE create_query;\nEND LOOP; -- LOOP END\nEND; -- FUNCTION END\n$$\nlanguage plpgsql;",
				},
			},
		},
		{
			name: "flyway",
			dir: func() migrate.Dir {
				d, err := sqltool.NewFlywayDir("testdata/flyway")
				require.NoError(t, err)
				return d
			}(),
			// Repeatable migrations ("views") report an empty version.
			versions:     []string{"2", "3", ""},
			descriptions: []string{"baseline", "third_migration", "views"},
			stmts: [][]string{
				{
					"CREATE TABLE post\n(\n id int NOT NULL,\n title text,\n body text,\n created_at TIMESTAMP NOT NULL\n PRIMARY KEY (id)\n);",
					"INSERT INTO post (title, created_at) VALUES (\n'This is\nmy multiline\n\nvalue', NOW());",
				},
				{"ALTER TABLE tbl_2 ADD col_1 INTEGER NOT NULL;"},
				{"CREATE VIEW `my_view` AS SELECT * FROM `post`;"},
			},
		},
		{
			name: "liquibase",
			dir: func() migrate.Dir {
				d, err := sqltool.NewLiquibaseDir("testdata/liquibase")
				require.NoError(t, err)
				return d
			}(),
			versions:     []string{"1", "2"},
			descriptions: []string{"initial", "second_migration"},
			stmts: [][]string{
				{
					"CREATE TABLE post\n(\n id int NOT NULL,\n title text,\n body text,\n PRIMARY KEY (id)\n);",
					"ALTER TABLE post ADD created_at TIMESTAMP NOT NULL;",
					"INSERT INTO post (title) VALUES (\n'This is\nmy multiline\n\nvalue');",
				},
				{"CREATE TABLE tbl_2 (col INT);"},
			},
		},
		{
			name: "dbmate",
			dir: func() migrate.Dir {
				d, err := sqltool.NewDBMateDir("testdata/dbmate")
				require.NoError(t, err)
				return d
			}(),
			versions:     []string{"1", "2"},
			descriptions: []string{"initial", "second_migration"},
			stmts: [][]string{
				{
					"CREATE TABLE post\n(\n id int NOT NULL,\n title text,\n body text,\n PRIMARY KEY (id)\n);",
					"ALTER TABLE post ADD created_at TIMESTAMP NOT NULL;",
					"INSERT INTO post (title) VALUES (\n'This is\nmy multiline\n\nvalue');",
				},
				{"CREATE TABLE tbl_2 (col INT);"},
			},
		},
	} {
		t.Run(tt.name, func(t *testing.T) {
			files, err := tt.dir.Files()
			require.NoError(t, err)
			require.Len(t, files, len(tt.versions))
			for i := range tt.versions {
				require.Equal(t, tt.versions[i], files[i].Version())
				require.Equal(t, tt.descriptions[i], files[i].Desc())
				stmts, err := files[i].Stmts()
				require.NoError(t, err)
				require.Len(t, stmts, len(tt.stmts[i]))
				for j, stmt := range stmts {
					require.Equal(t, tt.stmts[i][j], stmt)
				}
			}
		})
	}
}

// TestChecksum verifies which files each directory implementation includes
// in its integrity checksum, and in what order.
func TestChecksum(t *testing.T) {
	for _, tt := range []struct {
		name  string
		dir   migrate.Dir
		files []string // files expected to be part of the checksum (in order)
	}{
		{
			name: "golang-migrate",
			dir: func() migrate.Dir {
				d, err := sqltool.NewGolangMigrateDir("testdata/golang-migrate")
				require.NoError(t, err)
				return d
			}(),
			files: []string{
				"1_initial.down.sql",
				"1_initial.up.sql",
				"2_second_migration.down.sql",
				"2_second_migration.up.sql",
			},
		},
		{
			name: "goose",
			dir: func() migrate.Dir {
				d, err := sqltool.NewGooseDir("testdata/goose")
				require.NoError(t, err)
				return d
			}(),
			files: []string{
				"1_initial.sql",
				"2_second_migration.sql",
			},
		},
		{
			name: "flyway",
			dir: func() migrate.Dir {
				d, err := sqltool.NewFlywayDir("testdata/flyway")
				require.NoError(t, err)
				return d
			}(),
			files: []string{
				"B2__baseline.sql",
				"R__views.sql",
				"U1__initial.sql",
				"V1__initial.sql",
				"V2__second_migration.sql",
				"V3__third_migration.sql",
			},
		},
		{
			name: "liquibase",
			dir: func() migrate.Dir {
				d, err := sqltool.NewLiquibaseDir("testdata/liquibase")
				require.NoError(t, err)
				return d
			}(),
			files: []string{
				"1_initial.sql",
				"2_second_migration.sql",
			},
		},
		{
			name: "dbmate",
			dir: func() migrate.Dir {
				d, err := sqltool.NewDBMateDir("testdata/dbmate")
				require.NoError(t, err)
				return d
			}(),
			files: []string{
				"1_initial.sql",
				"2_second_migration.sql",
			},
		},
	} {
		t.Run(tt.name, func(t *testing.T) {
			sum, err := tt.dir.Checksum()
			require.NoError(t, err)
			require.Len(t, sum, len(tt.files))
			for i := range tt.files {
				require.Equal(t, tt.files[i], sum[i].N)
			}
		})
	}
}

// dir returns a LocalDir rooted in a fresh temporary directory.
func dir(t *testing.T) migrate.Dir {
	p := t.TempDir()
	d, err := migrate.NewLocalDir(p)
	require.NoError(t, err)
	return d
}

// countFiles reports the number of entries in the directory root.
func countFiles(t *testing.T, d migrate.Dir) int {
	files, err := fs.ReadDir(d, "")
	require.NoError(t, err)
	return len(files)
}

// requireFileEqual asserts that the named file has exactly the given contents.
func requireFileEqual(t *testing.T, d migrate.Dir, name, contents string) {
	c, err := fs.ReadFile(d, name)
	require.NoError(t, err)
	require.Equal(t, contents, string(c))
}