==> B2_Command_Line_Tool-3.19.1/.github/dependabot.yml <==
version: 2
updates:
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "daily"
    # This setting does not affect security updates
    open-pull-requests-limit: 0

==> B2_Command_Line_Tool-3.19.1/.github/no-response.yml <==
# Configuration for probot-no-response - https://github.com/probot/no-response

# Number of days of inactivity before an Issue is closed for lack of response
daysUntilClose: 14
# Label requiring a response
responseRequiredLabel: more-information-needed
# Comment to post when closing an Issue for lack of response. Set to `false` to disable
closeComment: >
  This issue has been automatically closed because there has been no response
  to our request for more information from the original author. With only the
  information that is currently in the issue, we don't have enough information
  to take action. Please reach out if you have or find the answers we need so
  that we can investigate or assist you further.

==> B2_Command_Line_Tool-3.19.1/.github/workflows/cd.yml <==
name: Continuous Delivery

on:
  push:
    tags: 'v*'  # push events to matching v*, i.e. v1.0, v20.15.10
env:
  CD: "true"
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  PYTHON_DEFAULT_VERSION: "3.12"

jobs:
  deploy:
    env:
      B2_PYPI_PASSWORD: ${{ secrets.B2_PYPI_PASSWORD }}
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.build.outputs.version }}
      prerelease: ${{ steps.prerelease_check.outputs.prerelease }}
      # publish_docker: ${{ steps.prerelease_check.outputs.prerelease == 'false' && secrets.DOCKERHUB_USERNAME != '' }} # doesn't work, hence the workaround
      publish_docker: ${{ steps.prerelease_check.outputs.publish_docker }}
    steps:
      - name: Determine if pre-release
        id: prerelease_check
        run: |
          export IS_PRERELEASE=$([[ ${{ github.ref }} =~ [^0-9]$ ]] && echo true || echo false)
          echo "prerelease=$IS_PRERELEASE" >> $GITHUB_OUTPUT
          export PUBLISH_DOCKER=$([[ $IS_PRERELEASE == 'false' && "${{ secrets.DOCKERHUB_USERNAME }}" != '' ]] && echo true || echo false)
          echo "publish_docker=$PUBLISH_DOCKER" >> $GITHUB_OUTPUT
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python ${{ env.PYTHON_DEFAULT_VERSION }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_DEFAULT_VERSION }}
      - name: Install dependencies
        run: python -m pip install --upgrade nox pdm
      - name: Build the distribution
        id: build
        run: nox -vs build
      - name: Read the Changelog
        id: read-changelog
        uses: mindsers/changelog-reader-action@v2
        with:
          version: ${{ steps.build.outputs.version }}
      - name: Create GitHub release and upload the distribution
        id: create-release
        uses: softprops/action-gh-release@v2
        with:
          name: ${{ steps.build.outputs.version }}
          body: ${{ steps.read-changelog.outputs.changes }}
          draft: false
          prerelease: ${{ steps.prerelease_check.outputs.prerelease }}
          files: ${{ steps.build.outputs.asset_path }}
      - name: Upload the distribution to PyPI
        if: ${{ env.B2_PYPI_PASSWORD != '' && steps.prerelease_check.outputs.prerelease == 'false' }}
        uses: pypa/gh-action-pypi-publish@v1.3.1
        with:
          user: __token__
          password: ${{ secrets.B2_PYPI_PASSWORD }}

  deploy-linux-bundle:
    needs: deploy
    runs-on: ubuntu-latest
    container:
      image: "python:3.12"  # can not use ${{ env.PYTHON_DEFAULT_VERSION }} here
      env:
        DEBIAN_FRONTEND: noninteractive
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Install dependencies
        run: |
          apt-get -y update
          apt-get -y install patchelf
          python -m pip install --upgrade nox pdm
          git config --global --add safe.directory '*'
      - name: Bundle the distribution
        id: bundle
        run: nox -vs bundle
      - name: Sign the bundle
        id: sign
        run: nox -vs sign
      - name: Generate hashes
        id: hashes
        run: nox -vs make_dist_digest
      - name: Upload the bundle to the GitHub release
        uses: softprops/action-gh-release@v2
        with:
          name: ${{ needs.deploy.outputs.version }}
          draft: false
          prerelease: ${{ needs.deploy.outputs.prerelease }}
          files: ${{ steps.sign.outputs.asset_path }}

  deploy-windows-bundle:
    needs: deploy
    env:
      B2_WINDOWS_CODE_SIGNING_CERTIFICATE: ${{ secrets.B2_WINDOWS_CODE_SIGNING_CERTIFICATE }}
      B2_WINDOWS_CODE_SIGNING_CERTIFICATE_PASSWORD: ${{ secrets.B2_WINDOWS_CODE_SIGNING_CERTIFICATE_PASSWORD }}
    runs-on: windows-2019
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python ${{ env.PYTHON_DEFAULT_VERSION }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_DEFAULT_VERSION }}
      - name: Install dependencies
        run: python -m pip install --upgrade nox pdm
      - name: Bundle the distribution
        id: bundle
        shell: bash
        run: nox -vs bundle
      - name: Import certificate
        id: windows_import_cert
        if: ${{ env.B2_WINDOWS_CODE_SIGNING_CERTIFICATE != '' }}
        uses: timheuer/base64-to-file@v1
        with:
          fileName: 'cert.pfx'
          encodedString: ${{ secrets.B2_WINDOWS_CODE_SIGNING_CERTIFICATE }}
      - name: Sign the bundle
        if: ${{ env.B2_WINDOWS_CODE_SIGNING_CERTIFICATE != '' }}
        id: sign
        shell: bash
        run: nox -vs sign -- '${{ steps.windows_import_cert.outputs.filePath }}' '${{ env.B2_WINDOWS_CODE_SIGNING_CERTIFICATE_PASSWORD }}'
      - name: Generate hashes
        id: hashes
        run: nox -vs make_dist_digest
      - name: Create GitHub release and upload the distribution
        id: create-release
        uses: softprops/action-gh-release@v2
        with:
          name: ${{ needs.deploy.outputs.version }}
          draft: false
          prerelease: ${{ needs.deploy.outputs.prerelease }}
          files: ${{ steps.sign.outputs.asset_path || steps.bundle.outputs.asset_path }}

  deploy-docker:
    needs: deploy
    if: ${{ needs.deploy.outputs.publish_docker == 'true' }}
    runs-on: ubuntu-latest
    env:
      DEBIAN_FRONTEND: noninteractive
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python ${{ env.PYTHON_DEFAULT_VERSION }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_DEFAULT_VERSION }}
      - name: Install dependencies
        run: python -m pip install --upgrade nox pdm
      - name: Build Dockerfile
        run: nox -vs generate_dockerfile
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Prepare Docker tags
        id: docker_tags_prep
        run: |
          DOCKER_TAGS=backblazeit/b2:${{ needs.deploy.outputs.version }}
          if [ "${{ needs.deploy.outputs.prerelease }}" != "true" ]; then
            DOCKER_TAGS="$DOCKER_TAGS,backblazeit/b2:latest"
          fi
          echo DOCKER_TAGS=$DOCKER_TAGS
          echo "docker_tags=$DOCKER_TAGS" >> $GITHUB_OUTPUT
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v3
        with:
          context: .
          push: true
          tags: ${{ steps.docker_tags_prep.outputs.docker_tags }}
          platforms: linux/amd64,linux/arm64
      - name: Update Docker Hub Description
        uses: peter-evans/dockerhub-description@v4
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
          repository: backblazeit/b2
          short-description: "Official Backblaze B2 CLI docker image"

==> B2_Command_Line_Tool-3.19.1/.github/workflows/ci.yml <==
name: Continuous Integration

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

env:
  PYTHON_DEFAULT_VERSION: "3.12"

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: codespell-project/actions-codespell@2391250ab05295bddd51e36a8c6295edb6343b0e
        with:
          ignore_words_list: datas re-use
      - name: Set up Python ${{ env.PYTHON_DEFAULT_VERSION }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_DEFAULT_VERSION }}
      - name: Install dependencies
        run: python -m pip install --upgrade nox pdm
      - name: Run linters
        run: nox -vs lint
      - name: Validate new changelog entries
        if: (contains(github.event.pull_request.labels.*.name, '-changelog') == false) && (github.event.pull_request.base.ref != '')
        run: if [ -z "$(git diff --diff-filter=A --name-only origin/${{ github.event.pull_request.base.ref }} changelog.d)" ]; then echo no changelog item added; exit 1; fi
      - name: Changelog validation
        run: nox -vs towncrier_check

  build:
    needs: lint
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python ${{ env.PYTHON_DEFAULT_VERSION }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_DEFAULT_VERSION }}
      - name: Install dependencies
        run: python -m pip install --upgrade nox pdm
      - name: Build the distribution
        run: nox -vs build

  cleanup_buckets:
    needs: lint
    env:
      B2_TEST_APPLICATION_KEY: ${{ secrets.B2_TEST_APPLICATION_KEY }}
      B2_TEST_APPLICATION_KEY_ID: ${{ secrets.B2_TEST_APPLICATION_KEY_ID }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        if: ${{ env.B2_TEST_APPLICATION_KEY != '' && env.B2_TEST_APPLICATION_KEY_ID != '' }}  # TODO: skip this whole job instead
        with:
          fetch-depth: 0
      - name: Set up Python ${{ env.PYTHON_DEFAULT_VERSION }}
        if: ${{ env.B2_TEST_APPLICATION_KEY != '' && env.B2_TEST_APPLICATION_KEY_ID != '' }}  # TODO: skip this whole job instead
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_DEFAULT_VERSION }}
          cache: "pip"
      - name: Install dependencies
        if: ${{ env.B2_TEST_APPLICATION_KEY != '' && env.B2_TEST_APPLICATION_KEY_ID != '' }}  # TODO: skip this whole job instead
        run: python -m pip install --upgrade nox pdm
      - name: Find and remove old buckets
        if: ${{ env.B2_TEST_APPLICATION_KEY != '' && env.B2_TEST_APPLICATION_KEY_ID != '' }}  # TODO: skip this whole job instead
        run: nox -vs cleanup_buckets

  test:
    needs: cleanup_buckets
    env:
      B2_TEST_APPLICATION_KEY: ${{ secrets.B2_TEST_APPLICATION_KEY }}
      B2_TEST_APPLICATION_KEY_ID: ${{ secrets.B2_TEST_APPLICATION_KEY_ID }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: ["ubuntu-latest", "macos-latest", "windows-latest"]
        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "pypy3.9", "pypy3.10"]
        exclude:
          - os: "macos-latest"
            python-version: "pypy3.10"
          - os: "windows-latest"
            python-version: "pypy3.10"
          # Workaround for https://github.com/actions/setup-python/issues/696
          - os: "macos-latest"
            python-version: 3.7
          - os: "macos-latest"
            python-version: 3.8
          - os: "macos-latest"
            python-version: 3.9
        include:
          # Workaround for https://github.com/actions/setup-python/issues/696
          - os: "macos-13"
            python-version: 3.7
          - os: "macos-13"
            python-version: 3.8
          - os: "macos-13"
            python-version: 3.9
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install test binary dependencies
        if: startsWith(matrix.os, 'ubuntu')
        run: |
          sudo apt-get -y update
          sudo apt-get -y install zsh fish
          sudo chmod -R 755 /usr/share/zsh/vendor-completions /usr/share/zsh  # Fix permissions for zsh completions
      - name: Install test binary dependencies (macOS)
        if: startsWith(matrix.os, 'macos')
        run: |
          brew install fish
      - name: Install dependencies
        run: python -m pip install --upgrade nox pdm
      - name: Run unit tests
        run: nox -vs unit -p ${{ matrix.python-version }}
      - name: Run integration tests (without secrets)
        run: nox -vs integration -p ${{ matrix.python-version }} -- -m "not require_secrets"
      - name: Run integration tests (with secrets)
        # Limit CI workload by running integration tests with secrets only on edge Python versions.
        if: ${{ env.B2_TEST_APPLICATION_KEY != '' && env.B2_TEST_APPLICATION_KEY_ID != '' && contains(fromJSON('["3.7", "pypy3.10", "3.12"]'), matrix.python-version) }}
        run: nox -vs integration -p ${{ matrix.python-version }} -- -m "require_secrets" --cleanup

  test-docker:
    needs: cleanup_buckets
    env:
      B2_TEST_APPLICATION_KEY: ${{ secrets.B2_TEST_APPLICATION_KEY }}
      B2_TEST_APPLICATION_KEY_ID: ${{ secrets.B2_TEST_APPLICATION_KEY_ID }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python ${{ env.PYTHON_DEFAULT_VERSION }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_DEFAULT_VERSION }}
      - name: Install dependencies
        run: sudo python -m pip install --upgrade nox pdm
      - name: Generate Dockerfile
        run: nox -vs generate_dockerfile
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Build Docker
        uses: docker/build-push-action@v5
        with:
          context: .
          load: true
          tags: backblazeit/b2:test
          platforms: linux/amd64
      - name: Run tests with docker
        if: ${{ env.B2_TEST_APPLICATION_KEY != '' && env.B2_TEST_APPLICATION_KEY_ID != '' }}
        run: nox -vs docker_test -- backblazeit/b2:test

  test-linux-bundle:
    needs: cleanup_buckets
    env:
      B2_TEST_APPLICATION_KEY: ${{ secrets.B2_TEST_APPLICATION_KEY }}
      B2_TEST_APPLICATION_KEY_ID: ${{ secrets.B2_TEST_APPLICATION_KEY_ID }}
    runs-on: ubuntu-latest
    container:
      image: "python:3.12"  # can not use ${{ env.PYTHON_DEFAULT_VERSION }} here
      env:
        DEBIAN_FRONTEND: noninteractive
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Install dependencies
        run: |
          apt-get -y update
          apt-get -y install patchelf
          python -m pip install --upgrade nox pdm
          git config --global --add safe.directory '*'
      - name: Bundle the distribution
        id: bundle
        run: nox -vs bundle
      - name: Generate hashes
        id: hashes
        run: nox -vs make_dist_digest
      - name: Run integration tests (without secrets)
        run: nox -vs integration -p ${{ env.PYTHON_DEFAULT_VERSION }} -- --sut=${{ steps.bundle.outputs.sut_path }} -m "not require_secrets"
      - name: Run integration tests (with secrets)
        if: ${{ env.B2_TEST_APPLICATION_KEY != '' && env.B2_TEST_APPLICATION_KEY_ID != '' }}
        run: nox -vs integration -p ${{ env.PYTHON_DEFAULT_VERSION }} -- --sut=${{ steps.bundle.outputs.sut_path }} -m "require_secrets" --cleanup
      - name: Upload assets
        if: failure()
        uses: actions/upload-artifact@v2
        with:
          path: ${{ steps.bundle.outputs.asset_path }}
          if-no-files-found: warn
          retention-days: 7

  test-windows-bundle:
    needs: cleanup_buckets
    env:
      B2_TEST_APPLICATION_KEY: ${{ secrets.B2_TEST_APPLICATION_KEY }}
      B2_TEST_APPLICATION_KEY_ID: ${{ secrets.B2_TEST_APPLICATION_KEY_ID }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [windows-2019, windows-latest]
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python ${{ env.PYTHON_DEFAULT_VERSION }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_DEFAULT_VERSION }}
      - name: Install dependencies
        run: python -m pip install --upgrade nox pdm
      - name: Bundle the distribution
        id: bundle
        shell: bash
        run: nox -vs bundle
      - name: Generate hashes
        id: hashes
        run: nox -vs make_dist_digest
      - name: Run integration tests (without secrets)
        run: nox -vs integration -p ${{ env.PYTHON_DEFAULT_VERSION }} -- --sut=${{ steps.bundle.outputs.sut_path }} -m "not require_secrets"
      - name: Run integration tests (with secrets)
        if: ${{ env.B2_TEST_APPLICATION_KEY != '' && env.B2_TEST_APPLICATION_KEY_ID != '' }}
        run: nox -vs integration -p ${{ env.PYTHON_DEFAULT_VERSION }} -- --sut=${{ steps.bundle.outputs.sut_path }} -m "require_secrets" --cleanup
      - name: Upload assets
        if: failure()
        uses: actions/upload-artifact@v2
        with:
          path: ${{ steps.bundle.outputs.asset_path }}
          if-no-files-found: warn
          retention-days: 7

  doc:
    needs: build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python ${{ env.PYTHON_DEFAULT_VERSION }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_DEFAULT_VERSION }}
      - name: Install dependencies
        env:
          DEBIAN_FRONTEND: noninteractive
        run: |
          sudo apt-get update -y
          sudo apt-get install -y graphviz plantuml
          python -m pip install --upgrade nox pdm
      - name: Build the docs
        run: nox --non-interactive -vs doc

==> B2_Command_Line_Tool-3.19.1/.gitignore <==
*.pyc
.codacy-coverage/
.coverage
.eggs/
.idea
.nox/
.pdm-build/
.pdm-python
.python-version
b2_cli.log
b2.egg-info
build
coverage.xml
dist
venv
doc/source/main_help.rst
Dockerfile
b2/licenses_output.txt
*.spec

==> B2_Command_Line_Tool-3.19.1/.readthedocs.yml <==
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

build:
  os: ubuntu-22.04
  tools:
    python: "3.12"
  jobs:
    post_create_environment:
      - pip install pdm
      - pdm export --format requirements --prod --group doc --output requirements-doc.txt --no-hashes

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: doc/source/conf.py

# Optionally build your docs in additional formats such as PDF and ePub
formats: all

# Optionally set the version of Python and requirements required to build your docs
python:
  install:
    - requirements: requirements-doc.txt
    - method: pip
      path: .

==> B2_Command_Line_Tool-3.19.1/CHANGELOG.md <==
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the changes for the upcoming release can be found in [changelog.d](changelog.d).

## [3.19.1](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.19.1) - 2024-04-23

### Fixed

- Fix `create-key --all-capabilities` error when using `b2sdk>=2.1`.

## [3.19.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.19.0) - 2024-04-15

### Added

- Add `notification-rules` commands for manipulating Bucket notification rules as part of Event Notifications feature Private Preview.
  See https://www.backblaze.com/blog/announcing-event-notifications/ for details.

## [3.18.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.18.0) - 2024-04-02

### Changed

- Change all `_b2v4 --camelCase` CLI flags to `--kebab-case`.
  Add deprecation warning for `b2v3 --camelCase` CLI flags.

### Fixed

- Don't persist credentials provided in the Environment variables in any command other than `authorize-account` when using `b2v4`.
- Fix `b2 --help` showing full binary path instead of just basename.

### Added

- Add autocomplete support for `zsh` and `fish` shells.
- Add support for calling `b2 ls` without arguments to list all buckets.

### Infrastructure

- Add dockerhub description deployment to CD workflow.
- Add support for pre-releases in CD.
- Fix missing command output when running `nox` under CI.
- Increase verbosity when running tests under CI.
- Update to [GitHub Actions using Node 20](https://github.blog/changelog/2023-09-22-github-actions-transitioning-from-node-16-to-node-20/).

## [3.17.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.17.0) - 2024-03-15

### Fixed

- Control character escaping is now enabled by default if running in a terminal for improved security.

### Added

- Added `--escape-control-characters` and `--no-escape-control-characters` flags, as well as `B2_ESCAPE_CONTROL_CHARACTERS` env var to explicitly enable or disable control character escaping.

## [3.16.1](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.16.1) - 2024-02-26

### Fixed

- Fix `--threads` option being silently ignored in upload commands.
## [3.16.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.16.0) - 2024-02-19

### Changed

- All internal Python modules were moved to the `b2._internal` package to further discourage users from importing them.
- Change `ls` and `rm` commands to use the `b2://` URI scheme in the pre-release `_b2v4` command.

### Fixed

- Fix `--minPartSize` not supporting values above 100MB.
- Fix a bug where `rm bucketName folderName` command without the `--recursive` flag would remove a first file from every subdirectory inside `folderName`.
- Fix handling of `?` and `#` in B2 URI.

### Added

- ApiVer introduced. `b2` executable points to the latest stable ApiVer version, while `b2v3` will always point to v3 ApiVer release of `b2` CLI.
- Add `--include` and `--exclude` filters to the `ls` and `rm` commands.
- Add support for deleting a single file by `b2id://` URI in the pre-release `_b2v4` command.
- Print account info if `b2 authorize-account` is successful using the same format as `b2 get-account-info`.
- Print output file path in `download-file` command.

### Infrastructure

- Fix CI failing on `mkdir` when testing docker image.
- Use pdm for building, testing and managing dependencies.
- Remove unnecessary files (continuous integration scripts, tests) from sdist tarball.

## [3.15.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.15.0) - 2023-12-07

### Changed

- Use Python 3.12 in the official `b2` Docker image.

### Fixed

- Loosen platformdirs dependency version specifier.

### Added

- Whenever target filename is a directory, file is downloaded into that directory.

## [3.14.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.14.0) - 2023-12-06

### Changed

- Update b2sdk to 1.28.0 (resolves [#958](https://github.com/Backblaze/B2_Command_Line_Tool/issues/958), [#934](https://github.com/Backblaze/B2_Command_Line_Tool/issues/934)).

### Fixed

- Don't print `Using https://REALM` in stderr unless explicitly set by user. ([#949](https://github.com/Backblaze/B2_Command_Line_Tool/issues/949))
- Added autocomplete suggestion caching to improve autocomplete performance.
- Do not include build cache in official `b2` docker image.
- Fix an error that caused multiprocessing semaphores to leak on OSX.

### Deprecated

- Deprecated `download-file-by-id` and `download-file-by-name`, use `download-file` instead.
  Deprecated `get-file-info`, use `file-info` instead.
  Deprecated `make-url` and `make-friendly-url`, use `get-url` instead.

### Added

- Add `--expires`, `--content-disposition`, `--content-encoding`, `--content-language` options to subcommands `upload-file`, `upload-unbound-stream`, `copy-file-by-id`.
- Add `download-file`, `file-info` and `get-url` commands using new B2 URI syntax allowing for referring to file-like objects by their bucket&name or ID.

### Doc

- Add `cat` command to documentation.
- Add additional linebreaks to ensure lists are properly rendered.

### Infrastructure

- Ensure CI checks Python package compatibility with latest setuptools. ([#952](https://github.com/Backblaze/B2_Command_Line_Tool/issues/952))
- Allow skipping changelog for PRs marked with `-changelog` label.
- Changelog entries are now validated as a part of CI pipeline.
- Disable dependabot requests for updates unrelated to security issues.
- Fix CI badge not showing correct status in README.
- Remove unused exception class and outdated todo.
- Skip draft step in releases - all successful releases are public.
- Update license text generation dependencies to prevent triggering security scan false-positives.
- Use cpython 3.12 (not 3.11) for integration tests with secrets.

## [3.13.1](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.13.1) - 2023-11-21

### Fixed

- Fix "pip install" by making pyproject.toml viable. ([#952](https://github.com/Backblaze/B2_Command_Line_Tool/issues/952))

### Doc

- Fix `docker run` example in README.md

### Infrastructure

- Towncrier changelog generation - to avoid conflicts when simultaneously working on PRs
- Fix towncrier generated changelog to work with mindsers/changelog-reader-action

## [3.13.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.13.0) - 2023-11-16

### Added

- Add linux/arm64 platform support to the official Docker image
- Add `cat` command for downloading file contents directly to stdout
- Add `-r` as an alias for `--recursive` argument
- Add `-q` as an alias for `--quiet` argument

### Fixed

- Emit `Using https://api.backblazeb2.com` message to stderr instead of stdout, therefore preventing JSON output corruption

### Changed

- Stream `ls --json` JSON output instead of dumping it only after all objects have been fetched
- Alias `-` to stdout in `download-file-by-name` or `download-file-by-id` command

## [3.12.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.12.0) - 2023-10-28

### Added

- docker tests and pushing the official docker image on release

### Fixed

- `--quiet` now will implicitly set `--noProgress` option as well
- pypy integration tests

### Infrastructure

- Use stable Python 3.12 in CI
- Fix readthedocs build by updating to v2 configuration schema

## [3.11.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.11.0) - 2023-10-04

### Added

- Add `--quiet` option to all commands to suppress all messages printed to stdout & stderr

### Changed

- Improve `--quiet` and `--profile` options documentation mentions, while suppressing them in `--help` output

### Infrastructure

- Fix gathering licenses of typeshed libraries
- Fix spellcheck erroring out on LICENSE file

## [3.10.1](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.10.1) - 2023-09-27

### Fixed

- Fix lifecycle rules being cleared after using `update-bucket` command if not explicitly set again.
- Fix missing key ID for large file encrypted with SSE-C (fixed by `b2sdk` update)

### Infrastructure

- Fix bad version number generation in CD

## [3.10.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.10.0) - 2023-09-10

### Added

- Add ability to upload from an unbound source such as standard input or a named pipe
- `--bypassGovernance` option to `delete_file_version`
- Declare official support of Python 3.12
- Cache-Control option when uploading files
- Add `--lifecycleRule` to `create-bucket` and `update-bucket` and deprecate `--lifecycleRules` argument
- Add extra dependencies for better UX, installable with `pip install b2[full]`
- Add s3 endpoint to `get-account-info`

### Deprecated

- Deprecate support of `-` as a valid filename in `upload-file` command.
  In the future `-` will always be interpreted as standard input
### Changed

- Better help text for `--corsRules`
- If `--threads` is not explicitly set, number of threads is no longer guaranteed to be 10

### Infrastructure

- Remove unsupported PyPy 3.7 from tests matrix and add PyPy 3.10 instead
- Autocomplete integration tests will now work properly even if tested package has not been installed
- Automatically set copyright date when generating the docs
- Increase timeout time in autocomplete tests to accommodate slower CI environments
- Update pyinstaller to fix Linux Bundle build
- Replace `pyflakes` with `ruff` for linting
- Make dependency version pinning less restrictive
- Fix tests by making mocks compatible with latest `b2sdk` version
- Fix readthedocs build

### Fixed

- Fast rm sometimes failing due to a rare race condition
- Fix UnicodeEncodeError in non-Unicode terminals by prioritizing stdout encoding
- When listing licenses in `license` command only show licenses of `b2` and its dependencies
- Fix license command failing on Windows when non-UTF8 encoding is the default

## [3.9.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.9.0) - 2023-04-28

### Added

- Support for custom file upload timestamp

### Infrastructure

- Limit GitHub CI workload by running most integration tests only against edge versions of supported Python versions
- Add a direct dependency on tqdm

## [3.8.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.8.0) - 2023-03-23

### Added

- Add `install-autocomplete` command for installing shell autocompletion (currently only `bash` is supported)

### Fixed

- Hitting the download endpoint twice in some cases

### Infrastructure

- GitHub CD builds and uploads an official B2 CLI image to docker hub
- Disable changelog verification for dependabot PRs

## [3.7.1](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.7.1) - 2023-02-08

### Fixed

- Remove unnecessary printing options from `rm`
- Clarify that `--recursive` is required when `--withWildcard` is used
- Adjust description of `rm`

### Infrastructure

- Remove macos stand-alone binary from CI/CD

## [3.7.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.7.0) - 2023-02-07

### Added

- Add `--incrementalMode` to `sync` and `upload-file`
- Add `license` command for printing licenses of b2 and libraries
- Add wildcard support for the `ls` command
- Add `rm` command

### Fixed

- Stop using b2sdk.v1 in arg_parser.py
- Fix issues when running commands on Python 3.11
- Fix tests after changes introduced in b2sdk 1.19.0
- `rm` can handle any number of files

### Infrastructure

- GitHub CI got checkout action updated to v3 and setup-python to v4
- Ensured that changelog validation only happens on pull requests
- GitHub CI uses GITHUB_OUTPUT instead of deprecated set-output
- Releases now feature digests of each file
- Change default Python version in CI/CD to 3.11
- Temporary marking all directories as `safe.directory` inside CI/CD when bundling

## [3.6.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.6.0) - 2022-09-20

### Added

- Add `replication-delete` command
- Add `replication-pause` command
- Add `replication-status` command
- Add `replication-unpause` command
- Add `--include-existing-files` to `replication-setup`
- Add `--max-streams` parameter to download commands
- Add `--fileLockEnabled` switch to `update-bucket` subcommand

### Fixed

- Fix `replication-setup` default priority setter

### Infrastructure

- Fix warnings in tests
- Fix `test_keys` unit test after changes in b2sdk
- Fix running tests on the CI with the latest SDK from the master branch

## [3.5.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.5.0) - 2022-07-27

As in 3.4.0, replication support may be unstable, however no backward-incompatible changes are currently planned.
This version is pinned strictly to `b2-sdk-python==1.17.3` for the same reason.

### Added

- Add `--write-buffer-size` parameter
- Add `--skip-hash-verification` parameter

### Changed

- Minimum MacOS version from 10.15 to 11.0

### Infrastructure

- Try not to crash tests due to bucket name collision
- Fix replication integration tests
- Fix leaking buckets in integration tests
- Limit number of workers for integration tests to 1 for now
- Make integration tests remove buckets only based on name, not based on creation time
- Add dependabot configuration

## [3.4.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.4.0) - 2022-05-04

This release contains a preview of replication support. It allows for basic usage of B2 replication feature (currently in closed beta).

Until this notice is removed, the interface of replication related functionality should not be considered as public API (as defined by SemVer).
This version is pinned strictly to `b2-sdk-python==1.16.0` for the same reason.

### Added

- Add basic replication support to `create-bucket` and `update-bucket`
- Add more fields to `get-account-info` json
- Add `--replication` to `ls --long`
- Add `replication-setup` command
- Add "quick start guide" to documentation

### Changed

- Made `bucketType` positional argument to `update-bucket` optional
- Run unit tests on all CPUs

## [3.3.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.3.0) - 2022-04-20

### Added

- Add `--threads` parameter to `download-file-by-name` and `download-file-by-id`
- Add `--uploadThreads` and `--downloadThreads` parameters to `sync`
- Add `--profile` switch support
- Add `applicationKeyId` and `isMasterKey` to the output of `get-account-info`

### Changed

- Rename `--threads` parameter for `--sync` to `--syncThreads`

### Fixed

- Fix license header checker on Windows
- Fix `UnicodeEncodeError` after successful SSE-C download on a non-utf8 terminal (#786)

### Removed

- Remove official support for python 3.5
- Remove official support for python 3.6

## [3.2.1](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.2.1) - 2022-02-23

### Fixed

- Fix setting permissions for local sqlite database (thanks to Jan Schejbal for responsible disclosure!)
## [3.2.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.2.0) - 2021-12-23

### Added

- Add compatibility support for arrow >= 1.0.2 on newer Python versions while continuing to support Python 3.5

### Fixed

- Fallback to `ascii` decoder when printing help in case the locales are not properly set
- Apply the value of `--threads` parameter to `sync` downloader threads

## [3.1.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.1.0) - 2021-11-02

### Added

- Add `--allCapabilities` to `create-key`
- Add support for Python 3.10

### Fixed

- Fix testing bundle in CI for a new `staticx` version

## [3.0.3](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.0.3) - 2021-09-27

### Fixed

- Fix pypy selector in CI
- Fix for static linking of Linux binary (CD uses python container)

## [3.0.2](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.0.2) - 2021-09-17

### Added

- Sign Windows binary

### Changed

- Download instruction in README.md (wording suggested by https://github.com/philh7456)
- Make Linux binary statically linked

## [3.0.1](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.0.1) - 2021-08-09

### Fixed

- logs from all loggers (in dependencies too) brought back

## [3.0.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v3.0.0) - 2021-08-07

### Added

- Add possibility to change realm during integration tests
- Add possibility to install SDK from local folder instead of pypi when running tests
- Add full support of establishing file metadata when copying, with either source or target using SSE-C
- Add `--noInfo` option to `copy-file-by-id`
- Integration test for checking if `bad_bucket_id` error code is returned

### Fixed

- Fix integration tests on non-production environments
- Fix warnings thrown by integration tests
- delete-key unit test adjusted to a less mocked simulator
- Fix integration test cleanup
- Representing encryption-related metadata in buckets and file versions is now consistent

### Changed

- CLI now uses `b2sdk.v2`
- Downloading files prints file metadata as soon as the download commences (not when it finishes)
- New way of establishing location of the SQLite cache file, using `XDG_CONFIG_HOME` env var
- Downloaded file's metadata is complete and is displayed before the file is downloaded, a `Download finished` message is issued at the end
- `contentLength` changed to `size` where appropriate
- Log configuration: stack traces are not printed in case of errors by default, `--verbose` changes that
- Log configuration arguments behaviour altered: `--logConfig` is exclusive with `--verbose` and `--debugLogs`
- Log configuration arguments behaviour altered: `--verbose` and `--debugLogs` can be used at the same time (and they will both be taken into account)

### Removed

- Support of `--metadataDirective` argument in `copy-file-by-id` (the `metadataDirective` sent to B2 cloud is detected automatically)

## [2.5.1](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v2.5.1) - 2021-08-06

- `SRC_LAST_MODIFIED_MILLIS` import fix

## [2.5.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v2.5.0) - 2021-05-22

### Added

- Add integration test for sync within one bucket with different encryption
- Notarize OSX binary
- File lock arguments and new commands

### Fixed

- Fixed breaking integration test case
- Add zoneinfo to the Windows bundle
- Fixed unit tests failing on new attributes of FileVersionInfo
- Removing old buckets in integration tests
- Bucket name entropy in tests increased
## [2.4.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v2.4.0) - 2021-04-22

### Added

- Sign OSX binary
- Add support for SSE-C server-side encryption mode

### Fixed

- Exclude packages inside the test package when installing

## [2.3.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v2.3.0) - 2021-03-25

### Added

- Add support for SSE-B2 server-side encryption mode

### Fixed

- Pin `setuptools-scm<6.0` as `>=6.0` doesn't support Python 3.5
- Fix boot speed regression caused by the `rst2ansi` invocations

## [2.2.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v2.2.0) - 2021-03-15

### Added

- Option to automatically authorize account when running commands other than `authorize-account` via `B2_APPLICATION_KEY_ID` and `B2_APPLICATION_KEY` env vars

### Changed

- Improve setup and teardown for the integration tests
- Use `setuptools-scm` for versioning
- Improve CLI and RTD descriptions of the commands
- Add upper version limit for arrow dependency, because of a breaking change

### Fixed

- Fix for the Windows bundled version
- Fix docs autogen

## [2.1.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v2.1.0) - 2020-11-03

### Added

- Add support for Python 3.9
- Add a possibility to append a string to the User-Agent via `B2_USER_AGENT_APPEND` env

### Changed

- Update `b2 sync` usage text for bucket-to-bucket sync

### Removed

- Drop Python 2 support :tada: (for old systems you can now use the [binary distribution](https://www.backblaze.com/b2/docs/quick_command_line.html))
- Remove `--prefix` from `ls` (it didn't really work, use `folderName` argument)
- Clean up legacy code (`CliBucket`, etc.)

### Fixed

- Fix docs generation in CI
- Correct names of the arguments in `b2 create-key` usage text

## [2.0.2](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v2.0.2) - 2020-07-15

### Added

- Add `--environment` internal parameter for `authorize-account`

## [2.0.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v2.0.0) - 2020-06-25

### Added

- Add official support for python 3.8
- Add `make-friendly-url` command
- Add `--excludeIfModifiedAfter` parameter for `sync`
- Add `--json` parameter to `ls` and `list-buckets`
- Introduce bundled versions of B2 CLI for Linux, Mac OS and Windows

### Changed

- Switch to b2sdk api version v1: remove output of `delete-bucket`
- Use b2sdk >1.1.0: add large file server-side copy
- Switch option parser to argparse: readthedocs documentation is now generated automatically
- Normalize output indentation level to 4 spaces

### Removed

- Remove the ability to import b2sdk classes through b2cli (please use b2sdk directly)
- Remove official support for python 3.4
- Remove `list-file-names` command. Use `ls --recursive --json` instead
- Remove `list-file-versions` command. Use `ls --recursive --json --versions` instead

## [1.4.2](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v1.4.2) - 2019-10-03

### Added

- Add `prefix` parameter to `list-file-names` and `list-file-versions`
- Add support for (server-side) copy-file command

### Changed

- Make parameters of `list-file-names` and `list-file-versions` optional (use an empty string like this: `""`)
- (b2sdk) Fix sync when used with a key restricted to filename prefix
- When authorizing with application keys, optional application key ID and application key can be added using environment variables B2_APPLICATION_KEY_ID and B2_APPLICATION_KEY respectively.
## [1.4.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v1.4.0) - 2019-04-25

### Added

- (b2sdk) Support for python 3.7

### Changed

- Renaming accountId for authentication to application key Id
  Note: this means account Id is still backwards compatible, only the terminology has changed.
- Most of the code moved to b2sdk [repository](https://github.com/Backblaze/b2-sdk-python) and [package](https://pypi.org/project/b2sdk/)
- (b2sdk) Fix transferer crashing on empty file download attempt
- (b2sdk) Enable retries of non-transfer operations
- (b2sdk) Enable continuation of download operations

### Deprecated

- Deprecation warning added for imports of sdk classes from cli package

## [1.3.8](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v1.3.8) - 2018-12-06

### Added

- New `--excludeAllSymlinks` option for `sync`.
- Faster downloading of large files using multiple threads and bigger buffers.

### Fixed

- Fixed doc for cancel-all-unfinished-large-files

## [1.3.6](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v1.3.6) - 2018-08-21

### Fixed

- Fix auto-reauthorize for application keys.
- Fix problem with bash auto-completion module.
- Fix (hopefully) markdown display in PyPI.

## [1.3.4](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v1.3.4) - 2018-08-10

### Fixed

- Better documentation for authorize-account command.
- Fix error reporting when using application keys
- Fix auth issues with bucket-restricted application keys.

## [1.3.2](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v1.3.2) - 2018-07-28

### Fixed

- Tests fixed for Python 3.7
- Add documentation about what capabilities are required for different commands.
- Better error messages for authorization problems with application keys.

## [1.3.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v1.3.0) - 2018-07-20

### Added

- Support for [application keys](https://www.backblaze.com/b2/docs/application_keys.html).
- Support for Python 3.6
- Drop support for Python 3.3 (`setuptools` no longer supports 3.3)

### Changed

- Faster and more complete integration tests

### Fixed

- Fix content type so markdown displays properly in PyPI
- The testing package is called `test`, not `tests`

## [1.2.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v1.2.0) - 2018-07-06

### Added

- New `--recursive` option for ls
- New `--showSize` option for get-bucket
- New `--excludeDirRegex` option for sync

### Fixed

- Include LICENSE file in the source tarball. Fixes #433
- Test suite now runs as root (fixes #427)
- Validate file names before trying to upload
- Fix scaling problems when syncing large numbers of files
- Prefix Windows paths during sync to handle long paths (fixes #265)
- Check if file to be synced is still accessible before syncing (fixes #397)

## [1.1.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v1.1.0) - 2017-11-30

### Added

- Add support for CORS rules in `create-bucket` and `update-bucket`. `get-bucket` will display CORS rules.

### Fixed

- cleanup in integration tests works

## [1.0.0](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v1.0.0) - 2017-11-09

### Added

- Require `--allowEmptySource` to sync from empty directory, to help avoid accidental deletion of all files.

## [0.7.4](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v0.7.4) - 2017-11-09

### Added

- More efficient uploads by sending SHA1 checksum at the end.

### Fixed

- File modification times are set correctly when downloading.
- Fix an off-by-one issue when downloading a range of a file (affects library, but not CLI).
- Better handling of some errors from the B2 service.

==> B2_Command_Line_Tool-3.19.1/CONTRIBUTING.md <==
# Contributing to B2 Command Line Tool

We encourage outside contributors to make changes to our codebase. Many such changes have been merged already.
In order to make it easier to contribute, core developers of this project:

* provide guidance (through the issue reporting system)
* provide tool assisted code review (through the Pull Request system)
* maintain a set of unit tests
* maintain a set of integration tests (run with a production cloud)
* maintain development automation tools using [nox](https://github.com/theacodes/nox) that can easily:
  * format the code using [yapf](https://github.com/google/yapf) and [ruff](https://github.com/astral-sh/ruff)
  * run linters to find subtle/potential issues with maintainability
  * run the test suite on multiple Python versions using [pytest](https://github.com/pytest-dev/pytest)
* maintain Continuous Integration (by using GitHub Actions) that:
  * runs all sorts of linters
  * checks if the Python distribution can be built
  * runs all tests on a matrix of supported versions of Python (including PyPy) and 3 operating systems (Linux, Mac OS X, and Windows)
  * checks if the documentation can be built properly
* maintain other Continuous Integration tools (coverage tracker)

## Versioning

This package's versioning adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html) and the versions are established by reading git tags, i.e. no code or manifest file changes are required when working on PRs.

## Changelog

Each PR needs to have at least one changelog (aka news) item added. This is done by creating files in `changelog.d`.
`towncrier` is used for compiling these files into [CHANGELOG.md](CHANGELOG.md).
There are several types of changes (news):

1. fixed
2. changed
3. added
4. deprecated
5. removed
6. infrastructure
7. doc

The `changelog.d` file name convention is:

1. If the PR closes a github issue: `{issue_number}.{type}.md` e.g. `157.fixed.md`.
   Note that the change description still has to be complete, linking an issue is just there for convenience, a change like `fixed #157` will not be accepted.
2. If the PR is not related to a github issue: `+{unique_string}.{type}.md` e.g. `+foobar.fixed.md`.

These files can either be created manually, or using `towncrier` e.g.

    towncrier create -c 'Add proper changelog example to CONTRIBUTING guide' 157.added.md

`towncrier create` also takes care of duplicates automatically (if there is more than 1 news fragment of one type for a given github issue).

## Developer Info

You'll need to have [nox](https://github.com/theacodes/nox) and [pdm](https://pdm-project.org/) installed:

* `pip install nox pdm`

With `nox`, you can run different sessions (default are `lint` and `test`):

* `format` -> Format the code.
* `lint` -> Run linters.
* `test` (`test-3.7`, `test-3.8`, `test-3.9`, `test-3.10`, `test-3.11`) -> Run test suite.
* `cover` -> Perform coverage analysis.
* `build` -> Build the distribution.
* `generate_dockerfile` -> Generate the dockerfile.
* `docker_test` -> Run integration tests against a docker image.
* `build_and_test_docker` -> Build a docker image and run integration tests against it.
* `doc` -> Build the documentation.
* `doc_cover` -> Perform coverage analysis for the documentation.
For example:

```bash
$ nox -s format
nox > Running session format
nox > Creating virtual environment (virtualenv) using python3.11 in .nox/format
...

$ nox -s format
nox > Running session format
nox > Re-using existing virtual environment at .nox/format.
...

$ nox --no-venv -s format
nox > Running session format
...
```

Sessions `test`, `unit`, and `integration` can run on many Python versions, 3.7-3.11 by default.
Sessions other than that use the last given Python version, 3.11 by default.
You can change it:

```bash
export NOX_PYTHONS=3.9,3.10
```

With the above setting, session `test` will run on Python 3.9 and 3.10, and all other sessions on Python 3.10.

Given Python interpreters should be installed in the operating system or via [pyenv](https://github.com/pyenv/pyenv).

## Managing dependencies

We use [pdm](https://pdm-project.org/) for managing dependencies and developing locally.

If you want to change any of the project requirements (or requirement bounds) in `pyproject.toml`, make sure that the `pdm.lock` file reflects those changes by using `pdm add`, `pdm update` or other commands - see [documentation](https://pdm-project.org/latest/).
You can verify that the lock file is up to date by running the linter.

## Linting

To run all available linters:

```bash
nox -s lint
```

## Testing

To run all tests on every available Python version:

```bash
nox -s test
```

To run all tests on a specific version:

```bash
nox -s test-3.11
```

To run just unit tests:

```bash
nox -s unit-3.11
```

To run just integration tests:

```bash
export B2_TEST_APPLICATION_KEY=your_app_key
export B2_TEST_APPLICATION_KEY_ID=your_app_key_id
nox -s integration-3.11
```

## Documentation

To build the documentation and watch for changes (including the source code):

```bash
nox -s doc
```

To just build the documentation:

```bash
nox --non-interactive -s doc
```

==> B2_Command_Line_Tool-3.19.1/Dockerfile.template <==
FROM python:${python_version}-slim as builder

RUN apt-get update -y && apt-get install git patchelf -y && pip install -U pdm

WORKDIR /b2
COPY ./b2 /b2/b2
COPY pyproject.toml pdm.lock LICENSE README.md /b2/

ENV PDM_BUILD_SCM_VERSION=${version}
RUN pdm install --prod --group license
RUN pdm run b2 license --dump --with-packages
# Run pdm in PEP 582 mode, install packages to __pypackages__, not virtualenv
RUN rm -r .venv && mkdir __pypackages__ && pdm install --prod --group full --no-editable

FROM python:${python_version}-slim

LABEL vendor=${vendor}
LABEL name="${name}"
LABEL description="${description}"
LABEL version="${version}"
LABEL url="${url}"
LABEL vcs-url="${vcs_url}"
LABEL vcs-ref="${vcs_ref}"
LABEL build-date-iso8601="${build_date}"

ENV PYTHONPATH=/opt/b2
COPY --from=builder /b2/__pypackages__/${python_version}/lib /opt/b2
COPY --from=builder /b2/__pypackages__/${python_version}/bin/* /bin/

WORKDIR /root

ENTRYPOINT ["b2"]
CMD ["--help"]

==> B2_Command_Line_Tool-3.19.1/LICENSE <==
Backblaze wants developers and organization to copy and re-use our
code examples, so we make the samples available by several different
licenses.  One option is the MIT license (below).
Other options are available here:
    https://www.backblaze.com/using_b2_code.html

The MIT License (MIT)

Copyright (c) 2015 Backblaze

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

==> B2_Command_Line_Tool-3.19.1/README.md <==
# B2 Command Line Tool

[![Continuous Integration](https://github.com/Backblaze/B2_Command_Line_Tool/actions/workflows/ci.yml/badge.svg)](https://github.com/Backblaze/B2_Command_Line_Tool/actions/workflows/ci.yml)
[![License](https://img.shields.io/pypi/l/b2.svg?label=License)](https://pypi.python.org/pypi/b2)
[![python versions](https://img.shields.io/pypi/pyversions/b2.svg?label=python%20versions)](https://pypi.python.org/pypi/b2)
[![PyPI version](https://img.shields.io/pypi/v/b2.svg?label=PyPI%20version)](https://pypi.python.org/pypi/b2)
[![Docs](https://readthedocs.org/projects/b2-command-line-tool/badge/?version=master)](https://b2-command-line-tool.readthedocs.io/en/master/?badge=master)

The command-line tool that gives easy access to all of the capabilities of B2 Cloud Storage.

This program provides command-line access to the B2 service.

## Documentation

The latest documentation is available on [Read the Docs](https://b2-command-line-tool.readthedocs.io/).

## Installation

For detailed instructions on how to install the command line tool see our [quick start guide](https://www.backblaze.com/b2/docs/quick_command_line.html).

### Homebrew

[Homebrew](https://brew.sh/) is widely used in the Mac community, particularly amongst developers.
We recommend using the [B2 CLI Homebrew](https://formulae.brew.sh/formula/b2-tools) formula as the quickest setup method for Mac users:

```bash
brew install b2-tools
```

### Binaries

Stand-alone binaries are available for Linux and Windows; this is the most straightforward way to use the command-line tool and is sufficient in most use cases.
The latest versions are available for download from the [Releases page](https://github.com/Backblaze/B2_Command_Line_Tool/releases).

### Python Package Index

You can also install it in your Python environment ([virtualenv](https://pypi.org/project/virtualenv/) is recommended) from PyPI with:

```bash
pip install b2[full]
```

The extra dependencies improve debugging experience and, potentially, performance of the `b2` CLI, but are not strictly required.
You can install `b2` without them:

```bash
pip install b2
```

### Docker

For a truly platform independent solution, use the official docker image:

```bash
docker run backblazeit/b2:latest ...
```

See examples in [Usage/Docker image](#docker-image)

### Installing from source

Not recommended, unless you want to check if the current pre-release code solves a bug affecting you.

```bash
pip install git+https://github.com/Backblaze/B2_Command_Line_Tool.git
```

If you wish to contribute to or otherwise modify source code, please see our [contributing guidelines](CONTRIBUTING.md).

## Usage

```bash
b2 authorize-account [-h] [applicationKeyId] [applicationKey]
b2 cancel-all-unfinished-large-files [-h] bucketName
b2 cancel-large-file [-h] fileId
b2 clear-account [-h]
b2 copy-file-by-id [-h] [--fetch-metadata] [--content-type CONTENTTYPE] [--range RANGE] [--info INFO | --no-info] [--destination-server-side-encryption {SSE-B2,SSE-C}] [--destination-server-side-encryption-algorithm {AES256}] [--source-server-side-encryption {SSE-C}] [--source-server-side-encryption-algorithm {AES256}] [--file-retention-mode {compliance,governance}] [--retain-until TIMESTAMP] [--legal-hold {on,off}] sourceFileId destinationBucketName b2FileName
b2 create-bucket [-h] [--bucket-info BUCKETINFO] [--cors-rules CORSRULES] [--file-lock-enabled] [--replication REPLICATION] [--default-server-side-encryption {SSE-B2,none}] [--default-server-side-encryption-algorithm {AES256}] [--lifecycle-rule LIFECYCLERULES | --lifecycle-rules LIFECYCLERULES] bucketName {allPublic,allPrivate}
b2 create-key [-h] [--bucket BUCKET] [--name-prefix NAMEPREFIX] [--duration DURATION] [--all-capabilities] keyName [capabilities]
b2 delete-bucket [-h] bucketName
b2 delete-file-version [-h] [--bypass-governance] [fileName] fileId
b2 delete-key [-h] applicationKeyId
b2 download-file [-h] [--threads THREADS] [--max-download-streams-per-file MAX_DOWNLOAD_STREAMS_PER_FILE] [--no-progress] [--source-server-side-encryption {SSE-C}] [--source-server-side-encryption-algorithm {AES256}] [--write-buffer-size BYTES] [--skip-hash-verification] B2_URI localFileName
b2 cat [-h] [--no-progress] [--source-server-side-encryption {SSE-C}] [--source-server-side-encryption-algorithm {AES256}] [--write-buffer-size BYTES] [--skip-hash-verification] B2_URI
b2 get-account-info [-h]
b2 get-bucket [-h] [--show-size] bucketName
b2 file-info [-h] B2_URI
b2 get-download-auth [-h] [--prefix PREFIX] [--duration DURATION] bucketName
b2 get-download-url-with-auth [-h] [--duration DURATION] bucketName fileName
b2 hide-file [-h] bucketName fileName
b2 list-buckets [-h] [--json]
b2 list-keys [-h] [--long]
b2 list-parts [-h] largeFileId
b2 list-unfinished-large-files [-h] bucketName
b2 ls [-h] [--long] [--json] [--replication] [--versions] [-r] [--with-wildcard] bucketName [folderName]
b2 rm [-h] [--dry-run] [--queue-size QUEUESIZE] [--no-progress] [--fail-fast] [--threads THREADS] [--versions] [-r] [--with-wildcard] bucketName [folderName]
b2 get-url [-h] B2_URI
b2 sync [-h] [--no-progress] [--dry-run] [--allow-empty-source] [--exclude-all-symlinks] [--sync-threads SYNCTHREADS] [--download-threads DOWNLOADTHREADS] [--upload-threads UPLOADTHREADS] [--compare-versions {none,modTime,size}] [--compare-threshold MILLIS] [--exclude-regex REGEX] [--include-regex REGEX] [--exclude-dir-regex REGEX] [--exclude-if-modified-after TIMESTAMP] [--threads THREADS] [--destination-server-side-encryption {SSE-B2,SSE-C}] [--destination-server-side-encryption-algorithm {AES256}] [--source-server-side-encryption {SSE-C}] [--source-server-side-encryption-algorithm {AES256}] [--write-buffer-size BYTES] [--skip-hash-verification] [--max-download-streams-per-file MAX_DOWNLOAD_STREAMS_PER_FILE]
[--incremental-mode] [--skip-newer | --replace-newer] [--delete | --keep-days DAYS] source destination b2 update-bucket [-h] [--bucket-info BUCKETINFO] [--cors-rules CORSRULES] [--default-retention-mode {compliance,governance,none}] [--default-retention-period period] [--replication REPLICATION] [--file-lock-enabled] [--default-server-side-encryption {SSE-B2,none}] [--default-server-side-encryption-algorithm {AES256}] [--lifecycle-rule LIFECYCLERULES | --lifecycle-rules LIFECYCLERULES] bucketName [{allPublic,allPrivate}] b2 upload-file [-h] [--content-type CONTENTTYPE] [--sha1 SHA1] [--cache-control CACHE_CONTROL] [--info INFO] [--custom-upload-timestamp CUSTOM_UPLOAD_TIMESTAMP] [--min-part-size MINPARTSIZE] [--threads THREADS] [--no-progress] [--destination-server-side-encryption {SSE-B2,SSE-C}] [--destination-server-side-encryption-algorithm {AES256}] [--legal-hold {on,off}] [--file-retention-mode {compliance,governance}] [--retain-until TIMESTAMP] [--incremental-mode] bucketName localFilePath b2FileName b2 upload-unbound-stream [-h] [--part-size PARTSIZE] [--unused-buffer-timeout-seconds UNUSEDBUFFERTIMEOUTSECONDS] [--content-type CONTENTTYPE] [--sha1 SHA1] [--cache-control CACHE_CONTROL] [--info INFO] [--custom-upload-timestamp CUSTOM_UPLOAD_TIMESTAMP] [--min-part-size MINPARTSIZE] [--threads THREADS] [--no-progress] [--destination-server-side-encryption {SSE-B2,SSE-C}] [--destination-server-side-encryption-algorithm {AES256}] [--legal-hold {on,off}] [--file-retention-mode {compliance,governance}] [--retain-until TIMESTAMP] bucketName localFilePath b2FileName b2 update-file-legal-hold [-h] [fileName] fileId {on,off} b2 update-file-retention [-h] [--retain-until TIMESTAMP] [--bypass-governance] [fileName] fileId {governance,compliance,none} b2 replication-setup [-h] [--destination-profile DESTINATION_PROFILE] [--name NAME] [--priority PRIORITY] [--file-name-prefix PREFIX] [--include-existing-files] SOURCE_BUCKET_NAME DESTINATION_BUCKET_NAME b2 replication-delete [-h] SOURCE_BUCKET_NAME REPLICATION_RULE_NAME b2 replication-pause [-h] SOURCE_BUCKET_NAME REPLICATION_RULE_NAME b2 replication-unpause [-h] SOURCE_BUCKET_NAME REPLICATION_RULE_NAME b2 replication-status [-h] [--rule REPLICATION_RULE_NAME] [--destination-profile DESTINATION_PROFILE] [--dont-scan-destination] [--output-format {console,json,csv}] [--no-progress] [--columns COLUMN ONE,COLUMN TWO] SOURCE_BUCKET_NAME b2 version [-h] [--short] b2 license [-h] b2 install-autocomplete [-h] [--shell {bash}] ``` The environment variable `B2_ACCOUNT_INFO` specifies the SQLite file to use for caching authentication information. The default file is `~/.b2_account_info`. To get more details on a specific command, use `b2 <command> --help`. When authorizing with application keys, this tool requires that the key have the `listBuckets` capability so that it can take the bucket names you provide on the command line and translate them into bucket IDs for the B2 Storage service. Each different command may require additional capabilities. You can find the details for each command in the help for that command.
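For illustration, a minimal session might look like this (the key ID, application key, and bucket name below are hypothetical placeholders):

```bash
# authorize once; credentials are cached in the SQLite file named above
b2 authorize-account yourKeyId yourApplicationKey

# or keep the cache in a custom location for this invocation
B2_ACCOUNT_INFO=/tmp/b2_account_info b2 authorize-account yourKeyId yourApplicationKey

# bucket names given on the command line are translated to bucket IDs via listBuckets
b2 ls yourBucketName
```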
### Docker image #### Authorization Users can either authorize on each command (`list-buckets` is just an example here): ```bash B2_APPLICATION_KEY=<applicationKey> B2_APPLICATION_KEY_ID=<applicationKeyId> docker run --rm -e B2_APPLICATION_KEY -e B2_APPLICATION_KEY_ID backblazeit/b2:latest list-buckets ``` or authorize once and keep the credentials persisted: ```bash docker run --rm -it -v b2:/root backblazeit/b2:latest authorize-account docker run --rm -v b2:/root backblazeit/b2:latest list-buckets # remember to include `-v`; the authorization details are stored there ``` #### Downloading and uploading When uploading a single file, data can be passed to the container via a pipe: ```bash cat source_file.txt | docker run -i --rm -v b2:/root backblazeit/b2:latest upload-unbound-stream bucket_name - target_file_name ``` or by mounting local files in the Docker container: ```bash docker run --rm -v b2:/root -v /home/user/path/to/data:/data backblazeit/b2:latest upload-file bucket_name /data/source_file.txt target_file_name ``` ## Versions When you start working with `b2`, you might notice that more than one script is available to you. This is by design: we use the `ApiVer` methodology to provide all the commands in all the versions, while also providing all the bugfixes to all the old versions. If you use the `b2` command, you're working with the latest stable version. It provides all the bells and whistles, the latest features, and the best performance. While it's a great version to work with, if you want to write a reliable, long-running script, you might find that it breaks after some time: new commands will appear, older ones will be deprecated and removed, and parameters will change. The Backblaze service evolves, and the `b2` CLI evolves with it. However, you have a way around this problem. Instead of using the `b2` command, you can use a version-bound interface, e.g. `b2v3`. This command will always provide the same interface that `ApiVer` version `3` provided. Even if the `b2` command moves on to `ApiVer` version `4`, `6`, or even `10` with some major changes, `b2v3` will still provide the same interface, the same commands, and the same parameters. Over time, it might get slower as we may need to emulate some older behaviors, but we'll ensure that it won't break. ## Contrib ### Detailed logs Verbose logs to stdout can be enabled with the `--verbose` flag. A hidden flag `--debug-logs` can be used to enable logging to a `b2_cli.log` file (with log rotation at midnight) in the current working directory. Take care not to launch the tool from the directory that you are syncing, or the logs will get synced to the remote server (unless that is really what you want to achieve). For advanced users, a hidden option `--log-config <filename>` can be used to enable logging in a user-defined format and verbosity. Check out the [example log configuration](contrib/debug_logs.ini). ## Release History Please refer to the [changelog](CHANGELOG.md). ## Developer Info Please see our [contributing guidelines](CONTRIBUTING.md). B2_Command_Line_Tool-3.19.1/README.release.md000066400000000000000000000004511461201031300203370ustar00rootroot00000000000000# Release Process - Run `nox -s make_release_commit -- X.Y.Z` where `X.Y.Z` is the version you're releasing - Copy the main usage string (from `b2 --help`) to `README.md`.
Handy command for consistent format: `COLUMNS=4000 b2 --help | awk '/^usages:/ {p=1; next} p {sub(/^ */, "", $0); print}'` B2_Command_Line_Tool-3.19.1/b2.spec.template000066400000000000000000000021501461201031300204300ustar00rootroot00000000000000# -*- mode: python ; coding: utf-8 -*- from PyInstaller.utils.hooks import collect_data_files, copy_metadata block_cipher = None # Data from "python-dateutil" is added because of # https://github.com/Backblaze/B2_Command_Line_Tool/issues/689 datas = copy_metadata('b2') + collect_data_files('dateutil') a = Analysis(['b2/_internal/${VERSION}/__main__.py'], pathex=['.'], binaries=[], datas=datas, hiddenimports=['pkg_resources.py2_warn'], hookspath=['pyinstaller-hooks'], runtime_hooks=[], win_no_prefer_redirects=False, win_private_assemblies=False, cipher=block_cipher, noarchive=False) pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher) exe = EXE(pyz, a.scripts, a.binaries, a.zipfiles, a.datas, [], name='${NAME}', debug=False, bootloader_ignore_signals=False, strip=False, upx=True, upx_exclude=[], runtime_tmpdir=None, console=True) B2_Command_Line_Tool-3.19.1/b2/000077500000000000000000000000001461201031300157445ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/b2/LICENSE000077700000000000000000000000001461201031300201642../LICENSEustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/b2/__init__.py000066400000000000000000000010521461201031300200530ustar00rootroot00000000000000###################################################################### # # File: b2/__init__.py # # Copyright 2019 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### # Set default logging handler to avoid "No handler found" warnings. import logging # noqa logging.getLogger(__name__).addHandler(logging.NullHandler()) import b2._internal.version # noqa: E402 __version__ = b2._internal.version.VERSION assert __version__ # PEP-0396 B2_Command_Line_Tool-3.19.1/b2/_internal/000077500000000000000000000000001461201031300177175ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/b2/_internal/__init__.py000066400000000000000000000004431461201031300220310ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/__init__.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### B2_Command_Line_Tool-3.19.1/b2/_internal/_b2v4/000077500000000000000000000000001461201031300206335ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/b2/_internal/_b2v4/__init__.py000066400000000000000000000005731461201031300227510ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_b2v4/__init__.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### # Note: importing console_tool in any shape or form in here will break sys.argv. B2_Command_Line_Tool-3.19.1/b2/_internal/_b2v4/__main__.py000066400000000000000000000005371461201031300227320ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_b2v4/__main__.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from b2._internal._b2v4.registry import main main() B2_Command_Line_Tool-3.19.1/b2/_internal/_b2v4/registry.py000066400000000000000000000040271461201031300230600ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_b2v4/registry.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### # ruff: noqa: F405 from b2._internal.console_tool import * # noqa B2.register_subcommand(AuthorizeAccount) B2.register_subcommand(CancelAllUnfinishedLargeFiles) B2.register_subcommand(CancelLargeFile) B2.register_subcommand(ClearAccount) B2.register_subcommand(CopyFileById) B2.register_subcommand(CreateBucket) B2.register_subcommand(CreateKey) B2.register_subcommand(DeleteBucket) B2.register_subcommand(DeleteFileVersion) B2.register_subcommand(DeleteKey) B2.register_subcommand(DownloadFile) B2.register_subcommand(DownloadFileById) B2.register_subcommand(DownloadFileByName) B2.register_subcommand(Cat) B2.register_subcommand(GetAccountInfo) B2.register_subcommand(GetBucket) B2.register_subcommand(FileInfo) B2.register_subcommand(GetFileInfo) B2.register_subcommand(GetDownloadAuth) B2.register_subcommand(GetDownloadUrlWithAuth) B2.register_subcommand(HideFile) B2.register_subcommand(ListBuckets) B2.register_subcommand(ListKeys) B2.register_subcommand(ListParts) B2.register_subcommand(ListUnfinishedLargeFiles) B2.register_subcommand(Ls) B2.register_subcommand(Rm) B2.register_subcommand(GetUrl) B2.register_subcommand(MakeUrl) B2.register_subcommand(MakeFriendlyUrl) B2.register_subcommand(Sync) B2.register_subcommand(UpdateBucket) B2.register_subcommand(UploadFile) B2.register_subcommand(UploadUnboundStream) B2.register_subcommand(UpdateFileLegalHold) B2.register_subcommand(UpdateFileRetention) B2.register_subcommand(ReplicationSetup) B2.register_subcommand(ReplicationDelete) B2.register_subcommand(ReplicationPause) B2.register_subcommand(ReplicationUnpause) B2.register_subcommand(ReplicationStatus) B2.register_subcommand(Version) B2.register_subcommand(License) B2.register_subcommand(InstallAutocomplete) B2.register_subcommand(NotificationRules) B2_Command_Line_Tool-3.19.1/b2/_internal/_cli/000077500000000000000000000000001461201031300206255ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/b2/_internal/_cli/__init__.py000066400000000000000000000006461461201031300227440ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_cli/__init__.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### """ _cli package contains internals of the command-line interface to the B2. It is not intended to be used as a library. """ B2_Command_Line_Tool-3.19.1/b2/_internal/_cli/arg_parser_types.py000066400000000000000000000040641461201031300245540ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_cli/arg_parser_types.py # # Copyright 2020 Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import argparse import functools import re import arrow from b2sdk.v2 import RetentionPeriod _arrow_version = tuple(int(p) for p in arrow.__version__.split(".")) def parse_comma_separated_list(s): """ Parse a comma-separated list. """ return [word.strip() for word in s.split(",")] def parse_millis_from_float_timestamp(s): """ Parse a timestamp, e.g. 1367900664 or 1367900664.152 """ parsed = arrow.get(float(s)) if _arrow_version < (1, 0, 0): return int(parsed.format("XSSS")) else: return int(parsed.format("x")[:13]) def parse_range(s): """ Parse an optional integer range """ bytes_range = None if s is not None: bytes_range = s.split(',') if len(bytes_range) != 2: raise argparse.ArgumentTypeError('the range must have 2 values: start,end') bytes_range = ( int(bytes_range[0]), int(bytes_range[1]), ) return bytes_range def parse_default_retention_period(s): unit_part = '(' + ')|('.join(RetentionPeriod.KNOWN_UNITS) + ')' m = re.match(r'^(?P<duration>\d+) (?P<unit>%s)$' % (unit_part), s) if not m: raise argparse.ArgumentTypeError( 'default retention period must be in the form of "X days|years"' ) return RetentionPeriod(**{m.group('unit'): int(m.group('duration'))}) def wrap_with_argument_type_error(func, translator=str, exc_type=ValueError): """ Wrap a function that may raise an exception into a function that raises ArgumentTypeError instead. """ @functools.wraps(func) def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except exc_type as e: raise argparse.ArgumentTypeError(translator(e)) return wrapper B2_Command_Line_Tool-3.19.1/b2/_internal/_cli/argcompleters.py000066400000000000000000000064211461201031300240510ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_cli/argcompleters.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import itertools # We import all the necessary modules lazily in completers in order # to avoid upfront cost of the imports when argcompleter is used for # autocompletions. from itertools import islice def bucket_name_completer(prefix, parsed_args, **kwargs): from b2sdk.v2 import unprintable_to_hex from b2._internal._cli.b2api import _get_b2api_for_profile api = _get_b2api_for_profile(getattr(parsed_args, 'profile', None)) res = [ unprintable_to_hex(bucket_name_alias) for bucket_name_alias in itertools.chain.from_iterable( (bucket.name, f"b2://{bucket.name}") for bucket in api.list_buckets(use_cache=True) ) ] return res def file_name_completer(prefix, parsed_args, **kwargs): """ Completes file names in a bucket.
To limit delay & cost, this only lists files returned by a single call to b2_list_file_names. """ from b2sdk.v2 import LIST_FILE_NAMES_MAX_LIMIT, unprintable_to_hex from b2._internal._cli.b2api import _get_b2api_for_profile api = _get_b2api_for_profile(parsed_args.profile) bucket = api.get_bucket_by_name(parsed_args.bucketName) file_versions = bucket.ls( getattr(parsed_args, 'folderName', None) or '', latest_only=True, recursive=False, fetch_count=LIST_FILE_NAMES_MAX_LIMIT, ) return [ unprintable_to_hex(folder_name or file_version.file_name) for file_version, folder_name in islice(file_versions, LIST_FILE_NAMES_MAX_LIMIT) ] def b2uri_file_completer(prefix: str, parsed_args, **kwargs): """ Complete B2 URI pointing to a file-like object in a bucket. """ from b2sdk.v2 import LIST_FILE_NAMES_MAX_LIMIT, unprintable_to_hex from b2._internal._cli.b2api import _get_b2api_for_profile from b2._internal._utils.python_compat import removeprefix from b2._internal._utils.uri import parse_b2_uri api = _get_b2api_for_profile(getattr(parsed_args, 'profile', None)) if prefix.startswith('b2://'): prefix_without_scheme = removeprefix(prefix, 'b2://') if '/' not in prefix_without_scheme: return [ f"b2://{unprintable_to_hex(bucket.name)}/" for bucket in api.list_buckets(use_cache=True) ] b2_uri = parse_b2_uri(prefix) bucket = api.get_bucket_by_name(b2_uri.bucket_name) file_versions = bucket.ls( f"{b2_uri.path}*", latest_only=True, recursive=True, fetch_count=LIST_FILE_NAMES_MAX_LIMIT, with_wildcard=True, ) return [ unprintable_to_hex(f"b2://{bucket.name}/{file_version.file_name}") for file_version, folder_name in islice(file_versions, LIST_FILE_NAMES_MAX_LIMIT) if file_version ] elif prefix.startswith('b2id://'): # listing all files from all buckets is unreasonably expensive return ["b2id://"] else: return [ "b2://", "b2id://", ] B2_Command_Line_Tool-3.19.1/b2/_internal/_cli/autocomplete_cache.py000066400000000000000000000122161461201031300250250ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_cli/autocomplete_cache.py # # Copyright 2020 Backblaze Inc. All Rights Reserved.
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from __future__ import annotations import abc import argparse import itertools import os import pathlib import pickle from typing import Callable import argcomplete import platformdirs from b2._internal.arg_parser import DeprecatedActionMarker from b2._internal.version import VERSION def identity(x): return x class StateTracker(abc.ABC): @abc.abstractmethod def current_state_identifier(self) -> str: raise NotImplementedError() class PickleStore(abc.ABC): @abc.abstractmethod def get_pickle(self, identifier: str) -> bytes | None: raise NotImplementedError() @abc.abstractmethod def set_pickle(self, identifier: str, data: bytes) -> None: raise NotImplementedError() class VersionTracker(StateTracker): def current_state_identifier(self) -> str: return VERSION class HomeCachePickleStore(PickleStore): _dir: pathlib.Path def __init__(self, dir_path: pathlib.Path | None = None) -> None: self._dir = dir_path def _cache_dir(self) -> pathlib.Path: if not self._dir: self._dir = pathlib.Path( platformdirs.user_cache_dir(appname='b2', appauthor='backblaze') ) / 'autocomplete' return self._dir def _fname(self, identifier: str) -> str: return f"b2-autocomplete-cache-{identifier}.pickle" def get_pickle(self, identifier: str) -> bytes | None: path = self._cache_dir() / self._fname(identifier) if path.exists(): with open(path, 'rb') as f: return f.read() def set_pickle(self, identifier: str, data: bytes) -> None: """Sets the pickle for identifier if it doesn't exist. When a new pickle is added, old ones are removed.""" dir_path = self._cache_dir() dir_path.mkdir(parents=True, exist_ok=True) path = dir_path / self._fname(identifier) for file in dir_path.glob('b2-autocomplete-cache-*.pickle'): file.unlink() with open(path, 'wb') as f: f.write(data) class AutocompleteCache: _tracker: StateTracker _store: PickleStore _unpickle: Callable[[bytes], argparse.ArgumentParser] def __init__( self, tracker: StateTracker, store: PickleStore, unpickle: Callable[[bytes], argparse.ArgumentParser] | None = None ): self._tracker = tracker self._store = store self._unpickle = unpickle or pickle.loads def _is_autocomplete_run(self) -> bool: return '_ARGCOMPLETE' in os.environ def autocomplete_from_cache( self, uncached_args: dict | None = None, raise_exc: bool = False ) -> None: if not self._is_autocomplete_run(): return try: identifier = self._tracker.current_state_identifier() pickle_data = self._store.get_pickle(identifier) if pickle_data: parser = self._unpickle(pickle_data) argcomplete.autocomplete(parser, **(uncached_args or {})) except Exception: if raise_exc: raise # Autocomplete from cache failed but maybe we can autocomplete from scratch return def _clean_parser(self, parser: argparse.ArgumentParser) -> None: parser.register('type', None, identity) def _get_deprecated_actions(actions): return [action for action in actions if isinstance(action, DeprecatedActionMarker)] for action in _get_deprecated_actions(parser._actions): parser._actions.remove(action) for option_string in action.option_strings: del parser._option_string_actions[option_string] for action in parser._actions: if action.type not in [str, int]: action.type = None for group in itertools.chain(parser._action_groups, parser._mutually_exclusive_groups): for action in _get_deprecated_actions(group._group_actions): group._group_actions.remove(action) for key in parser._defaults: group.set_defaults(**{key: None}) parser.description = 
None if parser._subparsers: for group_action in parser._subparsers._group_actions: for parser in group_action.choices.values(): self._clean_parser(parser) def cache_and_autocomplete( self, parser: argparse.ArgumentParser, uncached_args: dict | None = None ) -> None: if not self._is_autocomplete_run(): return try: identifier = self._tracker.current_state_identifier() self._clean_parser(parser) self._store.set_pickle(identifier, pickle.dumps(parser)) finally: argcomplete.autocomplete(parser, **(uncached_args or {})) AUTOCOMPLETE = AutocompleteCache(tracker=VersionTracker(), store=HomeCachePickleStore()) B2_Command_Line_Tool-3.19.1/b2/_internal/_cli/autocomplete_install.py000066400000000000000000000265451461201031300254420ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_cli/autocomplete_install.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from __future__ import annotations import abc import io import logging import os import re import shlex import shutil import signal import subprocess import textwrap from datetime import datetime from importlib.util import find_spec from pathlib import Path from shlex import quote import argcomplete from class_registry import ClassRegistry, RegistryKeyError from b2._internal._utils.python_compat import shlex_join logger = logging.getLogger(__name__) SHELL_REGISTRY = ClassRegistry() def autocomplete_install(prog: str, shell: str = 'bash') -> None: """Install autocomplete for the given program.""" try: autocomplete_installer = SHELL_REGISTRY.get(shell, prog=prog) except RegistryKeyError: raise AutocompleteInstallError(f"Unsupported shell: {shell}") autocomplete_installer.install() logger.info("Autocomplete for %s has been enabled.", prog) class ShellAutocompleteInstaller(abc.ABC): shell_exec: str def __init__(self, prog: str): self.prog = prog def install(self) -> None: """Install autocomplete for the given program.""" script_path = self.create_script() if not self.is_enabled(): logger.info( "%s completion doesn't seem to be autoloaded from %s.", self.shell_exec, script_path.parent ) try: self.force_enable(script_path) except NotImplementedError as e: logging.warning( "Autocomplete wasn't automatically picked up and cannot force enable it: %s", e ) if not self.is_enabled(): logger.error("Autocomplete is still not enabled.") raise AutocompleteInstallError(f"Autocomplete for {self.prog} install failed.") def create_script(self) -> Path: """Create autocomplete for the given program.""" shellcode = self.get_shellcode() script_path = self.get_script_path() logger.info("Creating autocompletion script under %s", script_path) script_path.parent.mkdir(exist_ok=True, parents=True, mode=0o755) script_path.write_text(shellcode) return script_path @abc.abstractmethod def force_enable(self, completion_script: Path) -> None: """ Enable autocomplete for the given program. Used as fallback if shell doesn't automatically enable autocomplete. 
""" raise NotImplementedError def get_shellcode(self) -> str: """Get autocomplete shellcode for the given program.""" return argcomplete.shellcode([self.prog], shell=self.shell_exec) @abc.abstractmethod def get_script_path(self) -> Path: """Get autocomplete script path for the given program.""" raise NotImplementedError def program_in_path(self) -> bool: """Check if the given program is in PATH.""" return _silent_success_run([self.shell_exec, '-c', self.prog]) @abc.abstractmethod def is_enabled(self) -> bool: """Check if autocompletion is enabled.""" raise NotImplementedError class BashLikeAutocompleteInstaller(ShellAutocompleteInstaller): shell_exec: str rc_file_path: str def get_rc_path(self) -> Path: return Path(self.rc_file_path).expanduser() def force_enable(self, completion_script: Path) -> None: """Enable autocomplete for the given program, common logic.""" rc_path = self.get_rc_path() if rc_path.exists() and rc_path.read_text().strip(): bck_path = rc_path.with_suffix(f".{datetime.now():%Y-%m-%dT%H-%M-%S}.bak") logger.warning("Backing up %s to %s", rc_path, bck_path) try: shutil.copyfile(rc_path, bck_path) except OSError as e: raise AutocompleteInstallError( f"Failed to backup {rc_path} under {bck_path}" ) from e logger.warning("Explicitly adding %s to %s", completion_script, rc_path) add_or_update_shell_section( rc_path, f"{self.prog} autocomplete", self.prog, self.get_rc_section(completion_script) ) def get_rc_section(self, completion_script: Path) -> str: return f"source {quote(str(completion_script))}" def get_script_path(self) -> Path: """Get autocomplete script path for the given program, common logic.""" script_dir = Path(f"~/.{self.shell_exec}_completion.d/").expanduser() return script_dir / self.prog def is_enabled(self) -> bool: """Check if autocompletion is enabled.""" return _silent_success_run([self.shell_exec, '-i', '-c', f'complete -p {quote(self.prog)}']) @SHELL_REGISTRY.register('bash') class BashAutocompleteInstaller(BashLikeAutocompleteInstaller): shell_exec = 'bash' rc_file_path = "~/.bashrc" @SHELL_REGISTRY.register('zsh') class ZshAutocompleteInstaller(BashLikeAutocompleteInstaller): shell_exec = 'zsh' rc_file_path = "~/.zshrc" def get_rc_section(self, completion_script: Path) -> str: return textwrap.dedent( f"""\ if [[ -z "$_comps" ]] && [[ -t 0 ]]; then autoload -Uz compinit && compinit -i -D; fi source {quote(str(completion_script))} """ ) def get_script_path(self) -> Path: """Custom get_script_path for Zsh, if the structure differs from the base implementation.""" return Path("~/.zsh/completion/").expanduser() / f"_{self.prog}" def is_enabled(self) -> bool: rc_path = self.get_rc_path() if not rc_path.exists(): # if zshrc is missing `zshrc -i` may hang on creation wizard when emulating tty rc_path.touch(mode=0o750) _silent_success_run_with_pty( [self.shell_exec, '-c', 'autoload -Uz compaudit; echo AUDIT; compaudit'] ) cmd = [self.shell_exec, '-i', '-c', f'[[ -v _comps[{quote(self.prog)}] ]]'] return _silent_success_run_with_tty(cmd) @SHELL_REGISTRY.register('fish') class FishAutocompleteInstaller(ShellAutocompleteInstaller): shell_exec = 'fish' rc_file_path = "~/.config/fish/config.fish" def force_enable(self, completion_script: Path) -> None: raise NotImplementedError("Fish shell doesn't support manual completion enabling.") def get_script_path(self) -> Path: """Get autocomplete script path for the given program, common logic.""" complete_paths = [ Path(p) for p in shlex.split( subprocess.run( [self.shell_exec, '-c', 'echo $fish_complete_path'], timeout=30, 
text=True, check=True, capture_output=True ).stdout ) ] user_path = Path("~/.config/fish/completions").expanduser() if complete_paths: target_path = user_path if user_path in complete_paths else complete_paths[0] else: logger.warning("$fish_complete_path is empty, falling back to %r", user_path) target_path = user_path return target_path / f"{self.prog}.fish" def is_enabled(self) -> bool: """ Check if autocompletion is enabled. Fish seems to lazy-load completions, hence first we trigger completion. That alone cannot be used, since fish tends to always propose completions (e.g. suggesting similarly named filenames). """ environ = os.environ.copy() environ.setdefault("TERM", "xterm") # TERM has to be set for fish to load completions return _silent_success_run_with_tty( [ self.shell_exec, '-i', '-c', f'string length -q -- (complete -C{quote(f"{self.prog} ")} >/dev/null && complete -c {quote(self.prog)})' ], env=environ, ) def _silent_success_run_with_tty( cmd: list[str], timeout: int = 30, env: dict | None = None ) -> bool: emulate_tty = not os.isatty(0) # is True under GHA or pytest-xdist if emulate_tty and not find_spec('pexpect'): emulate_tty = False logger.warning( "pexpect is needed to check autocomplete installation correctness without tty. " "You can install it via `pip install pexpect`." ) run_func = _silent_success_run_with_pty if emulate_tty else _silent_success_run return run_func(cmd, timeout=timeout, env=env) def _silent_success_run(cmd: list[str], timeout: int = 30, env: dict | None = None) -> bool: p = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.DEVNULL, start_new_session=True, # prevents `zsh -i` messing with parent tty under pytest-xdist env=env, ) try: stdout, stderr = p.communicate(timeout=timeout) except subprocess.TimeoutExpired: p.kill() stdout, stderr = p.communicate(timeout=1) logger.warning("Command %r timed out, stdout: %r, stderr: %r", cmd, stdout, stderr) else: logger.log( logging.DEBUG if p.returncode == 0 else logging.WARNING, "Command %r exited with code %r, stdout: %r, stderr: %r", cmd, p.returncode, stdout, stderr ) return p.returncode == 0 def _silent_success_run_with_pty( cmd: list[str], timeout: int = 30, env: dict | None = None ) -> bool: """ Run a command with emulated terminal and return whether it succeeded. """ import pexpect command_str = shlex_join(cmd) child = pexpect.spawn(command_str, timeout=timeout, env=env) output = io.BytesIO() try: child.logfile_read = output child.expect(pexpect.EOF) except pexpect.TIMEOUT: logger.warning("Command %r timed out, output: %r", cmd, output.getvalue()) child.kill(signal.SIGKILL) return False finally: child.close() logger.log( logging.DEBUG if child.exitstatus == 0 else logging.WARNING, "Command %r exited with code %r, output: %r", cmd, child.exitstatus, output.getvalue() ) return child.exitstatus == 0 def add_or_update_shell_section( path: Path, section: str, managed_by: str, content: str, comment_sign="#" ) -> None: """Add or update a section in a file.""" section_start = f"{comment_sign} >>> {section} >>>" section_end = f"{comment_sign} <<< {section} <<<" assert section_end not in content try: file_content = path.read_text() except FileNotFoundError: file_content = "" full_content = f""" {section_start} {comment_sign} This section is managed by {managed_by} . Manual edit may break automated updates. 
{content} {section_end} """.strip() pattern = re.compile( rf'^{re.escape(section_start)}.*?^{re.escape(section_end)}', flags=re.MULTILINE | re.DOTALL ) if pattern.search(file_content): file_content = pattern.sub(full_content, file_content) else: file_content += f"\n{full_content}\n" path.write_text(file_content) class AutocompleteInstallError(Exception): """Exception raised when autocomplete installation fails.""" SUPPORTED_SHELLS = sorted(SHELL_REGISTRY.keys()) B2_Command_Line_Tool-3.19.1/b2/_internal/_cli/b2api.py000066400000000000000000000035521461201031300222010ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_cli/b2api.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import os from typing import Optional from b2sdk.v2 import ( AuthInfoCache, B2Api, B2HttpApiConfig, InMemoryAccountInfo, InMemoryCache, SqliteAccountInfo, ) from b2sdk.v2.exception import MissingAccountData from b2._internal._cli.const import B2_USER_AGENT_APPEND_ENV_VAR def _get_b2api_for_profile( profile: Optional[str] = None, raise_if_does_not_exist: bool = False, **kwargs, ) -> B2Api: if raise_if_does_not_exist: account_info_file = SqliteAccountInfo.get_user_account_info_path(profile=profile) if not os.path.exists(account_info_file): raise MissingAccountData(account_info_file) account_info = SqliteAccountInfo(profile=profile) b2api = B2Api( api_config=_get_b2httpapiconfig(), account_info=account_info, cache=AuthInfoCache(account_info), **kwargs, ) if os.getenv('CI', False) and os.getenv( 'GITHUB_REPOSITORY', '', ).endswith('/B2_Command_Line_Tool'): b2http = b2api.session.raw_api.b2_http b2http.CONNECTION_TIMEOUT = 3 + 6 + 1 b2http.TIMEOUT = 12 b2http.TIMEOUT_FOR_COPY = 24 b2http.TIMEOUT_FOR_UPLOAD = 24 b2http.TRY_COUNT_DATA = 2 b2http.TRY_COUNT_DOWNLOAD = 2 b2http.TRY_COUNT_HEAD = 2 b2http.TRY_COUNT_OTHER = 2 return b2api def _get_inmemory_b2api(**kwargs) -> B2Api: return B2Api(InMemoryAccountInfo(), cache=InMemoryCache(), **kwargs) def _get_b2httpapiconfig(): return B2HttpApiConfig(user_agent_append=os.environ.get(B2_USER_AGENT_APPEND_ENV_VAR),) B2_Command_Line_Tool-3.19.1/b2/_internal/_cli/b2args.py000066400000000000000000000106471461201031300223670ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_cli/b2args.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### """ Utility functions for adding b2-specific arguments to an argparse parser. 
""" import argparse import functools from os import environ from typing import Optional, Tuple from b2._internal._cli.arg_parser_types import wrap_with_argument_type_error from b2._internal._cli.argcompleters import b2uri_file_completer, bucket_name_completer from b2._internal._cli.const import ( B2_APPLICATION_KEY_ENV_VAR, B2_APPLICATION_KEY_ID_ENV_VAR, ) from b2._internal._utils.uri import B2URI, B2URIBase, parse_b2_uri, parse_uri def b2id_or_file_like_b2_uri(value: str) -> B2URIBase: b2_uri = parse_b2_uri(value) if isinstance(b2_uri, B2URI): if b2_uri.is_dir(): raise ValueError( f"B2 URI pointing to a file-like object is required, but {value} was provided" ) return b2_uri return b2_uri def parse_bucket_name(value: str, allow_all_buckets: bool = False) -> str: uri = parse_uri(value, allow_all_buckets=allow_all_buckets) if isinstance(uri, B2URI): if uri.path: raise ValueError( f"Expected a bucket name, but {value!r} was provided which contains path part: {uri.path!r}" ) return uri.bucket_name return str(value) B2ID_OR_B2_URI_ARG_TYPE = wrap_with_argument_type_error(parse_b2_uri) B2ID_OR_B2_URI_OR_ALL_BUCKETS_ARG_TYPE = wrap_with_argument_type_error( functools.partial(parse_b2_uri, allow_all_buckets=True) ) B2ID_OR_FILE_LIKE_B2_URI_ARG_TYPE = wrap_with_argument_type_error(b2id_or_file_like_b2_uri) def add_bucket_name_argument( parser: argparse.ArgumentParser, name="bucketName", help="Target bucket name", nargs=None ): parser.add_argument( name, type=wrap_with_argument_type_error( functools.partial(parse_bucket_name, allow_all_buckets=nargs == "?") ), help=help, nargs=nargs ).completer = bucket_name_completer def add_b2_uri_argument( parser: argparse.ArgumentParser, name="B2_URI", help="B2 URI pointing to a bucket with optional path, e.g. b2://yourBucket, b2://yourBucket/file.txt, b2://yourBucket/folderName/", ): """ Add B2 URI as an argument to the parser. B2 URI can point to a bucket optionally with a object name prefix (directory). """ parser.add_argument( name, type=wrap_with_argument_type_error(functools.partial(parse_b2_uri, allow_b2id=False)), help=help, ).completer = b2uri_file_completer def add_b2id_or_b2_uri_argument( parser: argparse.ArgumentParser, name="B2_URI", *, allow_all_buckets: bool = False ): """ Add B2 URI (b2:// or b2id://) as an argument to the parser. B2 URI can point to a bucket optionally with a object name prefix (directory) or a file-like object. If allow_all_buckets is True, the argument will accept B2 URI pointing to all buckets. """ if allow_all_buckets: argument_spec = parser.add_argument( name, type=B2ID_OR_B2_URI_OR_ALL_BUCKETS_ARG_TYPE, default=None, nargs="?", help="B2 URI pointing to a bucket, directory, file or all buckets. " "e.g. b2://yourBucket, b2://yourBucket/file.txt, b2://yourBucket/folderName/, b2id://fileId, or b2://", ) else: argument_spec = parser.add_argument( name, type=B2ID_OR_B2_URI_ARG_TYPE, help="B2 URI pointing to a bucket, directory or a file. " "e.g. b2://yourBucket, b2://yourBucket/file.txt, b2://yourBucket/folderName/, or b2id://fileId", ) argument_spec.completer = b2uri_file_completer def add_b2id_or_file_like_b2_uri_argument(parser: argparse.ArgumentParser, name="B2_URI"): """ Add a B2 URI pointing to a file as an argument to the parser. """ parser.add_argument( name, type=B2ID_OR_FILE_LIKE_B2_URI_ARG_TYPE, help="B2 URI pointing to a file, e.g. 
b2://yourBucket/file.txt or b2id://fileId", ).completer = b2uri_file_completer def get_keyid_and_key_from_env_vars() -> Tuple[Optional[str], Optional[str]]: return environ.get(B2_APPLICATION_KEY_ID_ENV_VAR), environ.get(B2_APPLICATION_KEY_ENV_VAR) B2_Command_Line_Tool-3.19.1/b2/_internal/_cli/const.py000066400000000000000000000020051461201031300223220ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_cli/const.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### # Optional Env variable to use for getting account info while authorizing B2_APPLICATION_KEY_ID_ENV_VAR = 'B2_APPLICATION_KEY_ID' B2_APPLICATION_KEY_ENV_VAR = 'B2_APPLICATION_KEY' # Optional Env variable to use for adding custom string to the User Agent B2_USER_AGENT_APPEND_ENV_VAR = 'B2_USER_AGENT_APPEND' B2_ENVIRONMENT_ENV_VAR = 'B2_ENVIRONMENT' B2_DESTINATION_SSE_C_KEY_B64_ENV_VAR = 'B2_DESTINATION_SSE_C_KEY_B64' B2_DESTINATION_SSE_C_KEY_ID_ENV_VAR = 'B2_DESTINATION_SSE_C_KEY_ID' B2_SOURCE_SSE_C_KEY_B64_ENV_VAR = 'B2_SOURCE_SSE_C_KEY_B64' # Threads defaults DEFAULT_THREADS = 10 # Constants used in the B2 API CREATE_BUCKET_TYPES = ('allPublic', 'allPrivate') B2_ESCAPE_CONTROL_CHARACTERS = 'B2_ESCAPE_CONTROL_CHARACTERS' B2_Command_Line_Tool-3.19.1/b2/_internal/_cli/obj_dumps.py000066400000000000000000000045571461201031300231740ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_cli/obj_dumps.py # # Copyright 2024 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import io from b2sdk.v2 import ( unprintable_to_hex, ) _simple_repr_map = { False: "false", None: "null", True: "true", } _simple_repr_map_values = set(_simple_repr_map.values()) | {"yes", "no"} def _yaml_simple_repr(obj): """ Like YAML for simple types, but also escapes control characters for safety. """ if isinstance(obj, (int, float)) and not isinstance(obj, bool): return str(obj) simple_repr = _simple_repr_map.get(obj) if simple_repr: return simple_repr obj_repr = unprintable_to_hex(str(obj)) if isinstance( obj, str ) and (obj == "" or obj_repr.lower() in _simple_repr_map_values or obj_repr.isdigit()): obj_repr = repr(obj) # add quotes to distinguish from numbers and booleans return obj_repr def _id_name_first_key(item): try: return ("id", "name").index(str(item[0]).lower()), item[0], item[1] except ValueError: return 2, item[0], item[1] def _dump(data, indent=0, skip=False, *, output): prefix = " " * indent if isinstance(data, dict): for idx, (key, value) in enumerate(sorted(data.items(), key=_id_name_first_key)): output.write(f"{'' if skip and idx == 0 else prefix}{_yaml_simple_repr(key)}: ") if isinstance(value, (dict, list)): output.write("\n") _dump(value, indent + 2, output=output) else: _dump(value, 0, True, output=output) elif isinstance(data, list): for idx, item in enumerate(data): output.write(f"{'' if skip and idx == 0 else prefix}- ") _dump(item, indent + 2, True, output=output) else: output.write(f"{'' if skip else prefix}{_yaml_simple_repr(data)}\n") def readable_yaml_dump(data, output: io.TextIOBase) -> None: """ Print YAML-like human-readable representation of the data. :param data: The data to be printed. Can be a list, dict, or any basic datatype. 
:param output: An output stream derived from io.TextIOBase where the data is to be printed. """ _dump(data, output=output) B2_Command_Line_Tool-3.19.1/b2/_internal/_cli/obj_loads.py000066400000000000000000000035111461201031300231330ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_cli/obj_loads.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from __future__ import annotations import argparse import io import json from typing import TypeVar from b2sdk.v2 import get_b2sdk_doc_urls try: import pydantic from pydantic import TypeAdapter, ValidationError except ImportError: pydantic = None def convert_error_to_human_readable(validation_exc: ValidationError) -> str: buf = io.StringIO() for error in validation_exc.errors(): loc = '.'.join(str(loc) for loc in error['loc']) buf.write(f' In field {loc!r} input was `{error["input"]!r}`, error: {error["msg"]}\n') return buf.getvalue() def describe_type(type_) -> str: urls = get_b2sdk_doc_urls(type_) if urls: url_links = ', '.join(f'{name} <{url}>' for name, url in urls.items()) return f'{type_.__name__} ({url_links})' return type_.__name__ T = TypeVar('T') def validated_loads(data: str, expected_type: type[T] | None = None) -> T: if expected_type is not None and pydantic is not None: ta = TypeAdapter(expected_type) try: val = ta.validate_json(data) except ValidationError as e: errors = convert_error_to_human_readable(e) raise argparse.ArgumentTypeError( f'Invalid value inputted, expected {describe_type(expected_type)}, got {data!r}, more detail below:\n{errors}' ) from e else: try: val = json.loads(data) except json.JSONDecodeError as e: raise argparse.ArgumentTypeError(f'{data!r} is not a valid JSON value') from e return val B2_Command_Line_Tool-3.19.1/b2/_internal/_cli/shell.py000066400000000000000000000017211461201031300223070ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_cli/shell.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import os import os.path import shutil from typing import Optional def detect_shell() -> Optional[str]: """Detect the shell we are running in.""" shell_var = os.environ.get('SHELL') if shell_var: return os.path.basename(shell_var) return None def resolve_short_call_name(binary_path: str) -> str: """ Resolve the short name of the binary. If binary is in PATH, return only basename, otherwise return a full path. This method is to be used with sys.argv[0] to resolve handy name for the user instead of full path. """ if shutil.which(binary_path) == binary_path: return os.path.basename(binary_path) return binary_path B2_Command_Line_Tool-3.19.1/b2/_internal/_utils/000077500000000000000000000000001461201031300212165ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/b2/_internal/_utils/__init__.py000066400000000000000000000004521461201031300233300ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_utils/__init__.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### B2_Command_Line_Tool-3.19.1/b2/_internal/_utils/python_compat.py000066400000000000000000000030731461201031300244570ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_utils/python_compat.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### """ Utilities for compatibility with older Python versions. """ import functools import shlex import sys if sys.version_info < (3, 9): def removeprefix(s: str, prefix: str) -> str: return s[len(prefix):] if s.startswith(prefix) else s else: removeprefix = str.removeprefix if sys.version_info < (3, 8): class singledispatchmethod: """ singledispatchmethod backport for Python 3.7. There are no guarantees for its completeness. """ def __init__(self, method): self.dispatcher = functools.singledispatch(method) self.method = method def register(self, cls, method=None): return self.dispatcher.register(cls, func=method) def __get__(self, obj, cls): @functools.wraps(self.method) def method_wrapper(arg, *args, **kwargs): method_desc = self.dispatcher.dispatch(arg.__class__) return method_desc.__get__(obj, cls)(arg, *args, **kwargs) method_wrapper.register = self.register return method_wrapper def shlex_join(split_command): return ' '.join(shlex.quote(arg) for arg in split_command) else: singledispatchmethod = functools.singledispatchmethod shlex_join = shlex.join B2_Command_Line_Tool-3.19.1/b2/_internal/_utils/uri.py000066400000000000000000000164231461201031300223750ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/_utils/uri.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from __future__ import annotations import dataclasses import pathlib import urllib.parse from pathlib import Path from typing import Sequence from b2sdk.v2 import ( B2Api, DownloadVersion, FileVersion, Filter, ) from b2sdk.v2.exception import B2Error from b2._internal._utils.python_compat import removeprefix, singledispatchmethod class B2URIBase: pass @dataclasses.dataclass(frozen=True) class B2URI(B2URIBase): """ B2 URI designating a particular object by name & bucket or "subdirectory" in a bucket. Please note, both files and directories are symbolical concept, not a real one in B2, i.e. there is no such thing as "directory" in B2, but it is possible to mimic it by using object names with non-trailing slashes. To make it possible, it is highly discouraged to use trailing slashes in object names. Please note `path` attribute should exclude prefixing slash, i.e. `path` should be empty string for the root of the bucket. """ bucket_name: str path: str = "" def __str__(self) -> str: return f"b2://{self.bucket_name}/{self.path}" def is_dir(self) -> bool | None: """ Return if the path is a directory. Please note this is symbolical. It is possible for file to have a trailing slash, but it is HIGHLY discouraged, and not supported by B2 CLI. At the same time it is possible for a directory to not have a trailing slash, which is discouraged, but allowed by B2 CLI. This is done to mimic unix-like Path. 
In practice, this means that `.is_dir() == True` will always be interpreted as "this is a directory", but reverse is not necessary true, and `not uri.is_dir()` should be merely interpreted as "this is a directory or a file". :return: True if the path is a directory, None if it is unknown """ return not self.path or self.path.endswith("/") or None @dataclasses.dataclass(frozen=True) class B2FileIdURI(B2URIBase): """ B2 URI designating a particular file by its id. """ file_id: str def __str__(self) -> str: return f"b2id://{self.file_id}" def parse_uri(uri: str, *, allow_all_buckets: bool = False) -> Path | B2URI | B2FileIdURI: """ Parse URI. :param uri: string to parse :param allow_all_buckets: if True, allow `b2://` without a bucket name to refer to all buckets :return: B2 URI or Path :raises ValueError: if the URI is invalid """ if not uri: raise ValueError("URI cannot be empty") parsed = urllib.parse.urlsplit(uri) if parsed.scheme == "": return pathlib.Path(uri) return _parse_b2_uri(uri, parsed, allow_all_buckets=allow_all_buckets) def parse_b2_uri( uri: str, *, allow_all_buckets: bool = False, allow_b2id: bool = True ) -> B2URI | B2FileIdURI: """ Parse B2 URI. :param uri: string to parse :param allow_all_buckets: if True, allow `b2://` without a bucket name to refer to all buckets :param allow_b2id: if True, allow `b2id://` to refer to a file by its id :return: B2 URI :raises ValueError: if the URI is invalid """ parsed = urllib.parse.urlsplit(uri) return _parse_b2_uri(uri, parsed, allow_all_buckets=allow_all_buckets, allow_b2id=allow_b2id) def _parse_b2_uri( uri, parsed: urllib.parse.SplitResult, *, allow_all_buckets: bool = False, allow_b2id: bool = True ) -> B2URI | B2FileIdURI: if parsed.scheme in ("b2", "b2id"): path = urllib.parse.urlunsplit(parsed._replace(scheme="", netloc="")) if not parsed.netloc: if allow_all_buckets: if path: raise ValueError( f"Invalid B2 URI: all buckets URI doesn't allow non-empty path, but {path!r} was provided" ) return B2URI(bucket_name="") raise ValueError(f"Invalid B2 URI: {uri!r}") elif parsed.password or parsed.username: raise ValueError( "Invalid B2 URI: credentials passed using `user@password:` syntax is not supported in URI" ) if parsed.scheme == "b2": return B2URI(bucket_name=parsed.netloc, path=removeprefix(path, "/")) elif parsed.scheme == "b2id" and allow_b2id: return B2FileIdURI(file_id=parsed.netloc) else: raise ValueError(f"Unsupported URI scheme: {parsed.scheme!r}") class B2URIAdapter: """ Adapter for using B2URI with B2Api. When this matures enough methods from here should be moved to b2sdk B2Api class. 
""" def __init__(self, api: B2Api): self.api = api def __getattr__(self, name): return getattr(self.api, name) @singledispatchmethod def download_file_by_uri(self, uri, *args, **kwargs): raise NotImplementedError(f"Unsupported URI type: {type(uri)}") @download_file_by_uri.register def _(self, uri: B2URI, *args, **kwargs): bucket = self.get_bucket_by_name(uri.bucket_name) return bucket.download_file_by_name(uri.path, *args, **kwargs) @download_file_by_uri.register def _(self, uri: B2FileIdURI, *args, **kwargs): return self.download_file_by_id(uri.file_id, *args, **kwargs) @singledispatchmethod def get_file_info_by_uri(self, uri, *args, **kwargs): raise NotImplementedError(f"Unsupported URI type: {type(uri)}") @get_file_info_by_uri.register def _(self, uri: B2URI, *args, **kwargs) -> DownloadVersion: return self.get_file_info_by_name(uri.bucket_name, uri.path, *args, **kwargs) @get_file_info_by_uri.register def _(self, uri: B2FileIdURI, *args, **kwargs) -> FileVersion: return self.get_file_info(uri.file_id, *args, **kwargs) @singledispatchmethod def get_download_url_by_uri(self, uri, *args, **kwargs): raise NotImplementedError(f"Unsupported URI type: {type(uri)}") @get_download_url_by_uri.register def _(self, uri: B2URI, *args, **kwargs) -> str: return self.get_download_url_for_file_name(uri.bucket_name, uri.path, *args, **kwargs) @get_download_url_by_uri.register def _(self, uri: B2FileIdURI, *args, **kwargs) -> str: return self.get_download_url_for_fileid(uri.file_id, *args, **kwargs) @singledispatchmethod def list_file_versions_by_uri(self, uri, *args, **kwargs): raise NotImplementedError(f"Unsupported URI type: {type(uri)}") @list_file_versions_by_uri.register def _(self, uri: B2URI, *args, filters: Sequence[Filter] = (), **kwargs): bucket = self.api.get_bucket_by_name(uri.bucket_name) try: yield from bucket.ls(uri.path, *args, filters=filters, **kwargs) except ValueError as error: # Wrap these errors into B2Error. At the time of writing there's # exactly one – `with_wildcard` being passed without `recursive` option. raise B2Error(error.args[0]) @list_file_versions_by_uri.register def _(self, uri: B2FileIdURI, *args, **kwargs): yield self.get_file_info_by_uri(uri), None B2_Command_Line_Tool-3.19.1/b2/_internal/arg_parser.py000066400000000000000000000153651461201031300224300ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/arg_parser.py # # Copyright 2020 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from __future__ import annotations import argparse import functools import locale import re import sys import textwrap import unittest.mock from rst2ansi import rst2ansi class B2RawTextHelpFormatter(argparse.RawTextHelpFormatter): """ CLI custom formatter. It removes default "usage: " text and prints usage for all (non-hidden) subcommands. 
""" def __init__(self, *args, show_all: bool = False, **kwargs): super().__init__(*args, **kwargs) self.show_all = show_all def add_usage(self, usage, actions, groups, prefix=None): if prefix is None: prefix = '' super().add_usage(usage, actions, groups, prefix) def add_argument(self, action): if isinstance(action, argparse._SubParsersAction) and action.help is not argparse.SUPPRESS: usages = [] for choice in self._unique_choice_values(action): deprecated = getattr(choice, 'deprecated', False) if deprecated: if self.show_all: usages.append(f'(DEPRECATED) {choice.format_usage()}') else: usages.append(choice.format_usage()) self.add_text(''.join(usages)) else: super().add_argument(action) @classmethod def _unique_choice_values(cls, action): seen = set() seen_add = seen.add for value in action.choices.values(): if not (value in seen or seen_add(value)): yield value class _HelpAllAction(argparse._HelpAction): """Like argparse._HelpAction but prints help for all subcommands (even deprecated ones).""" def __call__(self, parser, namespace, values, option_string=None): parser.print_help(show_all=True) parser.exit() class B2ArgumentParser(argparse.ArgumentParser): """ CLI custom parser. It fixes indentation of the description, set the custom formatter as a default and use help message in case of error. """ def __init__( self, *args, add_help_all: bool = True, for_docs: bool = False, deprecated: bool = False, **kwargs ): """ :param for_docs: is this parser used for generating docs :param deprecated: is this option deprecated """ self._raw_description = None self._description = None self._for_docs = for_docs self.deprecated = deprecated kwargs.setdefault('formatter_class', B2RawTextHelpFormatter) super().__init__(*args, **kwargs) if add_help_all: self.register('action', 'help_all', _HelpAllAction) self.add_argument( '--help-all', help='show help for all options, including deprecated ones', action='help_all', ) @property def description(self): if self._description is None and self._raw_description is not None: if self._for_docs: self._description = textwrap.dedent(self._raw_description) else: encoding = self._get_encoding() self._description = rst2ansi( self._raw_description.encode(encoding), output_encoding=encoding ) return self._description @description.setter def description(self, value): self._raw_description = value def error(self, message): self.print_help() self.exit(2, f'\n{self.prog}: error: {message}\n') @classmethod def _get_encoding(cls): _, locale_encoding = locale.getdefaultlocale() # Check if the stdout is properly set if sys.stdout.encoding is not None: # Use the stdout encoding return sys.stdout.encoding # Fall back to the locale_encoding if stdout encoding is not set elif locale_encoding is not None: return locale_encoding # locales are improperly configured return 'ascii' def print_help(self, *args, show_all: bool = False, **kwargs): """ Print help message. 
""" with unittest.mock.patch.object( self, 'formatter_class', functools.partial(B2RawTextHelpFormatter, show_all=show_all) ): super().print_help(*args, **kwargs) SUPPORT_CAMEL_CASE_ARGUMENTS = False def enable_camel_case_arguments(): global SUPPORT_CAMEL_CASE_ARGUMENTS SUPPORT_CAMEL_CASE_ARGUMENTS = True def make_deprecated_action_call(action): def deprecated_action_call(self, parser, namespace, values, option_string=None, **kwargs): action.__call__(self, parser, namespace, values, option_string, **kwargs) if option_string: kebab_option_string = _camel_to_kebab(option_string) print( f"The '{option_string}' argument is deprecated. Use '{kebab_option_string}' instead.", file=sys.stderr ) return deprecated_action_call _kebab_to_snake_pattern = re.compile(r'-') _camel_to_kebab_pattern = re.compile(r'(?<=[a-z])([A-Z])') _kebab_to_camel_pattern = re.compile(r'-(\w)') def _camel_to_kebab(s: str): return _camel_to_kebab_pattern.sub(r'-\1', s).lower() def _kebab_to_camel(s: str): return "--" + _kebab_to_camel_pattern.sub(lambda m: m.group(1).upper(), s[2:]) def _kebab_to_snake(s: str): return _kebab_to_snake_pattern.sub('_', s) class DeprecatedActionMarker: pass def add_normalized_argument(parser, param_name, *args, **kwargs): param_name_kebab = _camel_to_kebab(param_name) param_name_camel = _kebab_to_camel(param_name_kebab) dest_name_snake = _kebab_to_snake(param_name_kebab)[2:] kwargs_kebab = dict(kwargs) kwargs_camel = kwargs kwargs_camel['help'] = argparse.SUPPRESS if 'dest' not in kwargs_kebab: kwargs_kebab['dest'] = dest_name_snake kwargs_camel['dest'] = dest_name_snake if 'action' in kwargs: if isinstance(kwargs['action'], str): action = parser._registry_get('action', kwargs['action']) else: action = kwargs['action'] else: action = argparse._StoreAction kwargs_camel['action'] = type( 'DeprecatedAction', (action, DeprecatedActionMarker), {'__call__': make_deprecated_action_call(action)} ) parser.add_argument(f'{param_name_kebab}', *args, **kwargs_kebab) if SUPPORT_CAMEL_CASE_ARGUMENTS and param_name_kebab != param_name_camel: parser.add_argument(f'{param_name_camel}', *args, **kwargs_camel) B2_Command_Line_Tool-3.19.1/b2/_internal/b2v3/000077500000000000000000000000001461201031300204735ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/b2/_internal/b2v3/__init__.py000066400000000000000000000005721461201031300226100ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/b2v3/__init__.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### # Note: importing console_tool in any shape or form in here will break sys.argv. B2_Command_Line_Tool-3.19.1/b2/_internal/b2v3/__main__.py000066400000000000000000000005351461201031300225700ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/b2v3/__main__.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from b2._internal.b2v3.registry import main main() B2_Command_Line_Tool-3.19.1/b2/_internal/b2v3/registry.py000066400000000000000000000077721461201031300227320ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/b2v3/registry.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### # ruff: noqa: F405 from b2._internal._b2v4.registry import * # noqa from b2._internal._cli.b2api import _get_b2api_for_profile from b2._internal.arg_parser import enable_camel_case_arguments from .rm import Rm enable_camel_case_arguments() class ConsoleTool(ConsoleTool): # same as original console tool, but does not use InMemoryAccountInfo and InMemoryCache # when auth env vars are used @classmethod def _initialize_b2_api(cls, args: argparse.Namespace, kwargs: dict) -> B2Api: return _get_b2api_for_profile(profile=args.profile, **kwargs) def main() -> None: # this is a copy of v4 `main()` but with custom console tool class ct = ConsoleTool(stdout=sys.stdout, stderr=sys.stderr) exit_status = ct.run_command(sys.argv) logger.info('\\\\ %s %s %s //', SEPARATOR, ('exit=%s' % exit_status).center(8), SEPARATOR) # I haven't tracked down the root cause yet, but in Python 2.7, the futures # package is hanging on exit sometimes, waiting for a thread to finish. # This happens when using sync to upload files. sys.stdout.flush() sys.stderr.flush() logging.shutdown() os._exit(exit_status) class Ls(B2URIBucketNFolderNameArgMixin, BaseLs): """ {BaseLs} Examples .. note:: Note the use of quotes, to ensure that special characters are not expanded by the shell. List csv and tsv files (in any directory, in the whole bucket): .. code-block:: {NAME} ls --recursive --withWildcard bucketName "*.[ct]sv" List all info.txt files from directories `b?`, where `?` is any character: .. code-block:: {NAME} ls --recursive --withWildcard bucketName "b?/info.txt" List all pdf files from directories b0 to b9 (including sub-directories): .. code-block:: {NAME} ls --recursive --withWildcard bucketName "b[0-9]/*.pdf" List all buckets: ..
code-block:: {NAME} ls Requires capability: - **listFiles** - **listBuckets** (if bucket name is not provided) """ ALLOW_ALL_BUCKETS = True B2.register_subcommand(AuthorizeAccount) B2.register_subcommand(CancelAllUnfinishedLargeFiles) B2.register_subcommand(CancelLargeFile) B2.register_subcommand(ClearAccount) B2.register_subcommand(CopyFileById) B2.register_subcommand(CreateBucket) B2.register_subcommand(CreateKey) B2.register_subcommand(DeleteBucket) B2.register_subcommand(DeleteFileVersion) B2.register_subcommand(DeleteKey) B2.register_subcommand(DownloadFile) B2.register_subcommand(DownloadFileById) B2.register_subcommand(DownloadFileByName) B2.register_subcommand(Cat) B2.register_subcommand(GetAccountInfo) B2.register_subcommand(GetBucket) B2.register_subcommand(FileInfo) B2.register_subcommand(GetFileInfo) B2.register_subcommand(GetDownloadAuth) B2.register_subcommand(GetDownloadUrlWithAuth) B2.register_subcommand(HideFile) B2.register_subcommand(ListBuckets) B2.register_subcommand(ListKeys) B2.register_subcommand(ListParts) B2.register_subcommand(ListUnfinishedLargeFiles) B2.register_subcommand(Ls) B2.register_subcommand(Rm) B2.register_subcommand(GetUrl) B2.register_subcommand(MakeUrl) B2.register_subcommand(MakeFriendlyUrl) B2.register_subcommand(Sync) B2.register_subcommand(UpdateBucket) B2.register_subcommand(UploadFile) B2.register_subcommand(UploadUnboundStream) B2.register_subcommand(UpdateFileLegalHold) B2.register_subcommand(UpdateFileRetention) B2.register_subcommand(ReplicationSetup) B2.register_subcommand(ReplicationDelete) B2.register_subcommand(ReplicationPause) B2.register_subcommand(ReplicationUnpause) B2.register_subcommand(ReplicationStatus) B2.register_subcommand(Version) B2.register_subcommand(License) B2.register_subcommand(InstallAutocomplete) B2.register_subcommand(NotificationRules) B2_Command_Line_Tool-3.19.1/b2/_internal/b2v3/rm.py000066400000000000000000000026431461201031300214700ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/b2v3/rm.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from __future__ import annotations from b2._internal._b2v4.registry import B2URIBucketNFolderNameArgMixin, BaseRm # NOTE: We need to keep v3 Rm in a separate file, because we need to import it in # unit tests without registering any commands. class Rm(B2URIBucketNFolderNameArgMixin, BaseRm): """ {BaseRm} Examples .. note:: Note the use of quotes, to ensure that special characters are not expanded by the shell. .. note:: Use with caution. Running the examples presented below can cause data loss. Remove all csv and tsv files (in any directory, in the whole bucket): .. code-block:: {NAME} rm --recursive --withWildcard bucketName "*.[ct]sv" Remove all info.txt files from directories `b?`, where `?` is any character: .. code-block:: {NAME} rm --recursive --withWildcard bucketName "b?/info.txt" Remove all pdf files from directories b0 to b9 (including sub-directories): ..
code-block:: {NAME} rm --recursive --withWildcard bucketName "b[0-9]/*.pdf" Requires capability: - **listFiles** - **deleteFiles** """ B2_Command_Line_Tool-3.19.1/b2/_internal/console_tool.py000066400000000000000000005217031461201031300230000ustar00rootroot00000000000000#!/usr/bin/env python3 # PYTHON_ARGCOMPLETE_OK ###################################################################### # # File: b2/_internal/console_tool.py # # Copyright 2019 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### # ruff: noqa: E402 from __future__ import annotations import copy import tempfile import warnings from b2._internal._cli.autocomplete_cache import AUTOCOMPLETE # noqa from b2._internal._utils.python_compat import removeprefix AUTOCOMPLETE.autocomplete_from_cache() import argparse import base64 import contextlib import csv import dataclasses import datetime import functools import getpass import io import json import locale import logging import logging.config import os import pathlib import platform import queue import re import signal import subprocess import sys import threading import time import unicodedata from abc import ABCMeta, abstractmethod from concurrent.futures import Executor, Future, ThreadPoolExecutor from contextlib import suppress from enum import Enum from typing import Any, BinaryIO, List import b2sdk import requests import rst2ansi from b2sdk.v2 import ( ALL_CAPABILITIES, B2_ACCOUNT_INFO_DEFAULT_FILE, B2_ACCOUNT_INFO_ENV_VAR, B2_ACCOUNT_INFO_PROFILE_FILE, DEFAULT_MIN_PART_SIZE, DEFAULT_SCAN_MANAGER, NO_RETENTION_BUCKET_SETTING, REALM_URLS, SRC_LAST_MODIFIED_MILLIS, SSE_C_KEY_ID_FILE_INFO_KEY_NAME, STDOUT_FILEPATH, UNKNOWN_KEY_ID, XDG_CONFIG_HOME_ENV_VAR, AbstractAccountInfo, ApplicationKey, B2Api, BasicSyncEncryptionSettingsProvider, Bucket, BucketRetentionSetting, CompareVersionMode, DownloadedFile, EncryptionAlgorithm, EncryptionKey, EncryptionMode, EncryptionSetting, FileRetentionSetting, FileVersion, Filter, KeepOrDeleteMode, LegalHold, LifecycleRule, NewerFileSyncMode, ProgressReport, ReplicationConfiguration, ReplicationMonitor, ReplicationRule, ReplicationSetupHelper, RetentionMode, ScanPoliciesManager, Synchronizer, SyncReport, TqdmProgressListener, UploadMode, current_time_millis, escape_control_chars, get_included_sources, make_progress_listener, notification_rule_response_to_request, parse_sync_folder, points_to_fifo, substitute_control_chars, unprintable_to_hex, ) from b2sdk.v2.exception import ( B2Error, BadFileInfo, EmptyDirectory, FileNotPresent, MissingAccountData, NotADirectory, UnableToCreateDirectory, ) from b2sdk.version import VERSION as b2sdk_version from class_registry import ClassRegistry from tabulate import tabulate from b2._internal._cli.arg_parser_types import ( parse_comma_separated_list, parse_default_retention_period, parse_millis_from_float_timestamp, parse_range, ) from b2._internal._cli.argcompleters import file_name_completer from b2._internal._cli.autocomplete_install import ( SUPPORTED_SHELLS, AutocompleteInstallError, autocomplete_install, ) from b2._internal._cli.b2api import _get_b2api_for_profile, _get_inmemory_b2api from b2._internal._cli.b2args import ( add_b2_uri_argument, add_b2id_or_b2_uri_argument, add_b2id_or_file_like_b2_uri_argument, add_bucket_name_argument, get_keyid_and_key_from_env_vars, ) from b2._internal._cli.const import ( B2_APPLICATION_KEY_ENV_VAR, B2_APPLICATION_KEY_ID_ENV_VAR, 
B2_DESTINATION_SSE_C_KEY_B64_ENV_VAR, B2_DESTINATION_SSE_C_KEY_ID_ENV_VAR, B2_ENVIRONMENT_ENV_VAR, B2_ESCAPE_CONTROL_CHARACTERS, B2_SOURCE_SSE_C_KEY_B64_ENV_VAR, B2_USER_AGENT_APPEND_ENV_VAR, CREATE_BUCKET_TYPES, DEFAULT_THREADS, ) from b2._internal._cli.obj_dumps import readable_yaml_dump from b2._internal._cli.obj_loads import validated_loads from b2._internal._cli.shell import detect_shell, resolve_short_call_name from b2._internal._utils.uri import B2URI, B2FileIdURI, B2URIAdapter, B2URIBase from b2._internal.arg_parser import B2ArgumentParser, add_normalized_argument from b2._internal.json_encoder import B2CliJsonEncoder from b2._internal.version import VERSION piplicenses = None prettytable = None with suppress(ImportError): import piplicenses import prettytable logger = logging.getLogger(__name__) SEPARATOR = '=' * 40 # Enable to get 0.* behavior in the command-line tool. # Disable for 1.* behavior. VERSION_0_COMPATIBILITY = False def filter_out_empty_values(v, empty_marker=None): if isinstance(v, dict): d = {} for k, v in v.items(): new_v = filter_out_empty_values(v, empty_marker=empty_marker) if new_v is not empty_marker: d[k] = new_v return d or empty_marker return v def override_dict(base_dict, override): result = copy.deepcopy(base_dict) for k, v in override.items(): if isinstance(v, dict): result[k] = override_dict(result.get(k, {}), v) else: result[k] = v return result class NoControlCharactersStdout: def __init__(self, stdout): self.stdout = stdout def __getattr__(self, attr): return getattr(self.stdout, attr) def write(self, s): if s: s, cc_present = substitute_control_chars(s) if cc_present: logger.warning('WARNING: Control Characters were detected in the output') self.stdout.write(s) def resolve_b2_bin_call_name(argv: list[str] | None = None) -> str: call_name = resolve_short_call_name((argv or sys.argv)[0]) if call_name.endswith('.py'): version_name = re.search(r'[\\/]b2[\\/]_internal[\\/](_?b2v\d+)[\\/]__main__.py', call_name) call_name = version_name.group(1) if version_name else 'b2' return call_name FILE_RETENTION_COMPATIBILITY_WARNING = """ .. warning:: Setting file retention mode to '{}' is irreversible - such files can only be ever deleted after their retention period passes, regardless of keys (master or not) used. This is especially dangerous when setting bucket default retention, as it may lead to high storage costs. """.format(RetentionMode.COMPLIANCE.value) # Strings available to use when formatting doc strings. DOC_STRING_DATA = dict( B2_ACCOUNT_INFO_ENV_VAR=B2_ACCOUNT_INFO_ENV_VAR, B2_ACCOUNT_INFO_DEFAULT_FILE=B2_ACCOUNT_INFO_DEFAULT_FILE, B2_ACCOUNT_INFO_PROFILE_FILE=B2_ACCOUNT_INFO_PROFILE_FILE, XDG_CONFIG_HOME_ENV_VAR=XDG_CONFIG_HOME_ENV_VAR, B2_APPLICATION_KEY_ID_ENV_VAR=B2_APPLICATION_KEY_ID_ENV_VAR, B2_APPLICATION_KEY_ENV_VAR=B2_APPLICATION_KEY_ENV_VAR, B2_USER_AGENT_APPEND_ENV_VAR=B2_USER_AGENT_APPEND_ENV_VAR, B2_ENVIRONMENT_ENV_VAR=B2_ENVIRONMENT_ENV_VAR, B2_DESTINATION_SSE_C_KEY_B64_ENV_VAR=B2_DESTINATION_SSE_C_KEY_B64_ENV_VAR, B2_DESTINATION_SSE_C_KEY_ID_ENV_VAR=B2_DESTINATION_SSE_C_KEY_ID_ENV_VAR, B2_SOURCE_SSE_C_KEY_B64_ENV_VAR=B2_SOURCE_SSE_C_KEY_B64_ENV_VAR, SSE_C_KEY_ID_FILE_INFO_KEY_NAME=SSE_C_KEY_ID_FILE_INFO_KEY_NAME, FILE_RETENTION_COMPATIBILITY_WARNING=FILE_RETENTION_COMPATIBILITY_WARNING, ) class CommandError(B2Error): """ b2 command error (user caused). Accepts exactly one argument: message. We expect users of shell scripts will parse our ``__str__`` output. 
""" def __init__(self, message): super().__init__() self.message = message def __str__(self): return self.message def local_path_to_b2_path(path): """ Ensures that the separator in the path is '/', not '\'. :param path: A path from the local file system :return: A path that uses '/' as the separator. """ return path.replace(os.path.sep, '/') def keyboard_interrupt_handler(signum, frame): raise KeyboardInterrupt() def mixed_case_to_hyphens(s): return s[0].lower() + ''.join( c if c.islower() or c.isdigit() else '-' + c.lower() for c in s[1:] ) def apply_or_none(fcn, value): """ If the value is None, return None, otherwise return the result of applying the function to it. """ if value is None: return None else: return fcn(value) def format_account_info(account_info: AbstractAccountInfo) -> dict: allowed = account_info.get_allowed() allowed['capabilities'] = sorted(allowed['capabilities']) return dict( accountId=account_info.get_account_id(), accountFilePath=getattr( account_info, 'filename', None, ), # missing in StubAccountInfo in tests allowed=allowed, applicationKeyId=account_info.get_application_key_id(), applicationKey=account_info.get_application_key(), isMasterKey=account_info.is_master_key(), accountAuthToken=account_info.get_account_auth_token(), apiUrl=account_info.get_api_url(), downloadUrl=account_info.get_download_url(), s3endpoint=account_info.get_s3_api_url(), ) class DescriptionGetter: def __init__(self, described_cls, **kwargs): self.described_cls = described_cls self.kwargs = kwargs def __str__(self): return self.described_cls._get_description(**self.kwargs) class Described: """ Base class for Commands, providing them with tools for evaluating docstrings to CLI help texts. Allows for including superclasses' evaluated docstrings. """ @classmethod def _get_description(cls, **kwargs): mro_docs = { klass.__name__: klass.lazy_get_description(**kwargs) for klass in cls.mro() if klass is not cls and klass.__doc__ and issubclass(klass, Described) } return cls.__doc__.format(**kwargs, **DOC_STRING_DATA, **mro_docs) @classmethod def lazy_get_description(cls, **kwargs): return DescriptionGetter(cls, **kwargs) class JSONOptionMixin(Described): """ Use ``--json`` to get machine-readable output. Unless ``--json`` is used, the output is human-readable, and may change from one minor version to the next. Therefore, for scripting, it is strongly encouraged to use ``--json``. """ @classmethod def _setup_parser(cls, parser): parser.add_argument( '--json', action='store_true', help='output in JSON format to use in scripts' ) super()._setup_parser(parser) # noqa class DefaultSseMixin(Described): """ If you want server-side encryption for all of the files that are uploaded to a bucket, you can enable SSE-B2 encryption as a default setting for the bucket. In order to do that pass ``--default-server-side-encryption=SSE-B2``. The default algorithm is set to AES256 which can by changed with ``--default-server-side-encryption-algorithm`` parameter. All uploads to that bucket, from the time default encryption is enabled onward, will then be encrypted with SSE-B2 by default. To disable default bucket encryption, use ``--default-server-side-encryption=none``. If ``--default-server-side-encryption`` is not provided, default server side encryption is determined by the server. .. note:: Note that existing files in the bucket are not affected by default bucket encryption settings. 
""" @classmethod def _setup_parser(cls, parser): add_normalized_argument( parser, '--default-server-side-encryption', default=None, choices=('SSE-B2', 'none') ) add_normalized_argument( parser, '--default-server-side-encryption-algorithm', default='AES256', choices=('AES256',) ) super()._setup_parser(parser) # noqa @classmethod def _get_default_sse_setting(cls, args): mode = apply_or_none(EncryptionMode, args.default_server_side_encryption) if mode is not None: if mode == EncryptionMode.NONE: args.default_server_side_encryption_algorithm = None algorithm = apply_or_none( EncryptionAlgorithm, args.default_server_side_encryption_algorithm ) return EncryptionSetting(mode=mode, algorithm=algorithm) return None class DestinationSseMixin(Described): """ To request SSE-B2 or SSE-C encryption for destination files, please set ``--destination-server-side-encryption=SSE-B2/SSE-C``. The default algorithm is set to AES256 which can be changed with ``--destination-server-side-encryption-algorithm`` parameter. Using SSE-C requires providing ``{B2_DESTINATION_SSE_C_KEY_B64_ENV_VAR}`` environment variable, containing the base64 encoded encryption key. If ``{B2_DESTINATION_SSE_C_KEY_ID_ENV_VAR}`` environment variable is provided, it's value will be saved as ``{SSE_C_KEY_ID_FILE_INFO_KEY_NAME}`` in the uploaded file's fileInfo. """ @classmethod def _setup_parser(cls, parser): add_normalized_argument( parser, '--destination-server-side-encryption', default=None, choices=('SSE-B2', 'SSE-C') ) add_normalized_argument( parser, '--destination-server-side-encryption-algorithm', default='AES256', choices=('AES256',) ) super()._setup_parser(parser) # noqa def _get_destination_sse_setting(self, args): mode = apply_or_none(EncryptionMode, args.destination_server_side_encryption) if mode is not None: algorithm = apply_or_none( EncryptionAlgorithm, args.destination_server_side_encryption_algorithm ) if mode == EncryptionMode.SSE_B2: key = None elif mode == EncryptionMode.SSE_C: encryption_key_b64 = os.environ.get(B2_DESTINATION_SSE_C_KEY_B64_ENV_VAR) if not encryption_key_b64: raise ValueError( 'Using SSE-C requires providing an encryption key via %s env var' % B2_DESTINATION_SSE_C_KEY_B64_ENV_VAR ) key_id = os.environ.get(B2_DESTINATION_SSE_C_KEY_ID_ENV_VAR) if key_id is None: self._print_stderr( f'Encrypting file(s) with SSE-C without providing key id. ' f'Set {B2_DESTINATION_SSE_C_KEY_ID_ENV_VAR} to allow key identification.' ) key = EncryptionKey(secret=base64.b64decode(encryption_key_b64), key_id=key_id) else: raise NotImplementedError(f'Unsupported encryption mode for writes: {mode.value}') return EncryptionSetting(mode=mode, algorithm=algorithm, key=key) return None class FileRetentionSettingMixin(Described): """ Setting file retention settings requires the **writeFileRetentions** capability, and only works in bucket with fileLockEnabled=true. Providing ``--file-retention-mode`` requires providing ``--retain-until`` which has to be a future timestamp, in the form of an integer representing milliseconds since epoch. Leaving out these options results in a file retained according to bucket defaults. 
""" @classmethod def _setup_parser(cls, parser): add_normalized_argument( parser, '--file-retention-mode', default=None, choices=(RetentionMode.COMPLIANCE.value, RetentionMode.GOVERNANCE.value) ) add_normalized_argument( parser, '--retain-until', type=parse_millis_from_float_timestamp, default=None, metavar='TIMESTAMP' ) super()._setup_parser(parser) # noqa @classmethod def _get_file_retention_setting(cls, args): if (args.file_retention_mode is None) != (args.retain_until is None): raise ValueError( 'provide either both --retain-until and --file-retention-mode or none of them' ) file_retention_mode = apply_or_none(RetentionMode, args.file_retention_mode) if file_retention_mode is None: return None return FileRetentionSetting(file_retention_mode, args.retain_until) class HeaderFlagsMixin(Described): @classmethod def _setup_parser(cls, parser: argparse.ArgumentParser) -> None: add_normalized_argument( parser, '--cache-control', help= "optional Cache-Control header, value based on RFC 2616 section 14.9, example: 'public, max-age=86400')" ) add_normalized_argument( parser, '--content-disposition', help= "optional Content-Disposition header, value based on RFC 2616 section 19.5.1, example: 'attachment; filename=\"fname.ext\"'" ) add_normalized_argument( parser, '--content-encoding', help= "optional Content-Encoding header, value based on RFC 2616 section 14.11, example: 'gzip'" ) add_normalized_argument( parser, '--content-language', help= "optional Content-Language header, value based on RFC 2616 section 14.12, example: 'mi, en'" ) add_normalized_argument( parser, '--expires', help= "optional Expires header, value based on RFC 2616 section 14.21, example: 'Thu, 01 Dec 2050 16:00:00 GMT'" ) super()._setup_parser(parser) def _file_info_with_header_args(self, args, file_info: dict[str, str] | None) -> dict[str, str] | None: """Construct an updated file_info dictionary. Print a warning if any of file_info items will be overwritten by explicit header arguments. """ add_file_info = {} overwritten = [] if args.cache_control is not None: add_file_info['b2-cache-control'] = args.cache_control if args.content_disposition is not None: add_file_info['b2-content-disposition'] = args.content_disposition if args.content_encoding is not None: add_file_info['b2-content-encoding'] = args.content_encoding if args.content_language is not None: add_file_info['b2-content-language'] = args.content_language if args.expires is not None: add_file_info['b2-expires'] = args.expires for key, value in add_file_info.items(): if file_info is not None and key in file_info and file_info[key] != value: overwritten.append(key) if overwritten: self._print_stderr( 'The following file info items will be overwritten by explicit arguments:\n ' + '\n '.join(f'{key} = {add_file_info[key]}' for key in overwritten) ) if add_file_info: return {**(file_info or {}), **add_file_info} return file_info class LegalHoldMixin(Described): """ Setting legal holds requires the **writeFileLegalHolds** capability, and only works in bucket with fileLockEnabled=true. """ @classmethod def _setup_parser(cls, parser): add_normalized_argument( parser, '--legal-hold', default=None, choices=(LegalHold.ON.value, LegalHold.OFF.value) ) super()._setup_parser(parser) # noqa @classmethod def _get_legal_hold_setting(cls, args) -> LegalHold: return apply_or_none(LegalHold.from_string_or_none, args.legal_hold) class SourceSseMixin(Described): """ To access SSE-C encrypted files, please set ``--source-server-side-encryption=SSE-C``. 
The default algorithm is set to AES256, which can be changed with the ``--source-server-side-encryption-algorithm`` parameter. Using SSE-C requires providing the ``{B2_SOURCE_SSE_C_KEY_B64_ENV_VAR}`` environment variable, containing the base64 encoded encryption key. """ @classmethod def _setup_parser(cls, parser): add_normalized_argument( parser, '--source-server-side-encryption', default=None, choices=('SSE-C',) ) add_normalized_argument( parser, '--source-server-side-encryption-algorithm', default='AES256', choices=('AES256',) ) super()._setup_parser(parser) # noqa @classmethod def _get_source_sse_setting(cls, args): mode = apply_or_none(EncryptionMode, args.source_server_side_encryption) if mode is not None: algorithm = apply_or_none( EncryptionAlgorithm, args.source_server_side_encryption_algorithm ) key = None if mode == EncryptionMode.SSE_C: encryption_key_b64 = os.environ.get(B2_SOURCE_SSE_C_KEY_B64_ENV_VAR) if not encryption_key_b64: raise ValueError( 'Using SSE-C requires providing an encryption key via %s env var' % B2_SOURCE_SSE_C_KEY_B64_ENV_VAR ) key = EncryptionKey( secret=base64.b64decode(encryption_key_b64), key_id=UNKNOWN_KEY_ID ) else: raise NotImplementedError( f'Encryption modes other than {EncryptionMode.SSE_C.value} are not supported in reads' ) return EncryptionSetting(mode=mode, algorithm=algorithm, key=key) return None class WriteBufferSizeMixin(Described): """ Use --write-buffer-size to set the size (in bytes) of the buffer used to write files. """ @classmethod def _setup_parser(cls, parser): add_normalized_argument(parser, '--write-buffer-size', type=int, metavar='BYTES') super()._setup_parser(parser) # noqa class SkipHashVerificationMixin(Described): """ Use --skip-hash-verification to disable the hash check on downloaded files. """ @classmethod def _setup_parser(cls, parser): add_normalized_argument( parser, '--skip-hash-verification', action='store_true', default=False ) super()._setup_parser(parser) # noqa class MaxDownloadStreamsMixin(Described): """ Use --max-download-streams-per-file to set the maximum number of streams for the parallel downloader. """ @classmethod def _setup_parser(cls, parser): add_normalized_argument(parser, '--max-download-streams-per-file', type=int) super()._setup_parser(parser) # noqa class FileIdAndOptionalFileNameMixin(Described): """ Specifying the ``fileName`` is more efficient than leaving it out. If you omit the ``fileName``, it requires an initial query to B2 to get the file name, before making the call to delete the file. This extra query requires the ``readFiles`` capability.
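For example, with ``delete-file-version`` both invocations below are accepted, but the first one avoids the extra query (illustrative; the file name and ID are placeholders):

.. code-block::

    {NAME} delete-file-version src/hello.txt 4_z123456789abcdef_f0001
    {NAME} delete-file-version 4_z123456789abcdef_f0001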
""" @classmethod def _setup_parser(cls, parser): parser.add_argument('fileName', nargs='?') parser.add_argument('fileId') super()._setup_parser(parser) # noqa def _get_file_name_from_args(self, args): if args.fileName is not None: return args.fileName file_info = self.api.get_file_info(args.fileId) return file_info.file_name class B2URIFileArgMixin: @classmethod def _setup_parser(cls, parser): add_b2id_or_file_like_b2_uri_argument(parser) super()._setup_parser(parser) def get_b2_uri_from_arg(self, args: argparse.Namespace) -> B2URIBase: return args.B2_URI class B2URIFileIDArgMixin: @classmethod def _setup_parser(cls, parser): parser.add_argument('fileId') super()._setup_parser(parser) def get_b2_uri_from_arg(self, args: argparse.Namespace) -> B2URIBase: return B2FileIdURI(args.fileId) class B2URIBucketArgMixin: @classmethod def _setup_parser(cls, parser): add_bucket_name_argument(parser) super()._setup_parser(parser) def get_b2_uri_from_arg(self, args: argparse.Namespace) -> B2URIBase: return B2URI(args.bucketName) class B2URIBucketNFilenameArgMixin: @classmethod def _setup_parser(cls, parser): add_bucket_name_argument(parser) parser.add_argument('fileName').completion = file_name_completer super()._setup_parser(parser) def get_b2_uri_from_arg(self, args: argparse.Namespace) -> B2URIBase: return B2URI(args.bucketName, args.fileName) class B2URIBucketNFolderNameArgMixin: ALLOW_ALL_BUCKETS: bool = False @classmethod def _setup_parser(cls, parser): add_bucket_name_argument(parser, nargs='?' if cls.ALLOW_ALL_BUCKETS else None) parser.add_argument('folderName', nargs='?').completer = file_name_completer super()._setup_parser(parser) def get_b2_uri_from_arg(self, args: argparse.Namespace) -> B2URI: return B2URI(removeprefix(args.bucketName or '', "b2://"), args.folderName or '') class B2IDOrB2URIMixin: ALLOW_ALL_BUCKETS: bool = False @classmethod def _setup_parser(cls, parser): add_b2id_or_b2_uri_argument(parser, allow_all_buckets=cls.ALLOW_ALL_BUCKETS) super()._setup_parser(parser) def get_b2_uri_from_arg(self, args: argparse.Namespace) -> B2URI | B2FileIdURI: return args.B2_URI class UploadModeMixin(Described): """ Use --incremental-mode to allow for incremental file uploads to safe bandwidth. This will only affect files, which have been appended to since last upload. """ @classmethod def _setup_parser(cls, parser): add_normalized_argument(parser, '--incremental-mode', action='store_true') super()._setup_parser(parser) # noqa @staticmethod def _get_upload_mode_from_args(args): if args.incremental_mode: return UploadMode.INCREMENTAL return UploadMode.FULL class ProgressMixin(Described): """ If the ``tqdm`` library is installed, progress bar is displayed on stderr. Without it, simple text progress is printed. Use ``--no-progress`` to disable progress reporting (marginally improves performance in some cases). """ @classmethod def _setup_parser(cls, parser): add_normalized_argument( parser, '--no-progress', action='store_true', help="progress will not be reported" ) super()._setup_parser(parser) # noqa class LifecycleRulesMixin(Described): """ Use `--lifecycle-rule` to set lifecycle rule for the bucket. Multiple rules can be specified by repeating the option. `--lifecycle-rules` option is deprecated and cannot be used together with --lifecycle-rule. 
""" @classmethod def _setup_parser(cls, parser): lifecycle_group = parser.add_mutually_exclusive_group() add_normalized_argument( lifecycle_group, '--lifecycle-rule', action='append', default=None, type=functools.partial(validated_loads, expected_type=LifecycleRule), dest='lifecycle_rules', help="Lifecycle rule in JSON format. Can be supplied multiple times.", ) add_normalized_argument( lifecycle_group, '--lifecycle-rules', type=functools.partial(validated_loads, expected_type=List[LifecycleRule]), help= "(deprecated; use --lifecycle-rule instead) List of lifecycle rules in JSON format.", ) super()._setup_parser(parser) # noqa class ThreadsMixin(Described): """ Use --threads to manually adjust the number of threads used in the operation. Otherwise, the number of threads will be automatically chosen. """ @classmethod def _setup_parser(cls, parser): parser.add_argument('--threads', type=int, default=None) super()._setup_parser(parser) # noqa def _get_threads_from_args(self, args) -> int: return args.threads or DEFAULT_THREADS def _set_threads_from_args(self, args): threads = self._get_threads_from_args(args) self.api.services.download_manager.set_thread_pool_size(threads) self.api.services.upload_manager.set_thread_pool_size(threads) class _TqdmCloser: """ On OSX using Tqdm with b2sdk causes semaphore leaks. This fix is located here and not in b2sdk, because after this cleanup Tqdm might not work properly, therefore it's best to do it when exiting a python process. """ def __init__(self, progress_listener): self.progress_listener = progress_listener def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): if sys.platform != "darwin" or os.environ.get('B2_TEST_DISABLE_TQDM_CLOSER'): return try: from multiprocessing.synchronize import SemLock tqdm_lock = self.progress_listener.tqdm.get_lock() if tqdm_lock.mp_lock._semlock.name is not None: SemLock._cleanup(tqdm_lock.mp_lock._semlock.name) except Exception as ex: logger.debug('Error encountered during Tqdm cleanup', exc_info=ex) class Command(Described, metaclass=ABCMeta): COMMAND_NAME: str | None = None # Set to True for commands that receive sensitive information in arguments FORBID_LOGGING_ARGUMENTS = False deprecated = False # The registry for the subcommands, should be reinitialized in subclass subcommands_registry = None # set to False for commands not requiring b2 authentication REQUIRES_AUTH = True def __init__(self, console_tool): self.console_tool = console_tool self.api = B2URIAdapter(console_tool.api) self.stdout = console_tool.stdout self.stderr = console_tool.stderr self.quiet = False self.escape_control_characters = True self.exit_stack = contextlib.ExitStack() def make_progress_listener(self, file_name: str, quiet: bool): progress_listener = make_progress_listener(file_name, quiet) self.exit_stack.enter_context(progress_listener) if isinstance(progress_listener, TqdmProgressListener): self.exit_stack.enter_context(_TqdmCloser(progress_listener)) return progress_listener @classmethod def name_and_alias(cls): name = mixed_case_to_hyphens(cls.COMMAND_NAME or cls.__name__) alias = None if '-' in name: alias = name.replace('-', '_') return name, alias @classmethod def register_subcommand(cls, command_class): assert cls.subcommands_registry is not None, 'Initialize the registry class' name, _ = command_class.name_and_alias() decorator = cls.subcommands_registry.register(key=name)(command_class) return decorator @classmethod def create_parser( cls, subparsers: argparse._SubParsersAction | None = None, 
parents=None, for_docs=False, name: str | None = None, b2_binary_name: str | None = None, ) -> argparse.ArgumentParser: """ Creates a parser for the command. :param subparsers: subparsers object to which add new parser :param parents: created ArgumentParser `parents`, see `argparse.ArgumentParser` :param for_docs: if parser is to be used for documentation generation :param name: action name :param b2_binary_name: B2 binary call name :return: created parser """ if parents is None: parents = [] b2_binary_name = b2_binary_name or resolve_b2_bin_call_name() description = cls._get_description(NAME=b2_binary_name) if name: alias = None else: name, alias = cls.name_and_alias() parser_kwargs = dict( prog=name, description=description, parents=parents, for_docs=for_docs, deprecated=cls.deprecated, ) if subparsers is None: parser = B2ArgumentParser(**parser_kwargs) else: parser = subparsers.add_parser( parser_kwargs.pop('prog'), **parser_kwargs, aliases=[alias] if alias is not None and not for_docs else (), add_help_all=False, ) # Register class that will handle this particular command, for both name and alias. parser.set_defaults(command_class=cls) cls._setup_parser(parser) if cls.subcommands_registry: if not parents: common_parser = B2ArgumentParser(add_help=False, add_help_all=False) add_normalized_argument( common_parser, '--debug-logs', action='store_true', help=argparse.SUPPRESS ) common_parser.add_argument('--verbose', action='store_true', help=argparse.SUPPRESS) add_normalized_argument(common_parser, '--log-config', help=argparse.SUPPRESS) common_parser.add_argument('--profile', default=None, help=argparse.SUPPRESS) common_parser.add_argument( '-q', '--quiet', action='store_true', default=False, help=argparse.SUPPRESS ) common_parser.add_argument( '--escape-control-characters', action='store_true', help=argparse.SUPPRESS ) common_parser.add_argument( '--no-escape-control-characters', dest='escape_control_characters', action='store_false', help=argparse.SUPPRESS ) common_parser.set_defaults(escape_control_characters=None) parents = [common_parser] subparsers = parser.add_subparsers( prog=parser.prog, title='usages', dest='command', parser_class=B2ArgumentParser, ) subparsers.required = True for subcommand in cls.subcommands_registry.values(): subcommand.create_parser( subparsers=subparsers, parents=parents, for_docs=for_docs, b2_binary_name=b2_binary_name ) return parser def run(self, args): self.quiet = args.quiet self.escape_control_characters = args.escape_control_characters with self.exit_stack: return self._run(args) @abstractmethod def _run(self, args) -> int: ... 
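# Note for implementers: subclasses provide the command body in `_run`; `run()` # above wraps it in `self.exit_stack` so that progress listeners and other # context managers registered during execution (see `make_progress_listener`) # are reliably closed when the command finishes.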
@classmethod def _setup_parser(cls, parser): pass @classmethod def _parse_file_infos(cls, args_info): file_infos = {} for info in args_info: parts = info.split('=', 1) if len(parts) == 1: raise BadFileInfo(info) file_infos[parts[0]] = parts[1] return file_infos def _print_json(self, data) -> None: return self._print( json.dumps(data, indent=4, sort_keys=True, ensure_ascii=True, cls=B2CliJsonEncoder), enforce_output=True ) def _print_human_readable_structure(self, data) -> None: output = io.StringIO() readable_yaml_dump(data, output) return self._print(output.getvalue().rstrip()) def _print( self, *args, enforce_output: bool = False, end: str | None = None, ) -> None: return self._print_standard_descriptor( self.stdout, "stdout", *args, enforce_output=enforce_output, end=end, ) def _print_stderr(self, *args, end: str | None = None) -> None: return self._print_standard_descriptor( self.stderr, "stderr", *args, enforce_output=True, end=end ) def _print_standard_descriptor( self, descriptor, descriptor_name: str, *args, enforce_output: bool = False, end: str | None = None, ) -> None: """ Prints to fd, unless quiet is set. :param descriptor: file descriptor to print to :param descriptor_name: name of the descriptor, used for error reporting :param args: object to be printed :param enforce_output: overrides quiet setting; Should not be used for anything other than data :param end: end of the line characters; None for default newline """ if not self.quiet or enforce_output: self._print_helper( descriptor, descriptor.encoding, descriptor_name, *args, end=end, sanitize=self.escape_control_characters ) @classmethod def _print_helper( cls, descriptor, descriptor_encoding: str, descriptor_name: str, *args, sanitize: bool = True, end: str | None = None ): if sanitize: args = tuple(unprintable_to_hex(arg) or '' for arg in args) try: descriptor.write(' '.join(args)) except UnicodeEncodeError: sys.stderr.write( "\nWARNING: Unable to print unicode. Encoding for {} is: '{}'\n".format( descriptor_name, descriptor_encoding, ) ) args = [arg.encode('ascii', 'backslashreplace').decode() for arg in args] sys.stderr.write("Trying to print: %s\n" % args) descriptor.write(' '.join(args)) descriptor.write("\n" if end is None else end) def __str__(self): return f'{self.__class__.__module__}.{self.__class__.__name__}' class CmdReplacedByMixin: deprecated = True replaced_by_cmd: type[Command] def run(self, args): self._print_stderr( f'WARNING: {self.__class__.name_and_alias()[0]} command is deprecated. ' f'Use {self.replaced_by_cmd.name_and_alias()[0]} instead.' ) return super().run(args) @classmethod def _get_description(cls, **kwargs): return ( f'{super()._get_description(**kwargs)}\n\n' f'.. warning::\n' f' This command is deprecated. Use ``{cls.replaced_by_cmd.name_and_alias()[0]}`` instead.\n' ) class B2(Command): """ This program provides command-line access to the B2 service. There are two flows of authorization: * call ``{NAME} authorize-account`` and have the credentials cached in sqlite * set ``{B2_APPLICATION_KEY_ID_ENV_VAR}`` and ``{B2_APPLICATION_KEY_ENV_VAR}`` environment variables when running this program This program caches authentication-related and other data in a local SQLite database. 
The location of this database is determined in the following way: If ``--profile`` arg is provided: * ``{XDG_CONFIG_HOME_ENV_VAR}/b2/db-.sqlite``, if ``{XDG_CONFIG_HOME_ENV_VAR}`` env var is set * ``{B2_ACCOUNT_INFO_PROFILE_FILE}`` Otherwise: * ``{B2_ACCOUNT_INFO_ENV_VAR}`` env var's value, if set * ``{B2_ACCOUNT_INFO_DEFAULT_FILE}``, if it exists * ``{XDG_CONFIG_HOME_ENV_VAR}/b2/account_info``, if ``{XDG_CONFIG_HOME_ENV_VAR}`` env var is set * ``{B2_ACCOUNT_INFO_DEFAULT_FILE}``, as default If the directory ``{XDG_CONFIG_HOME_ENV_VAR}/b2`` does not exist (and is needed), it is created. Please note that the above rules may change in future versions of b2sdk, so in order to get a reliable authentication file location you should use ``b2 get-account-info``. Control character escaping is turned on when running under a terminal. You can override it by explicitly using the ``--escape-control-characters``/``--no-escape-control-characters`` options, or by setting the ``B2_ESCAPE_CONTROL_CHARACTERS`` environment variable to either ``1`` or ``0``. You can suppress command stdout & stderr output by using the ``--quiet`` option. To suppress only the progress bar, use the ``--no-progress`` option. For more details on one command: .. code-block:: {NAME} --help When authorizing with application keys, this tool requires that the key have the ``listBuckets`` capability so that it can take the bucket names you provide on the command line and translate them into bucket IDs for the B2 Storage service. Each command may require additional capabilities. You can find the details for each command in the help for that command. A string provided via an optional environment variable ``{B2_USER_AGENT_APPEND_ENV_VAR}`` will be appended to the User-Agent. """ REQUIRES_AUTH = False subcommands_registry = ClassRegistry() @classmethod def name_and_alias(cls): return resolve_b2_bin_call_name(), None def _run(self, args): # Commands could be named via name or alias, so we fetch # the command from args assigned during parser preparation. return args.command_class class AuthorizeAccount(Command): """ Prompts for Backblaze ``applicationKeyId`` and ``applicationKey`` (unless they are given on the command line). You can authorize with either the master application key or a normal application key. To use the master application key, provide the application key ID and application key from the ``B2 Cloud Storage Buckets`` page on the web site: https://secure.backblaze.com/b2_buckets.htm To use a normal application key, created with the ``create-key`` command or on the web site, provide the application key ID and the application key itself. You can also optionally provide the application key ID and application key using the environment variables ``{B2_APPLICATION_KEY_ID_ENV_VAR}`` and ``{B2_APPLICATION_KEY_ENV_VAR}`` respectively. Stores an account auth token in a local cache, see .. code-block:: {NAME} --help for details on how the location of this cache is determined.
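For example (an illustrative invocation; the key ID and application key shown are placeholders):

.. code-block::

    {NAME} authorize-account 000aaaabbbbccc000000000a yourApplicationKey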
Requires capability: - **listBuckets** """ FORBID_LOGGING_ARGUMENTS = True REQUIRES_AUTH = False @classmethod def _setup_parser(cls, parser): realm_group = parser.add_mutually_exclusive_group() realm_group.add_argument('--dev', action='store_true', help=argparse.SUPPRESS) realm_group.add_argument('--staging', action='store_true', help=argparse.SUPPRESS) realm_group.add_argument('--environment', help=argparse.SUPPRESS) parser.add_argument('applicationKeyId', nargs='?') parser.add_argument('applicationKey', nargs='?') super()._setup_parser(parser) def _run(self, args): # Handle internal options for testing inside Backblaze. # These are not documented in the usage string. realm = self._get_user_requested_realm(args) if args.applicationKeyId is None: args.applicationKeyId = ( os.environ.get(B2_APPLICATION_KEY_ID_ENV_VAR) or input('Backblaze application key ID: ') ) if args.applicationKey is None: args.applicationKey = ( os.environ.get(B2_APPLICATION_KEY_ENV_VAR) or getpass.getpass('Backblaze application key: ') ) status = self.authorize(args.applicationKeyId, args.applicationKey, realm) if status == 0: data = format_account_info(self.api.account_info) self._print_json(data) return status def authorize(self, application_key_id, application_key, realm: str | None): """ Perform the authorization and capability checks, report errors. :param application_key_id: application key ID used to authenticate :param application_key: application key :param realm: authorization realm; if None, production is used :return: exit status """ verbose_realm = bool(realm) realm = realm or 'production' url = REALM_URLS.get(realm, realm) logger.info(f"Using {url}") if verbose_realm: self._print_stderr(f'Using {url}') try: self.api.authorize_account(realm, application_key_id, application_key) allowed = self.api.account_info.get_allowed() if 'listBuckets' not in allowed['capabilities']: logger.error( 'ConsoleTool cannot work with a bucket-restricted key and no listBuckets capability' ) self._print_stderr( 'ERROR: application key has no listBuckets capability, which is required for the b2 command-line tool' ) self.api.account_info.clear() return 1 if allowed['bucketId'] is not None and allowed['bucketName'] is None: logger.error('ConsoleTool has bucket-restricted key and the bucket does not exist') self._print_stderr( "ERROR: application key is restricted to bucket id '{}', which no longer exists" .format(allowed['bucketId']) ) self.api.account_info.clear() return 1 return 0 except B2Error as e: logger.exception('ConsoleTool account authorization error') self._print_stderr('ERROR: unable to authorize account: ' + str(e)) return 1 @classmethod def _get_user_requested_realm(cls, args) -> str | None: """ Determine the realm to use for authorization. """ if args.dev: return 'dev' if args.staging: return 'staging' if args.environment: return args.environment return os.environ.get(B2_ENVIRONMENT_ENV_VAR) class CancelAllUnfinishedLargeFiles(Command): """ Lists all large files that have been started but not finished and cancels them. Any parts that have been uploaded will be deleted. 
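For example (illustrative; ``my-bucket`` is a placeholder name):

.. code-block::

    {NAME} cancel-all-unfinished-large-files my-bucket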
Requires capability: - **listFiles** - **writeFiles** """ @classmethod def _setup_parser(cls, parser): add_bucket_name_argument(parser) super()._setup_parser(parser) def _run(self, args): bucket = self.api.get_bucket_by_name(args.bucketName) for file_version in bucket.list_unfinished_large_files(): bucket.cancel_large_file(file_version.file_id) self._print(file_version.file_id, 'canceled') return 0 class CancelLargeFile(Command): """ Cancels a large file upload. Used to undo a ``start-large-file``. Cannot be used once the file is finished; once it is, use ``delete-file-version`` to delete the large file. Requires capability: - **writeFiles** """ @classmethod def _setup_parser(cls, parser): parser.add_argument('fileId') super()._setup_parser(parser) def _run(self, args): self.api.cancel_large_file(args.fileId) self._print(args.fileId, 'canceled') return 0 class ClearAccount(Command): """ Erases everything in the local cache. See .. code-block:: {NAME} --help for details on how the location of this cache is determined. """ REQUIRES_AUTH = False def _run(self, args): self.api.account_info.clear() return 0 class CopyFileById( HeaderFlagsMixin, DestinationSseMixin, SourceSseMixin, FileRetentionSettingMixin, LegalHoldMixin, Command ): """ Copy a file version to the given bucket (server-side, **not** via download+upload). Copies the contents of the source B2 file to the destination bucket and assigns the given name to the new B2 file, possibly setting options like server-side encryption and retention. {FILE_RETENTION_COMPATIBILITY_WARNING} By default, it copies the file info and content type, therefore ``--content-type`` and ``--info`` are optional. If one of them is set, the other has to be set as well. To force the destination file to have empty fileInfo, use ``--no-info``. By default, the whole file gets copied, but you can copy an (inclusive!) range of bytes from the source file to the new file using the ``--range`` option. Each ``--info`` entry is in the form ``a=b``; you can specify many. The maximum file size is 5GB or 10TB, depending on the capabilities of the installed ``b2sdk`` version. {DestinationSseMixin} {SourceSseMixin} {FileRetentionSettingMixin} {LegalHoldMixin} If either the source or the destination uses SSE-C and ``--content-type`` and ``--info`` are not provided, then to perform the copy the source file's metadata has to be fetched first - an additional request to B2 cloud has to be made. To achieve that, provide ``--fetch-metadata``. Without that flag, the command will fail.
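For example (an illustrative invocation; the source file ID, bucket and file names are placeholders):

.. code-block::

    {NAME} copy-file-by-id 4_z123456789abcdef_f0001 my-destination-bucket destination/file.txt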
Requires capability: - **readFiles** (if ``sourceFileId`` bucket is private) - **writeFiles** """ @classmethod def _setup_parser(cls, parser): add_normalized_argument(parser, '--fetch-metadata', action='store_true', default=False) add_normalized_argument( parser, '--metadata-directive', default=None, help=argparse.SUPPRESS ) add_normalized_argument(parser, '--content-type') parser.add_argument('--range', type=parse_range) info_group = parser.add_mutually_exclusive_group() add_normalized_argument(info_group, '--info', action='append') add_normalized_argument(info_group, '--no-info', action='store_true', default=False) parser.add_argument('sourceFileId') parser.add_argument('destinationBucketName') parser.add_argument('b2FileName') super()._setup_parser(parser) # add parameters from the mixins def _run(self, args): file_infos = None if args.info: file_infos = self._parse_file_infos(args.info) elif args.no_info: file_infos = {} file_infos = self._file_info_with_header_args(args, file_infos) if args.metadata_directive is not None: self._print_stderr( '--metadata-directive is deprecated, the value of this argument is determined based on the existence of ' '--content-type and --info.' ) bucket = self.api.get_bucket_by_name(args.destinationBucketName) destination_encryption_setting = self._get_destination_sse_setting(args) source_encryption_setting = self._get_source_sse_setting(args) legal_hold = self._get_legal_hold_setting(args) file_retention = self._get_file_retention_setting(args) if args.range is not None: range_args = { 'offset': args.range[0], 'length': args.range[1] - args.range[0] + 1, } else: range_args = {} source_file_info, source_content_type = self._determine_source_metadata( source_file_id=args.sourceFileId, source_encryption=source_encryption_setting, destination_encryption=destination_encryption_setting, target_content_type=args.content_type, target_file_info=file_infos, fetch_if_necessary=args.fetch_metadata, ) file_version = bucket.copy( args.sourceFileId, args.b2FileName, **range_args, content_type=args.content_type, file_info=file_infos, destination_encryption=destination_encryption_setting, source_encryption=source_encryption_setting, legal_hold=legal_hold, file_retention=file_retention, source_file_info=source_file_info, source_content_type=source_content_type, ) self._print_json(file_version) return 0 def _is_ssec(self, encryption: EncryptionSetting | None): if encryption is not None and encryption.mode == EncryptionMode.SSE_C: return True return False def _determine_source_metadata( self, source_file_id: str, destination_encryption: EncryptionSetting | None, source_encryption: EncryptionSetting | None, target_file_info: dict | None, target_content_type: str | None, fetch_if_necessary: bool, ) -> tuple[dict | None, str | None]: """Determine if source file metadata is necessary to perform the copy - due to sse_c_key_id""" if not self._is_ssec(source_encryption) and not self._is_ssec( destination_encryption ): # no sse-c, no problem return None, None if target_file_info is not None or target_content_type is not None: # metadataDirective=REPLACE, no problem return None, None if not fetch_if_necessary: raise ValueError( 'Attempting to copy file with metadata while either source or destination uses ' 'SSE-C. Use --fetch-metadata to fetch source file metadata before copying.' 
) source_file_version = self.api.get_file_info(source_file_id) return source_file_version.file_info, source_file_version.content_type class CreateBucket(DefaultSseMixin, LifecycleRulesMixin, Command): """ Creates a new bucket. Prints the ID of the bucket created. Optionally stores bucket info, CORS rules and lifecycle rules with the bucket. These can be given as JSON on the command line. {DefaultSseMixin} {LifecycleRulesMixin} Requires capability: - **writeBuckets** - **readBucketEncryption** - **writeBucketEncryption** - **writeBucketRetentions** """ @classmethod def _setup_parser(cls, parser): add_normalized_argument(parser, '--bucket-info', type=validated_loads) add_normalized_argument( parser, '--cors-rules', type=validated_loads, help= "If given, the bucket will have a 'custom' CORS configuration. Accepts a JSON string." ) add_normalized_argument( parser, '--file-lock-enabled', action='store_true', help= "If given, the bucket will have the file lock mechanism enabled. This parameter cannot be changed after bucket creation." ) parser.add_argument('--replication', type=validated_loads) add_bucket_name_argument(parser) parser.add_argument('bucketType', choices=CREATE_BUCKET_TYPES) super()._setup_parser(parser) # add parameters from the mixins def _run(self, args): encryption_setting = self._get_default_sse_setting(args) bucket = self.api.create_bucket( args.bucketName, args.bucketType, bucket_info=args.bucket_info, cors_rules=args.cors_rules, lifecycle_rules=args.lifecycle_rules, default_server_side_encryption=encryption_setting, is_file_lock_enabled=args.file_lock_enabled, replication=args.replication and ReplicationConfiguration.from_dict(args.replication), ) self._print(bucket.id_) return 0 class CreateKey(Command): """ Creates a new application key. Prints the application key information. This is the only time the application key itself will be returned. Listing application keys will show their IDs, but not the secret keys. The capabilities are passed in as a comma-separated list, like ``readFiles,writeFiles``. Optionally, you can pass all capabilities known to this client with ``--all-capabilities``. The ``duration`` is the length of time (in seconds) the new application key will exist. When the time expires the key will disappear and will no longer be usable. If not specified, the key will not expire. The ``bucket`` is the name of a bucket in the account. When specified, the key will only allow access to that bucket. The ``namePrefix`` restricts file access to files whose names start with the prefix. The output is the new application key ID, followed by the application key itself. The two values returned are the two that you pass to ``authorize-account`` to use the key. 
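For example, to create a key restricted to one bucket with two capabilities (illustrative; the bucket and key names are placeholders):

.. code-block::

    {NAME} create-key --bucket my-bucket my-key listFiles,readFiles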
Requires capability: - **writeKeys** """ @classmethod def _setup_parser(cls, parser): parser.add_argument('--bucket') add_normalized_argument(parser, '--name-prefix') parser.add_argument('--duration', type=int) parser.add_argument('keyName') capabilities = parser.add_mutually_exclusive_group(required=True) capabilities.add_argument('capabilities', type=parse_comma_separated_list, nargs='?') add_normalized_argument(capabilities, '--all-capabilities', action='store_true') super()._setup_parser(parser) def _run(self, args): # Translate the bucket name into a bucketId if args.bucket is None: bucket_id_or_none = None else: bucket_id_or_none = self.api.get_bucket_by_name(args.bucket).id_ if args.all_capabilities: current_key_caps = set(self.api.account_info.get_allowed()['capabilities']) preview_feature_caps = { 'readBucketNotifications', 'writeBucketNotifications', } args.capabilities = sorted( set(ALL_CAPABILITIES) - preview_feature_caps | current_key_caps ) application_key = self.api.create_key( capabilities=args.capabilities, key_name=args.keyName, valid_duration_seconds=args.duration, bucket_id=bucket_id_or_none, name_prefix=args.name_prefix ) self._print(f'{application_key.id_} {application_key.application_key}') return 0 class DeleteBucket(Command): """ Deletes the bucket with the given name. Requires capability: - **deleteBuckets** """ @classmethod def _setup_parser(cls, parser): add_bucket_name_argument(parser) super()._setup_parser(parser) def _run(self, args): bucket = self.api.get_bucket_by_name(args.bucketName) self.api.delete_bucket(bucket) return 0 class DeleteFileVersion(FileIdAndOptionalFileNameMixin, Command): """ Permanently and irrevocably deletes one version of a file. {FileIdAndOptionalFileNameMixin} If a file is in governance retention mode, and the retention period has not expired, adding ``--bypass-governance`` is required. Requires capability: - **deleteFiles** - **readFiles** (if file name not provided) and optionally: - **bypassGovernance** """ @classmethod def _setup_parser(cls, parser): super()._setup_parser(parser) add_normalized_argument(parser, '--bypass-governance', action='store_true', default=False) def _run(self, args): file_name = self._get_file_name_from_args(args) file_info = self.api.delete_file_version(args.fileId, file_name, args.bypass_governance) self._print_json(file_info) return 0 class DeleteKey(Command): """ Deletes the specified application key by its ID. 
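For example (illustrative; the key ID is a placeholder):

.. code-block::

    {NAME} delete-key 000aaaabbbbccc000000000a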
Requires capability: - **deleteKeys** """ @classmethod def _setup_parser(cls, parser): parser.add_argument('applicationKeyId') super()._setup_parser(parser) def _run(self, args): application_key = self.api.delete_key_by_id(application_key_id=args.applicationKeyId) self._print(application_key.id_) return 0 class DownloadCommand( ProgressMixin, SourceSseMixin, WriteBufferSizeMixin, SkipHashVerificationMixin, Command, metaclass=ABCMeta ): """ helper methods for returning results from download commands """ def _print_download_info( self, downloaded_file: DownloadedFile, output_filepath: pathlib.Path ) -> None: download_version = downloaded_file.download_version output_filepath_string = 'stdout' if output_filepath == STDOUT_FILEPATH else str( output_filepath.resolve() ) self._print_file_attribute('File name', download_version.file_name) self._print_file_attribute('File id', download_version.id_) self._print_file_attribute('Output file path', output_filepath_string) self._print_file_attribute('File size', str(download_version.content_length)) self._print_file_attribute('Content type', download_version.content_type) self._print_file_attribute('Content sha1', download_version.content_sha1) self._print_file_attribute( 'Encryption', self._represent_encryption(download_version.server_side_encryption) ) self._print_file_attribute( 'Retention', self._represent_retention(download_version.file_retention) ) self._print_file_attribute( 'Legal hold', self._represent_legal_hold(download_version.legal_hold) ) for label, attr_name in [ ('CacheControl', 'cache_control'), ('Expires', 'expires'), ('ContentDisposition', 'content_disposition'), ('ContentLanguage', 'content_language'), ('ContentEncoding', 'content_encoding'), ]: attr_value = getattr(download_version, attr_name) if attr_value is not None: self._print_file_attribute(label, attr_value) for name in sorted(download_version.file_info): self._print_file_attribute(f'INFO {name}', download_version.file_info[name]) if download_version.content_sha1 != 'none': self._print('Checksum matches') return 0 @classmethod def _represent_encryption(cls, encryption: EncryptionSetting): # TODO: refactor to use "match" syntax after dropping python 3.9 support if encryption.mode is EncryptionMode.NONE: return 'none' result = f'mode={encryption.mode.value}, algorithm={encryption.algorithm.value}' if encryption.mode is EncryptionMode.SSE_B2: pass elif encryption.mode is EncryptionMode.SSE_C: if encryption.key.key_id is not None: result += f', key_id={encryption.key.key_id}' else: raise ValueError(f'Unsupported encryption mode: {encryption.mode}') return result @classmethod def _represent_retention(cls, retention: FileRetentionSetting): if retention.mode is RetentionMode.NONE: return 'none' if retention.mode is RetentionMode.UNKNOWN: return '' if retention.mode in (RetentionMode.COMPLIANCE, RetentionMode.GOVERNANCE): return 'mode={}, retainUntil={}'.format( retention.mode.value, datetime.datetime.fromtimestamp( retention.retain_until / 1000, datetime.timezone.utc ) ) raise ValueError(f'Unsupported retention mode: {retention.mode}') @classmethod def _represent_legal_hold(cls, legal_hold: LegalHold): if legal_hold in (LegalHold.ON, LegalHold.OFF): return legal_hold.value if legal_hold is LegalHold.UNKNOWN: return '' if legal_hold is LegalHold.UNSET: return '' raise ValueError(f'Unsupported legal hold: {legal_hold}') def _print_file_attribute(self, label, value): self._print((label + ':').ljust(20) + ' ' + value) def get_local_output_filepath( self, filename: str, file_request: 
DownloadedFile ) -> pathlib.Path: if filename == '-': return STDOUT_FILEPATH output_filepath = pathlib.Path(filename) # As long as it's not a directory, we're overwriting everything. if not output_filepath.is_dir(): return output_filepath # If the output is a directory, we're expected to download the file right there. # Normally, we overwrite the target without asking any questions, but in this case # the user might be oblivious to the mistake they are about to make. # If they, e.g., download a file by ID, they might not know the name of the file # and could overwrite something unintended. output_directory = output_filepath output_filepath = output_directory / file_request.download_version.file_name # If it doesn't exist, we stop worrying. if not output_filepath.exists(): return output_filepath # If it does exist, we make a unique file prefixed with the actual file name. file_name_as_path = pathlib.Path(file_request.download_version.file_name) file_name = file_name_as_path.stem file_extension = file_name_as_path.suffix # Default permissions are: readable and writable by this user only, executable by no one. # This "temporary" file is not automatically removed, but it is still created in the safest way possible. fd_handle, output_filepath_str = tempfile.mkstemp( prefix=file_name, suffix=file_extension, dir=output_directory, ) # Close the handle, so the file is not locked. # This file is no longer 100% "safe", but that's acceptable. os.close(fd_handle) # A "normal" file created by Python is readable by everyone and writable by the user only. # We change the permissions to match those defaults. os.chmod(output_filepath_str, 0o644) return pathlib.Path(output_filepath_str) class DownloadFileBase( ThreadsMixin, MaxDownloadStreamsMixin, DownloadCommand, ): """ Downloads the given file-like object, and stores it in the given local file.
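For example, with the ``download-file`` variant (bucket and file names are illustrative):

.. code-block::

    {NAME} download-file b2://bucketName/path/to/file.txt localFileName.txt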
{ProgressMixin} {ThreadsMixin} {SourceSseMixin} {WriteBufferSizeMixin} {SkipHashVerificationMixin} {MaxDownloadStreamsMixin} Requires capability: - **readFiles** """ def _run(self, args): progress_listener = self.make_progress_listener( args.localFileName, args.no_progress or args.quiet ) encryption_setting = self._get_source_sse_setting(args) self._set_threads_from_args(args) b2_uri = self.get_b2_uri_from_arg(args) downloaded_file = self.api.download_file_by_uri( b2_uri, progress_listener, encryption=encryption_setting ) output_filepath = self.get_local_output_filepath(args.localFileName, downloaded_file) self._print_download_info(downloaded_file, output_filepath) progress_listener.change_description(output_filepath.name) downloaded_file.save_to(output_filepath) self._print('Download finished') return 0 class DownloadFile(B2URIFileArgMixin, DownloadFileBase): __doc__ = DownloadFileBase.__doc__ @classmethod def _setup_parser(cls, parser): super()._setup_parser(parser) parser.add_argument('localFileName') def get_b2_uri_from_arg(self, args: argparse.Namespace) -> B2URIBase: return args.B2_URI class DownloadFileById(CmdReplacedByMixin, B2URIFileIDArgMixin, DownloadFileBase): __doc__ = DownloadFileBase.__doc__ replaced_by_cmd = DownloadFile @classmethod def _setup_parser(cls, parser): super()._setup_parser(parser) parser.add_argument('localFileName') class DownloadFileByName(CmdReplacedByMixin, B2URIBucketNFilenameArgMixin, DownloadFileBase): __doc__ = DownloadFileBase.__doc__ replaced_by_cmd = DownloadFile @classmethod def _setup_parser(cls, parser): super()._setup_parser(parser) parser.add_argument('localFileName') class Cat(B2URIFileArgMixin, DownloadCommand): """ Downloads content of a file-like object identified by B2 URI directly to stdout. {ProgressMixin} {SourceSseMixin} {WriteBufferSizeMixin} {SkipHashVerificationMixin} Requires capability: - **readFiles** """ def _run(self, args): target_filename = '-' progress_listener = self.make_progress_listener( target_filename, args.no_progress or args.quiet ) encryption_setting = self._get_source_sse_setting(args) file_request = self.api.download_file_by_uri( args.B2_URI, progress_listener=progress_listener, encryption=encryption_setting ) output_filepath = self.get_local_output_filepath(target_filename, file_request) file_request.save_to(output_filepath) return 0 class GetAccountInfo(Command): """ Shows the account ID, key, auth token, URLs, and what capabilities the current application key has. """ def _run(self, args): data = format_account_info(self.api.account_info) self._print_json(data) return 0 class GetBucket(Command): """ Prints all of the information about the bucket, including bucket info, CORS rules and lifecycle rules. If ``--show-size`` is specified, then displays the number of files (``fileCount``) in the bucket and the aggregate size of all files (``totalSize``). Hidden files and hide markers are accounted for in the reported number of files, and hidden files also contribute toward the reported aggregate size, whereas hide markers do not. Each version of a file counts as an individual file, and its size contributes toward the aggregate size. Analysis is recursive. .. note:: Note that ``--show-size`` requires multiple API calls, and will therefore incur additional latency, computation, and Class C transactions.
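For example (the bucket name is illustrative):

.. code-block::

    {NAME} get-bucket --show-size my-bucket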
Requires capability: - **listBuckets** """ @classmethod def _setup_parser(cls, parser): add_normalized_argument(parser, '--show-size', action='store_true') add_bucket_name_argument(parser) super()._setup_parser(parser) def _run(self, args): # This always wants up-to-date info, so it does not use # the bucket cache. for b in self.api.list_buckets(args.bucketName): if not args.show_size: self._print_json(b) return 0 else: result = b.as_dict() # `files` is a generator. We don't want to collect all of the values from the # generator, as there may be billions of files in a large bucket. files = b.ls("", latest_only=False, recursive=True) # `files` yields tuples of (file_version, folder_name). We don't care about # `folder_name`, so just access the first slot of the tuple directly in the # reducer. We can't ask a generator for its size, as the elements are yielded # lazily, so we need to accumulate the count as we go. By using a tuple of # (file count, total size), we can obtain the desired information very compactly # and efficiently. count_size_tuple = functools.reduce( (lambda partial, f: (partial[0] + 1, partial[1] + f[0].size)), files, (0, 0) ) result['fileCount'] = count_size_tuple[0] result['totalSize'] = count_size_tuple[1] self._print_json(result) return 0 self._print_stderr('bucket not found: ' + args.bucketName) return 1 class FileInfoBase(Command): """ Prints all of the information about the object, but not its contents. Requires capability: - **readFiles** """ def _run(self, args): b2_uri = self.get_b2_uri_from_arg(args) file_version = self.api.get_file_info_by_uri(b2_uri) self._print_json(file_version) return 0 class FileInfo(B2URIFileArgMixin, FileInfoBase): __doc__ = FileInfoBase.__doc__ class GetFileInfo(CmdReplacedByMixin, B2URIFileIDArgMixin, FileInfoBase): __doc__ = FileInfoBase.__doc__ replaced_by_cmd = FileInfo class GetDownloadAuth(Command): """ Prints an authorization token that is valid only for downloading files from the given bucket. The token is valid for the duration specified, which defaults to 86400 seconds (one day). Only files that match the given prefix can be downloaded with the token. The prefix defaults to "", which matches all files in the bucket. Requires capability: - **shareFiles** """ @classmethod def _setup_parser(cls, parser): parser.add_argument('--prefix', default='') parser.add_argument('--duration', type=int, default=86400) add_bucket_name_argument(parser) super()._setup_parser(parser) def _run(self, args): bucket = self.api.get_bucket_by_name(args.bucketName) auth_token = bucket.get_download_authorization( file_name_prefix=args.prefix, valid_duration_in_seconds=args.duration ) self._print(auth_token) return 0 class GetDownloadUrlWithAuth(Command): """ Prints a URL to download the given file. The URL includes an authorization token that allows downloads from the given bucket for files whose names start with the given file name. The URL will work for the given file, but is not specific to that file. Files with longer names that start with the given file name can also be downloaded with the same auth token. The token is valid for the duration specified, which defaults to 86400 seconds (one day).
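For example, to generate a link that stays valid for one hour (the bucket and file names are illustrative):

.. code-block::

    {NAME} get-download-url-with-auth --duration 3600 my-bucket path/to/file.pdf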
Requires capability: - **shareFiles** """ @classmethod def _setup_parser(cls, parser): parser.add_argument('--duration', type=int, default=86400) add_bucket_name_argument(parser) parser.add_argument('fileName').completer = file_name_completer super()._setup_parser(parser) def _run(self, args): bucket = self.api.get_bucket_by_name(args.bucketName) auth_token = bucket.get_download_authorization( file_name_prefix=args.fileName, valid_duration_in_seconds=args.duration ) base_url = self.api.get_download_url_for_file_name(args.bucketName, args.fileName) url = base_url + '?Authorization=' + auth_token self._print(url) return 0 class HideFile(Command): """ Uploads a new, hidden, version of the given file. Requires capability: - **writeFiles** """ @classmethod def _setup_parser(cls, parser): add_bucket_name_argument(parser) parser.add_argument('fileName').completer = file_name_completer super()._setup_parser(parser) def _run(self, args): bucket = self.api.get_bucket_by_name(args.bucketName) file_info = bucket.hide_file(args.fileName) self._print_json(file_info) return 0 class ListBuckets(Command): """ Lists all of the buckets in the current account. Output lines list the bucket ID, bucket type, and bucket name, and look like this: .. code-block:: 98c960fd1cb4390c5e0f0519 allPublic my-bucket Alternatively, the ``--json`` option produces machine-readable output similar (but not identical) to the server api response format. Requires capability: - **listBuckets** """ @classmethod def _setup_parser(cls, parser): parser.add_argument('--json', action='store_true') super()._setup_parser(parser) def _run(self, args): return self.__class__.run_list_buckets(self, json_=args.json) @classmethod def run_list_buckets(cls, command: Command, *, json_: bool) -> int: buckets = command.api.list_buckets() if json_: command._print_json(list(buckets)) return 0 for b in buckets: command._print(f'{b.id_} {b.type_:<10} {b.name}') return 0 class ListKeys(Command): """ Lists the application keys for the current account. The columns in the output are: - ID of the application key - Name of the application key - Name of the bucket the key is restricted to, or ``-`` for no restriction - Date of expiration, or ``-`` - Time of expiration, or ``-`` - File name prefix, in single quotes - Comma-separated list of capabilities None of the values contain whitespace. For keys restricted to buckets that do not exist any more, the bucket name is replaced with ``id=<bucketId>``, because deleted buckets do not have names any more.
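For illustration, a line of ``--long`` output might look like this (all values are made up):

.. code-block::

    004ba5bdf2e10000000000001 my-key               my-bucket            2024-06-01 12:00:00 'photos/' listFiles,readFiles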
Requires capability: - **listKeys** """ @classmethod def _setup_parser(cls, parser): parser.add_argument('--long', action='store_true') super()._setup_parser(parser) def __init__(self, console_tool): super().__init__(console_tool) self.bucket_id_to_bucket_name = None def _run(self, args): for key in self.api.list_keys(): self.print_key(key, args.long) return 0 def print_key(self, key: ApplicationKey, is_long_format: bool): if is_long_format: format_str = "{keyId} {keyName:20s} {bucketName:20s} {dateStr:10s} {timeStr:8s} '{namePrefix}' {capabilities}" else: format_str = '{keyId} {keyName:20s}' timestamp_or_none = apply_or_none(int, key.expiration_timestamp_millis) (date_str, time_str) = self.timestamp_display(timestamp_or_none) key_str = format_str.format( keyId=key.id_, keyName=key.key_name, bucketName=self.bucket_display_name(key.bucket_id), namePrefix=(key.name_prefix or ''), capabilities=','.join(key.capabilities), dateStr=date_str, timeStr=time_str, ) self._print(key_str) def bucket_display_name(self, bucket_id): # Special case for no bucket ID if bucket_id is None: return '-' # Make sure we have the map if self.bucket_id_to_bucket_name is None: self.bucket_id_to_bucket_name = dict((b.id_, b.name) for b in self.api.list_buckets()) return self.bucket_id_to_bucket_name.get(bucket_id, 'id=' + bucket_id) def timestamp_display(self, timestamp_or_none): """ Returns a pair (date_str, time_str) for the given timestamp """ if timestamp_or_none is None: return '-', '-' else: timestamp = timestamp_or_none dt = datetime.datetime.fromtimestamp(timestamp / 1000, datetime.timezone.utc) return dt.strftime('%Y-%m-%d'), dt.strftime('%H:%M:%S') class ListParts(Command): """ Lists all of the parts that have been uploaded for the given large file, which must be a file that was started but not finished or canceled. Requires capability: - **writeFiles** """ @classmethod def _setup_parser(cls, parser): parser.add_argument('largeFileId') super()._setup_parser(parser) def _run(self, args): for part in self.api.list_parts(args.largeFileId): self._print('%5d %9d %s' % (part.part_number, part.content_length, part.content_sha1)) return 0 class ListUnfinishedLargeFiles(Command): """ Lists all of the large files in the bucket that were started, but not finished or canceled. Requires capability: - **listFiles** """ @classmethod def _setup_parser(cls, parser): add_bucket_name_argument(parser) super()._setup_parser(parser) def _run(self, args): bucket = self.api.get_bucket_by_name(args.bucketName) for unfinished in bucket.list_unfinished_large_files(): file_info_text = ' '.join( f'{k}={unfinished.file_info[k]}' for k in sorted(unfinished.file_info) ) self._print( f'{unfinished.file_id} {unfinished.file_name} {unfinished.content_type} {file_info_text}' ) return 0 class AbstractLsCommand(Command, metaclass=ABCMeta): """ The ``--versions`` option selects all versions of each file, not just the most recent. The ``--recursive`` option will descend into folders, and will select only files, not folders. The ``--with-wildcard`` option will allow using ``*``, ``?`` and ``[]`` characters in ``folderName`` as a greedy wildcard, a single-character wildcard, and a range of characters, respectively. It requires the ``--recursive`` option. Remember to quote ``folderName`` to avoid shell expansion. The --include and --exclude flags can be used to filter the files returned from the server using wildcards. You can specify multiple --include and --exclude filters. The order of filters matters.
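For example, the following selects csv files except those under ``tmp/`` (names illustrative), because the ``--exclude`` filter appears last:

.. code-block::

    {NAME} ls --recursive --include "*.csv" --exclude "tmp/*" "b2://bucketName"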
The *last* matching filter decides whether a file is included or excluded. If the given list of filters contains only INCLUDE filters, then it is assumed that all files are excluded by default. """ @classmethod def _setup_parser(cls, parser): parser.add_argument('--versions', action='store_true') parser.add_argument('-r', '--recursive', action='store_true') add_normalized_argument(parser, '--with-wildcard', action='store_true') parser.add_argument( '--include', dest='filters', action='append', type=Filter.include, default=[] ) parser.add_argument( '--exclude', dest='filters', action='append', type=Filter.exclude, default=[] ) super()._setup_parser(parser) def _print_files(self, args, b2_uri: B2URI | None = None): generator = self._get_ls_generator(args, b2_uri=b2_uri) for file_version, folder_name in generator: self._print_file_version(args, file_version, folder_name) def _print_file_version( self, args, file_version: FileVersion, folder_name: str | None, ) -> None: name = folder_name or file_version.file_name if args.escape_control_characters: name = escape_control_chars(name) self._print(name) def _get_ls_generator(self, args, b2_uri: B2URI | None = None): b2_uri = b2_uri or self.get_b2_uri_from_arg(args) try: yield from self.api.list_file_versions_by_uri( b2_uri, latest_only=not args.versions, recursive=args.recursive, with_wildcard=args.with_wildcard, filters=args.filters, ) except Exception as err: raise CommandError(unprintable_to_hex(str(err))) from err def get_b2_uri_from_arg(self, args: argparse.Namespace) -> B2URI: raise NotImplementedError class BaseLs(AbstractLsCommand, metaclass=ABCMeta): """ Using the file naming convention that ``/`` separates folder names from their contents, returns a list of the files and folders in a given folder. If no folder name is given, lists all files at the top level. The ``--long`` option produces very wide multi-column output showing the upload date/time, file size, file id, whether it is an uploaded file or the hiding of a file, and the file name. Folders don't really exist in B2, so folders are shown with ``-`` in each of the fields other than the name. The ``--json`` option produces machine-readable output similar to the server api response format. 
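For illustration, a single ``--long`` output line might look roughly like this (the file ID and other values are made up):

.. code-block::

    4_z27c88f1d182b150646ff0b16_f100920ddab886245_d20150809_m232316_c100_v0009990_t0003 upload 2015-08-09 23:23:16 1048576 path/to/file.txt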
The ``--replication`` option adds replication status {AbstractLsCommand} """ # order is file_id, action, date, time, size(, replication), name LS_ENTRY_TEMPLATE = '%83s %6s %10s %8s %9d %s' LS_ENTRY_TEMPLATE_REPLICATION = LS_ENTRY_TEMPLATE + ' %s' @classmethod def _setup_parser(cls, parser): parser.add_argument('--long', action='store_true') parser.add_argument('--json', action='store_true') parser.add_argument('--replication', action='store_true') super()._setup_parser(parser) def _run(self, args): b2_uri = self.get_b2_uri_from_arg(args) if args.long and args.json: raise CommandError('Cannot use --long and --json options together') if not b2_uri or b2_uri == B2URI(""): for option_name in ('long', 'recursive', 'replication'): if getattr(args, option_name, False): raise CommandError( f'Cannot use --{option_name} option without specifying a bucket name' ) return ListBuckets.run_list_buckets(self, json_=args.json) if args.json: i = -1 for i, (file_version, _) in enumerate(self._get_ls_generator(args, b2_uri=b2_uri)): if i: self._print(',', end='') else: self._print('[') self._print_json(file_version) self._print(']' if i >= 0 else '[]') else: self._print_files(args) return 0 def _print_file_version( self, args, file_version: FileVersion, folder_name: str | None, ) -> None: if not args.long: super()._print_file_version(args, file_version, folder_name) elif folder_name is not None: self._print(self.format_folder_ls_entry(args, folder_name, args.replication)) else: self._print(self.format_ls_entry(args, file_version, args.replication)) def format_folder_ls_entry(self, args, name, replication: bool): if args.escape_control_characters: name = escape_control_chars(name) if replication: return self.LS_ENTRY_TEMPLATE_REPLICATION % ('-', '-', '-', '-', 0, '-', name) return self.LS_ENTRY_TEMPLATE % ('-', '-', '-', '-', 0, name) def format_ls_entry(self, args, file_version: FileVersion, replication: bool): dt = datetime.datetime.fromtimestamp( file_version.upload_timestamp / 1000, datetime.timezone.utc ) date_str = dt.strftime('%Y-%m-%d') time_str = dt.strftime('%H:%M:%S') size = file_version.size or 0 # required if self.action == 'hide' template = replication and self.LS_ENTRY_TEMPLATE_REPLICATION or self.LS_ENTRY_TEMPLATE parameters = [ file_version.id_, file_version.action, date_str, time_str, size, ] if replication: replication_status = file_version.replication_status parameters.append(replication_status.value if replication_status else '-') name = file_version.file_name if args.escape_control_characters: name = escape_control_chars(name) parameters.append(name) return template % tuple(parameters) class Ls(B2IDOrB2URIMixin, BaseLs): """ {BaseLs} Examples .. note:: Note the use of quotes, to ensure that special characters are not expanded by the shell. List csv and tsv files (in any directory, in the whole bucket): .. code-block:: {NAME} ls --recursive --with-wildcard "b2://bucketName/*.[ct]sv" List all info.txt files from directories named `b?`, where `?` is any character: .. code-block:: {NAME} ls --recursive --with-wildcard "b2://bucketName/b?/info.txt" List all pdf files from directories b0 to b9 (including sub-directories): .. code-block:: {NAME} ls --recursive --with-wildcard "b2://bucketName/b[0-9]/*.pdf" List all buckets: .. code-block:: {NAME} ls Requires capability: - **listFiles** - **listBuckets** (if bucket name is not provided) """ ALLOW_ALL_BUCKETS = True class BaseRm(ThreadsMixin, AbstractLsCommand, metaclass=ABCMeta): """ Removes a "folder" or a set of files matching a pattern. 
Use with caution. .. note:: ``rm`` is a high-level command that under the hood utilizes multiple calls to the server, which means the server cannot guarantee consistency between multiple operations. For example, if a file matching a pattern is uploaded during a run of the ``rm`` command, it MIGHT be deleted (as "latest") instead of the one present when the ``rm`` run started. In order to safely delete a single file version, please use ``delete-file-version``. To list (but not remove) files to be deleted, use ``--dry-run``. You can also list files via the ``ls`` command - the listing behaviour is exactly the same. Progress is displayed on the console unless ``--no-progress`` is specified. {ThreadsMixin} {AbstractLsCommand} The ``--dry-run`` option prints all the files that would be affected by the command, but removes nothing. Normally, when an error happens during file removal, a log message is printed and the command continues. If any error should immediately stop the command, pass ``--fail-fast`` to ensure that the first error stops the execution. This could be useful to e.g. check whether the provided credentials have the **deleteFiles** capability. .. note:: Using ``--fail-fast`` doesn't prevent the command from trying to remove further files. It just stops queueing new files for removal. Since multiple files are removed in parallel, it's possible that some of them were not reported. The command returns 0 if all files were removed successfully, and a non-zero value if any file was left. """ PROGRESS_REPORT_CLASS = ProgressReport class SubmitThread(threading.Thread): END_MARKER = object() ERROR_TAG = 'error' EXCEPTION_TAG = 'general_exception' def __init__( self, runner: BaseRm, args: argparse.Namespace, messages_queue: queue.Queue, reporter: ProgressReport, threads: int, ): self.runner = runner self.args = args self.messages_queue = messages_queue self.reporter = reporter self.threads = threads removal_queue_size = self.args.queue_size or (2 * self.threads) self.semaphore = threading.BoundedSemaphore(value=removal_queue_size) self.fail_fast_event = threading.Event() self.mapping_lock = threading.Lock() self.futures_mapping = {} super().__init__(daemon=True) def run(self) -> None: try: with ThreadPoolExecutor(max_workers=self.threads) as executor: self._run_removal(executor) except Exception as error: self.messages_queue.put((self.EXCEPTION_TAG, error)) finally: self.messages_queue.put(self.END_MARKER) def _run_removal(self, executor: Executor): for file_version, subdirectory in self.runner._get_ls_generator(self.args): if subdirectory is not None: # This file_version is not for listing/deleting. # It is only here to list the subdirectory, so skip deleting it. continue # Obtaining the semaphore limits the number of elements that we fetch from LS. self.semaphore.acquire(blocking=True) # This event is updated before the semaphore is released. This way, # in a single-threaded scenario, we get synchronous responses. if self.fail_fast_event.is_set(): break self.reporter.update_total(1) future = executor.submit( self.runner.api.delete_file_version, file_version.id_, file_version.file_name, ) with self.mapping_lock: self.futures_mapping[future] = file_version # The done callback is added afterwards, so it is certain that the mapping is updated first.
future.add_done_callback(self._removal_done) self.reporter.end_total() def _removal_done(self, future: Future) -> None: with self.mapping_lock: file_version = self.futures_mapping.pop(future) try: future.result() self.reporter.update_count(1) except FileNotPresent: # We wanted to remove this file anyway. self.reporter.update_count(1) except B2Error as error: if self.args.fail_fast: # This is set before releasing the semaphore. # It means that when the semaphore is released, # we'll already have the information that we need to fail. self.fail_fast_event.set() self.messages_queue.put((self.ERROR_TAG, file_version, error)) except Exception as error: self.messages_queue.put((self.EXCEPTION_TAG, error)) finally: self.semaphore.release() @classmethod def _setup_parser(cls, parser): add_normalized_argument(parser, '--dry-run', action='store_true') add_normalized_argument(parser, '--queue-size', type=int, default=None, help='max elements fetched at once for removal, ' \ 'if left unset defaults to twice the number of threads.', ) add_normalized_argument(parser, '--no-progress', action='store_true') add_normalized_argument(parser, '--fail-fast', action='store_true') super()._setup_parser(parser) def _run(self, args): if args.dry_run: self._print_files(args) return 0 failed_on_any_file = False messages_queue = queue.Queue() threads = self._get_threads_from_args(args) with self.PROGRESS_REPORT_CLASS(self.stdout, args.no_progress or args.quiet) as reporter: submit_thread = self.SubmitThread(self, args, messages_queue, reporter, threads=threads) # This thread is started in daemon mode, no joining needed. submit_thread.start() while True: queue_entry = messages_queue.get(block=True) if queue_entry is submit_thread.END_MARKER: break event_type, *data = queue_entry if event_type == submit_thread.ERROR_TAG: file_version, error = data message = f'Deletion of file "{file_version.file_name}" ' \ f'({file_version.id_}) failed: {str(error)}' reporter.print_completion(message) failed_on_any_file = True if args.fail_fast: break elif event_type == submit_thread.EXCEPTION_TAG: raise data[0] return 1 if failed_on_any_file else 0 class Rm(B2IDOrB2URIMixin, BaseRm): """ {BaseRm} Examples .. note:: Note the use of quotes, to ensure that special characters are not expanded by the shell. .. note:: Use with caution. Running the examples presented below can cause data loss. Remove all csv and tsv files (in any directory, in the whole bucket): .. code-block:: {NAME} rm --recursive --with-wildcard "b2://bucketName/*.[ct]sv" Remove all info.txt files from directories named ``b?``, where ``?`` is any character: .. code-block:: {NAME} rm --recursive --with-wildcard "b2://bucketName/b?/info.txt" Remove all pdf files from directories b0 to b9 (including sub-directories): .. code-block:: {NAME} rm --recursive --with-wildcard "b2://bucketName/b[0-9]/*.pdf" Requires capability: - **listFiles** - **deleteFiles** """ class GetUrlBase(Command): """ Prints a URL that can be used to download the given file, if it is public.
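For example:

.. code-block::

    {NAME} get-url "b2://bucketName/fileName"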
""" def _run(self, args): b2_uri = self.get_b2_uri_from_arg(args) self._print(self.api.get_download_url_by_uri(b2_uri)) return 0 class GetUrl(B2URIFileArgMixin, GetUrlBase): __doc__ = GetUrlBase.__doc__ class MakeUrl(CmdReplacedByMixin, B2URIFileIDArgMixin, GetUrlBase): __doc__ = GetUrlBase.__doc__ replaced_by_cmd = GetUrl class MakeFriendlyUrl(CmdReplacedByMixin, B2URIBucketNFilenameArgMixin, GetUrlBase): __doc__ = GetUrlBase.__doc__ replaced_by_cmd = GetUrl class Sync( ThreadsMixin, DestinationSseMixin, SourceSseMixin, WriteBufferSizeMixin, SkipHashVerificationMixin, MaxDownloadStreamsMixin, UploadModeMixin, Command, ): """ Copies multiple files from source to destination. Optionally deletes or hides destination files that the source does not have. The synchronizer can copy files: - From a B2 bucket to a local destination. - From a local source to a B2 bucket. - From one B2 bucket to another. - Between different folders in the same B2 bucket. Use ``b2:///`` for B2 paths, e.g. ``b2://my-bucket-name/a/path/prefix/``. Progress is displayed on the console unless ``--no-progress`` is specified. A list of actions taken is always printed. Specify ``--dry-run`` to simulate the actions that would be taken. To allow sync to run when the source directory is empty, potentially deleting all files in a bucket, specify ``--allow-empty-source``. The default is to fail when the specified source directory doesn't exist or is empty. (This check only applies to version 1.0 and later.) {ThreadsMixin} You can alternatively control number of threads per each operation. The number of files processed in parallel is set by ``--sync-threads``, the number of files/file parts downloaded in parallel is set by``--download-threads``, and the number of files/file parts uploaded in parallel is set by `--upload-threads``. All the three parameters can be set to the same value by ``--threads``. Experiment with parameters if the defaults are not working well. Users with low-performance networks may benefit from reducing the number of threads. Using just one thread will minimize the impact on other users of the network. .. note:: Note that using multiple threads could be detrimental to the other users on your network. You can specify ``--exclude-regex`` to selectively ignore files that match the given pattern. Ignored files will not copy during the sync operation. The pattern is a regular expression that is tested against the full path of each file. You can specify ``--include-regex`` to selectively override ignoring files that match the given ``--exclude-regex`` pattern by an ``--include-regex`` pattern. Similarly to ``--exclude-regex``, the pattern is a regular expression that is tested against the full path of each file. .. note:: Note that ``--include-regex`` cannot be used without ``--exclude-regex``. You can specify ``--exclude-all-symlinks`` to skip symlinks when syncing from a local source. When a directory is excluded by using ``--exclude-dir-regex``, all of the files within it are excluded, even if they match an ``--include-regex`` pattern. This means that there is no need to look inside excluded directories, and you can exclude directories containing files for which you don't have read permission and avoid getting errors. The ``--exclude-dir-regex`` is a regular expression that is tested against the full path of each directory. The path being matched does not have a trailing ``/``, so don't include on in your regular expression. Multiple regex rules can be applied by supplying them as pipe delimited instructions. 
Note that the regex for this command is Python regex. Reference: `<https://docs.python.org/3/library/re.html>`_ Regular expressions are considered a match if they match a substring starting at the first character. ``.*e`` will match ``hello``. This is not ideal, but we will maintain this behavior for compatibility. If you want to match the entire path, put a ``$`` at the end of the regex, such as ``.*llo$``. You can specify ``--exclude-if-modified-after`` to selectively ignore file versions (including hide markers) which were synced after the given time (for a local source) or ignore only specific file versions (for a b2 source). Ignored files or file versions will not be taken into consideration during sync. The time should be given as a seconds timestamp (e.g. "1367900664"). If you need millisecond precision, put it after the decimal point (e.g. "1367900664.152"). Files are considered to be the same if they have the same name and modification time. This behaviour can be changed using the ``--compare-versions`` option. Possible values are: - ``none``: Comparison using the file name only - ``modTime``: Comparison using the modification time (default) - ``size``: Comparison using the file size A future enhancement may add the ability to compare the SHA1 checksum of the files. Fuzzy comparison of files based on modTime or size can be enabled by specifying the ``--compare-threshold`` option. This will treat modTimes (in milliseconds) or sizes (in bytes) as the same if they are within the comparison threshold. Files that match, within the threshold, will not be synced. Specifying ``--verbose`` and ``--dry-run`` can be useful to determine comparison value differences. When a destination file is present that is not in the source, the default is to leave it there. Specifying ``--delete`` means to delete destination files that are not in the source. When the destination is B2, you have the option of leaving older versions in place. Specifying ``--keep-days`` will delete any older versions that are more than the given number of days old, based on the modification time of the file. This option is not available when the destination is a local folder. Files at the source that have a newer modification time are always copied to the destination. If the destination file is newer, the default is to report an error and stop. But with ``--skip-newer`` set, those files will just be skipped. With ``--replace-newer`` set, the old file from the source will replace the newer one in the destination. To make the destination exactly match the source, use: .. code-block:: {NAME} sync --delete --replace-newer ... ... .. warning:: Using ``--delete`` deletes files! We recommend not using it. If you use ``--keep-days`` instead, you will have some time to recover your files if you discover they are missing on the source end. To make the destination match the source, but retain previous versions for 30 days: .. code-block:: {NAME} sync --keep-days 30 --replace-newer ... b2://... Example of sync being used with ``--exclude-regex``. This will ignore ``.DS_Store`` files and ``.Spotlight-V100`` folders: .. code-block:: {NAME} sync --exclude-regex '(.*\\.DS_Store)|(.*\\.Spotlight-V100)' ... b2://...
{DestinationSseMixin} {SourceSseMixin} {WriteBufferSizeMixin} {SkipHashVerificationMixin} {MaxDownloadStreamsMixin} {UploadModeMixin} Requires capabilities: - **listFiles** - **readFiles** (for downloading) - **writeFiles** (for uploading) """ DEFAULT_SYNC_THREADS = 10 DEFAULT_DOWNLOAD_THREADS = 10 DEFAULT_UPLOAD_THREADS = 10 @classmethod def _setup_parser(cls, parser): add_normalized_argument(parser, '--no-progress', action='store_true') add_normalized_argument(parser, '--dry-run', action='store_true') add_normalized_argument(parser, '--allow-empty-source', action='store_true') add_normalized_argument(parser, '--exclude-all-symlinks', action='store_true') add_normalized_argument( parser, '--sync-threads', type=int, default=cls.DEFAULT_SYNC_THREADS ) add_normalized_argument( parser, '--download-threads', type=int, default=cls.DEFAULT_DOWNLOAD_THREADS ) add_normalized_argument( parser, '--upload-threads', type=int, default=cls.DEFAULT_UPLOAD_THREADS ) add_normalized_argument( parser, '--compare-versions', default='modTime', choices=('none', 'modTime', 'size') ) add_normalized_argument(parser, '--compare-threshold', type=int, metavar='MILLIS') add_normalized_argument( parser, '--exclude-regex', action='append', default=[], metavar='REGEX' ) add_normalized_argument( parser, '--include-regex', action='append', default=[], metavar='REGEX' ) add_normalized_argument( parser, '--exclude-dir-regex', action='append', default=[], metavar='REGEX' ) add_normalized_argument( parser, '--exclude-if-modified-after', type=parse_millis_from_float_timestamp, default=None, metavar='TIMESTAMP' ) super()._setup_parser(parser) # add parameters from the mixins, and the parent class parser.add_argument('source') parser.add_argument('destination') skip_group = parser.add_mutually_exclusive_group() add_normalized_argument(skip_group, '--skip-newer', action='store_true') add_normalized_argument(skip_group, '--replace-newer', action='store_true') del_keep_group = parser.add_mutually_exclusive_group() add_normalized_argument(del_keep_group, '--delete', action='store_true') add_normalized_argument(del_keep_group, '--keep-days', type=float, metavar='DAYS') def _run(self, args): policies_manager = self.get_policies_manager_from_args(args) if args.threads is not None: if args.sync_threads != self.DEFAULT_SYNC_THREADS \ or args.upload_threads != self.DEFAULT_UPLOAD_THREADS \ or args.download_threads != self.DEFAULT_DOWNLOAD_THREADS: raise ValueError("--threads cannot be used with other thread options") sync_threads = upload_threads = download_threads = args.threads else: sync_threads = args.sync_threads upload_threads = args.upload_threads download_threads = args.download_threads self.api.services.upload_manager.set_thread_pool_size(upload_threads) self.api.services.download_manager.set_thread_pool_size(download_threads) source = parse_sync_folder(args.source, self.console_tool.api) destination = parse_sync_folder(args.destination, self.console_tool.api) allow_empty_source = args.allow_empty_source or VERSION_0_COMPATIBILITY synchronizer = self.get_synchronizer_from_args( args, sync_threads, policies_manager, allow_empty_source, self.api.session.account_info.get_absolute_minimum_part_size(), ) kwargs = {} read_encryption_settings = {} write_encryption_settings = {} source_bucket = destination_bucket = None destination_sse = self._get_destination_sse_setting(args) if destination.folder_type() == 'b2': destination_bucket = destination.bucket_name write_encryption_settings[destination_bucket] = destination_sse elif 
destination_sse is not None: raise ValueError('server-side encryption cannot be set for a non-b2 sync destination') source_sse = self._get_source_sse_setting(args) if source.folder_type() == 'b2': source_bucket = source.bucket_name read_encryption_settings[source_bucket] = source_sse elif source_sse is not None: raise ValueError('server-side encryption cannot be set for a non-b2 sync source') if read_encryption_settings or write_encryption_settings: kwargs['encryption_settings_provider'] = BasicSyncEncryptionSettingsProvider( read_bucket_settings=read_encryption_settings, write_bucket_settings=write_encryption_settings, ) with SyncReport(self.stdout, args.no_progress or args.quiet) as reporter: try: synchronizer.sync_folders( source_folder=source, dest_folder=destination, now_millis=current_time_millis(), reporter=reporter, **kwargs ) except EmptyDirectory as ex: raise CommandError( f'Directory {ex.path} is empty. Use --allow-empty-source to sync anyway.' ) except NotADirectory as ex: raise CommandError(f'{ex.path} is not a directory') except UnableToCreateDirectory as ex: raise CommandError(f'unable to create directory {ex.path}') return 0 def get_policies_manager_from_args(self, args): return ScanPoliciesManager( exclude_dir_regexes=args.exclude_dir_regex, exclude_file_regexes=args.exclude_regex, include_file_regexes=args.include_regex, exclude_all_symlinks=args.exclude_all_symlinks, exclude_modified_after=args.exclude_if_modified_after, ) def get_synchronizer_from_args( self, args, max_workers, policies_manager=DEFAULT_SCAN_MANAGER, allow_empty_source=False, absolute_minimum_part_size=None, ): if args.replace_newer: newer_file_mode = NewerFileSyncMode.REPLACE elif args.skip_newer: newer_file_mode = NewerFileSyncMode.SKIP else: newer_file_mode = NewerFileSyncMode.RAISE_ERROR if args.compare_versions == 'none': compare_version_mode = CompareVersionMode.NONE elif args.compare_versions == 'modTime': compare_version_mode = CompareVersionMode.MODTIME elif args.compare_versions == 'size': compare_version_mode = CompareVersionMode.SIZE else: compare_version_mode = CompareVersionMode.MODTIME compare_threshold = args.compare_threshold keep_days = None if args.delete: keep_days_or_delete = KeepOrDeleteMode.DELETE elif args.keep_days: keep_days_or_delete = KeepOrDeleteMode.KEEP_BEFORE_DELETE keep_days = args.keep_days else: keep_days_or_delete = KeepOrDeleteMode.NO_DELETE upload_mode = self._get_upload_mode_from_args(args) return Synchronizer( max_workers, policies_manager=policies_manager, dry_run=args.dry_run, allow_empty_source=allow_empty_source, newer_file_mode=newer_file_mode, keep_days_or_delete=keep_days_or_delete, compare_version_mode=compare_version_mode, compare_threshold=compare_threshold, keep_days=keep_days, upload_mode=upload_mode, absolute_minimum_part_size=absolute_minimum_part_size, ) class UpdateBucket(DefaultSseMixin, LifecycleRulesMixin, Command): """ Updates the ``bucketType`` of an existing bucket. Prints the ID of the bucket updated. Optionally stores bucket info, CORS rules and lifecycle rules with the bucket. These can be given as JSON on the command line. {DefaultSseMixin} {LifecycleRulesMixin} To set a default retention for files in the bucket ``--default-retention-mode`` and ``--default-retention-period`` have to be specified. The latter one is of the form "X days|years". {FILE_RETENTION_COMPATIBILITY_WARNING} This command can be used to set the bucket's ``fileLockEnabled`` flag to ``true`` using the ``--file-lock-enabled`` option. 
This can only be done if the bucket is not set up as a replication source. .. warning:: Once ``fileLockEnabled`` is set, it can NOT be reverted back to ``false``. Please note that replication from a file-lock-enabled bucket to a file-lock-disabled bucket is not allowed, therefore if file lock is enabled on a bucket, it can never again be the replication source bucket for a file-lock-disabled destination. Additionally, in a file-lock-enabled bucket the file metadata limit will be decreased from 7000 bytes to 2048 bytes for new file versions. Please consult the official ``b2_update_bucket`` documentation for further guidance. Requires capability: - **writeBuckets** - **readBucketEncryption** and for some operations: - **writeBucketRetentions** - **writeBucketEncryption** """ @classmethod def _setup_parser(cls, parser): add_normalized_argument(parser, '--bucket-info', type=validated_loads) add_normalized_argument( parser, '--cors-rules', type=validated_loads, help= "If given, the bucket will have a 'custom' CORS configuration. Accepts a JSON string." ) add_normalized_argument( parser, '--default-retention-mode', choices=( RetentionMode.COMPLIANCE.value, RetentionMode.GOVERNANCE.value, 'none', ), default=None, ) add_normalized_argument( parser, '--default-retention-period', type=parse_default_retention_period, metavar='period', ) parser.add_argument('--replication', type=validated_loads) add_normalized_argument( parser, '--file-lock-enabled', action='store_true', default=None, help= "If given, the bucket will have the file lock mechanism enabled. This parameter cannot be changed back." ) add_bucket_name_argument(parser) parser.add_argument('bucketType', nargs='?', choices=CREATE_BUCKET_TYPES) super()._setup_parser(parser) # add parameters from the mixins and the parent class def _run(self, args): if args.default_retention_mode is not None: if args.default_retention_mode == 'none': default_retention = NO_RETENTION_BUCKET_SETTING else: default_retention = BucketRetentionSetting( RetentionMode(args.default_retention_mode), args.default_retention_period ) else: default_retention = None encryption_setting = self._get_default_sse_setting(args) if args.replication is None: replication = None else: replication = ReplicationConfiguration.from_dict(args.replication) bucket = self.api.get_bucket_by_name(args.bucketName) bucket = bucket.update( bucket_type=args.bucketType, bucket_info=args.bucket_info, cors_rules=args.cors_rules, lifecycle_rules=args.lifecycle_rules, default_server_side_encryption=encryption_setting, default_retention=default_retention, replication=replication, is_file_lock_enabled=args.file_lock_enabled, ) self._print_json(bucket) return 0 class MinPartSizeMixin(Described): """ By default, the file is broken into many parts to maximize upload parallelism and increase speed. Setting ``--min-part-size`` controls the minimal upload file part size. Part size must be in the 5MB to 5GB range. Reference: `<https://www.backblaze.com/b2/docs/large_files.html>`_ """ @classmethod def _setup_parser(cls, parser): add_normalized_argument( parser, '--min-part-size', type=int, help="minimum part size in bytes", default=None, ) super()._setup_parser(parser) # noqa class UploadFileMixin( HeaderFlagsMixin, MinPartSizeMixin, ThreadsMixin, ProgressMixin, DestinationSseMixin, LegalHoldMixin, FileRetentionSettingMixin, metaclass=ABCMeta ): """ Content type is optional. If not set, it will be guessed. The maximum number of upload threads to use to upload parts of a large file is specified by ``--threads``. It has no effect on "small" files (under 200MB as of this writing).
Each fileInfo is of the form ``a=b``. """ @classmethod def _setup_parser(cls, parser): add_normalized_argument( parser, '--content-type', help="MIME type of the file being uploaded. If not set it will be guessed." ) parser.add_argument( '--sha1', help="SHA-1 of the data being uploaded for verifying file integrity" ) parser.add_argument( '--info', action='append', default=[], help= "additional file info to be stored with the file. Can be used multiple times for different information." ) add_normalized_argument( parser, '--custom-upload-timestamp', type=int, help="overrides object creation date. Expressed as a number of milliseconds since epoch." ) add_bucket_name_argument(parser, help="name of the bucket where the file will be stored") parser.add_argument('localFilePath', help="path of the local file or stream to be uploaded") parser.add_argument('b2FileName', help="name file will be given when stored in B2") super()._setup_parser(parser) # add parameters from the mixins def _run(self, args): self._set_threads_from_args(args) upload_kwargs = self.get_execute_kwargs(args) file_info = self.execute_operation(**upload_kwargs) bucket = upload_kwargs["bucket"] self._print("URL by file name: " + bucket.get_download_url(file_info.file_name)) self._print("URL by fileId: " + self.api.get_download_url_for_fileid(file_info.id_)) self._print_json(file_info) return 0 def get_execute_kwargs(self, args) -> dict: file_infos = self._parse_file_infos(args.info) if SRC_LAST_MODIFIED_MILLIS not in file_infos and os.path.exists(args.localFilePath): try: mtime = os.path.getmtime(args.localFilePath) except OSError: if not points_to_fifo(pathlib.Path(args.localFilePath)): self._print_stderr( "WARNING: Unable to determine file modification timestamp. " f"{SRC_LAST_MODIFIED_MILLIS!r} file info won't be set." ) else: file_infos[SRC_LAST_MODIFIED_MILLIS] = str(int(mtime * 1000)) file_infos = self._file_info_with_header_args(args, file_infos) return { "bucket": self.api.get_bucket_by_name(args.bucketName), "content_type": args.content_type, "custom_upload_timestamp": args.custom_upload_timestamp, "encryption": self._get_destination_sse_setting(args), "file_info": file_infos, "file_name": args.b2FileName, "file_retention": self._get_file_retention_setting(args), "legal_hold": self._get_legal_hold_setting(args), "local_file": args.localFilePath, "min_part_size": args.min_part_size, "progress_listener": self.make_progress_listener(args.localFilePath, args.no_progress or args.quiet), "sha1_sum": args.sha1, "threads": self._get_threads_from_args(args), } @abstractmethod def execute_operation(self, **kwargs) -> b2sdk.file_version.FileVersion: raise NotImplementedError def upload_file_kwargs_to_unbound_upload(self, **kwargs): """ Translate upload_file kwargs to unbound_upload equivalents """ kwargs["large_file_sha1"] = kwargs.pop("sha1_sum", None) kwargs["buffers_count"] = kwargs["threads"] + 1 kwargs["read_size"] = kwargs["min_part_size"] or DEFAULT_MIN_PART_SIZE return kwargs def get_input_stream(self, filename: str) -> str | int | io.BinaryIO: """Get input stream IF filename points to a FIFO or stdin.""" if filename == "-": if os.path.exists('-'): self._print_stderr( "WARNING: Filename `-` won't be supported in the future and will always be treated as stdin alias." 
) else: return sys.stdin.buffer if platform.system() == "Windows" else sys.stdin.fileno() elif points_to_fifo(pathlib.Path(filename)): return filename raise self.NotAnInputStream() def file_identifier_to_read_stream(self, file_id: str | int | BinaryIO, buffering) -> BinaryIO: if isinstance(file_id, (str, int)): return open( file_id, mode="rb", closefd=not isinstance(file_id, int), buffering=buffering, ) return file_id class NotAnInputStream(Exception): pass class UploadFile(UploadFileMixin, UploadModeMixin, Command): """ Uploads one file to the given bucket. Uploads the contents of the local file, and assigns the given name to the B2 file, possibly setting options like server-side encryption and retention. A FIFO file (such as a named pipe) can be given instead of a regular file. By default, upload_file will compute the sha1 checksum of the file to be uploaded. But, if you already have it, you can provide it on the command line to save a little time. {FILE_RETENTION_COMPATIBILITY_WARNING} {UploadFileMixin} {MinPartSizeMixin} {ProgressMixin} {ThreadsMixin} {DestinationSseMixin} {FileRetentionSettingMixin} {LegalHoldMixin} {UploadModeMixin} The ``--custom-upload-timestamp``, in milliseconds-since-epoch, can be used to artificially change the upload timestamp of the file for the purpose of preserving retention policies after migration of data from other storage. Access to this feature is restricted - if you really need it, you'll need to contact customer support to enable it temporarily for your account. Requires capability: - **writeFiles** """ def get_execute_kwargs(self, args) -> dict: kwargs = super().get_execute_kwargs(args) kwargs["upload_mode"] = self._get_upload_mode_from_args(args) return kwargs def execute_operation(self, local_file, bucket, threads, **kwargs): try: input_stream = self.get_input_stream(local_file) except self.NotAnInputStream: # it is a regular file file_version = bucket.upload_local_file(local_file=local_file, **kwargs) else: if kwargs.pop("upload_mode", None) != UploadMode.FULL: self._print_stderr( "WARNING: Ignoring upload mode setting as we are uploading a stream." ) kwargs = self.upload_file_kwargs_to_unbound_upload(threads=threads, **kwargs) del kwargs["threads"] input_stream = self.file_identifier_to_read_stream( input_stream, kwargs["min_part_size"] or DEFAULT_MIN_PART_SIZE ) with input_stream: file_version = bucket.upload_unbound_stream(read_only_object=input_stream, **kwargs) return file_version class UploadUnboundStream(UploadFileMixin, Command): """ Uploads an unbound stream to the given bucket. Uploads the contents of the unbound stream, such as stdin or a named pipe, and assigns the given name to the resulting B2 file. {FILE_RETENTION_COMPATIBILITY_WARNING} {UploadFileMixin} {MinPartSizeMixin} As opposed to ``b2 upload-file``, ``b2 upload-unbound-stream`` cannot choose the optimal `partSize` on its own. So on a memory-constrained system it is best to use the ``--part-size`` option to set it manually. During upload of an unbound stream, ``--part-size`` as well as ``--threads`` determine the amount of memory used. The maximum memory use for the upload buffers can be estimated at ``partSize * threads``, that is ~1GB by default. What is more, a B2 large file may consist of at most 10,000 parts, so ``minPartSize`` should be adjusted accordingly if you expect the stream to be larger than 50GB.
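For instance (values illustrative), the following caps upload buffer memory at roughly 100MB * 4 = 400MB, while still allowing a stream of up to 100MB * 10,000 parts = ~1TB; ``-`` reads from stdin:

.. code-block::

    {NAME} upload-unbound-stream --part-size 100000000 --threads 4 bucketName - b2FileName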
{ProgressMixin} {ThreadsMixin} {DestinationSseMixin} {FileRetentionSettingMixin} {LegalHoldMixin} The ``--custom-upload-timestamp``, in milliseconds-since-epoch, can be used to artificially change the upload timestamp of the file for the purpose of preserving retention policies after migration of data from other storage. Access to this feature is restricted - if you really need it, you'll need to contact customer support to enable it temporarily for your account. Requires capability: - **writeFiles** """ @classmethod def _setup_parser(cls, parser): add_normalized_argument( parser, '--part-size', type=int, default=None, help=("part size in bytes. Must be in range of <minimumPartSize, maximumPartSize>"), ) add_normalized_argument( parser, '--unused-buffer-timeout-seconds', type=float, default=3600.0, help=( "maximum time in seconds that a single part may sit in the queue," " waiting to be uploaded, before an error is returned" ), ) super()._setup_parser(parser) def get_execute_kwargs(self, args) -> dict: kwargs = super().get_execute_kwargs(args) kwargs = self.upload_file_kwargs_to_unbound_upload(**kwargs) kwargs["recommended_upload_part_size"] = args.part_size kwargs["unused_buffer_timeout_seconds"] = args.unused_buffer_timeout_seconds return kwargs def execute_operation(self, local_file, bucket, threads, **kwargs): try: input_stream = self.get_input_stream(local_file) except self.NotAnInputStream: # it is a regular file self._print_stderr( "WARNING: You are using a stream upload command to upload a regular file. " "While it will work, it is inefficient. " "Use of the upload-file command is recommended." ) input_stream = local_file input_stream = self.file_identifier_to_read_stream( input_stream, kwargs["min_part_size"] or DEFAULT_MIN_PART_SIZE ) with input_stream: file_version = bucket.upload_unbound_stream(read_only_object=input_stream, **kwargs) return file_version class UpdateFileLegalHold(FileIdAndOptionalFileNameMixin, Command): """ Only works in buckets with fileLockEnabled=true. {FileIdAndOptionalFileNameMixin} Requires capability: - **writeFileLegalHolds** - **readFiles** (if file name not provided) """ @classmethod def _setup_parser(cls, parser): super()._setup_parser(parser) parser.add_argument('legalHold', choices=(LegalHold.ON.value, LegalHold.OFF.value)) def _run(self, args): file_name = self._get_file_name_from_args(args) legal_hold = LegalHold(args.legalHold) self.api.update_file_legal_hold(args.fileId, file_name, legal_hold) return 0 class UpdateFileRetention(FileIdAndOptionalFileNameMixin, Command): """ Only works in buckets with fileLockEnabled=true. Providing a ``retention-mode`` other than ``none`` requires providing ``retainUntil``, which has to be a future timestamp in the form of an integer representing milliseconds since epoch. If a file is already in governance mode, disabling retention or shortening its period requires providing ``--bypass-governance``. If a file is already in compliance mode, disabling retention or shortening its period is impossible. {FILE_RETENTION_COMPATIBILITY_WARNING} In both cases prolonging the retention period is possible. Changing from governance to compliance is also supported.
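For example, to put a file version under governance retention until a given time (the file ID and timestamp are illustrative):

.. code-block::

    {NAME} update-file-retention 4_z123456789 governance --retain-until 1767225600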
{FileIdAndOptionalFileNameMixin} Requires capability: - **writeFileRetentions** - **readFiles** (if file name not provided) and optionally: - **bypassGovernance** """ @classmethod def _setup_parser(cls, parser): super()._setup_parser(parser) parser.add_argument( 'retentionMode', choices=(RetentionMode.GOVERNANCE.value, RetentionMode.COMPLIANCE.value, 'none') ) add_normalized_argument( parser, '--retain-until', type=parse_millis_from_float_timestamp, metavar='TIMESTAMP', default=None ) add_normalized_argument(parser, '--bypass-governance', action='store_true', default=False) def _run(self, args): file_name = self._get_file_name_from_args(args) if args.retentionMode == 'none': file_retention = FileRetentionSetting(RetentionMode.NONE) else: file_retention = FileRetentionSetting( RetentionMode(args.retentionMode), args.retain_until ) self.api.update_file_retention( args.fileId, file_name, file_retention, args.bypass_governance ) return 0 class ReplicationSetup(Command): """ Sets up replication between two buckets (potentially from different accounts), creating and replacing keys if necessary. Requires capabilities on both profiles: - **listKeys** - **createKeys** - **readReplications** - **writeReplications** """ @classmethod def _setup_parser(cls, parser): super()._setup_parser(parser) add_normalized_argument(parser, '--destination-profile', default=None) parser.add_argument('source', metavar='SOURCE_BUCKET_NAME') parser.add_argument('destination', metavar='DESTINATION_BUCKET_NAME') add_normalized_argument( parser, '--name', help='name for the new replication rule on the source side' ) add_normalized_argument( parser, '--priority', help= 'priority for the new replication rule on the source side [%d-%d]. Will be set automatically when not specified.' % ( ReplicationRule.MIN_PRIORITY, ReplicationRule.MAX_PRIORITY, ), type=int, default=None, ) add_normalized_argument( parser, '--file-name-prefix', metavar='PREFIX', help='only replicate files starting with PREFIX' ) add_normalized_argument( parser, '--include-existing-files', action='store_true', help='if given, also replicates files uploaded prior to creation of the replication rule' ) def _run(self, args): if args.destination_profile is None: destination_api = self.api else: destination_api = _get_b2api_for_profile(args.destination_profile) helper = ReplicationSetupHelper() helper.setup_both( source_bucket=self.api.get_bucket_by_name(args.source).get_fresh_state(), destination_bucket=destination_api.get_bucket_by_name(args.destination ).get_fresh_state(), name=args.name, priority=args.priority, prefix=args.file_name_prefix, include_existing_files=args.include_existing_files, ) return 0 class ReplicationRuleChanger(Command, metaclass=ABCMeta): @classmethod def _setup_parser(cls, parser): super()._setup_parser(parser) parser.add_argument('source', metavar='SOURCE_BUCKET_NAME') parser.add_argument('rule_name', metavar='REPLICATION_RULE_NAME') def _run(self, args): bucket = self.api.get_bucket_by_name(args.source).get_fresh_state() found, altered = self.alter_rule_by_name(bucket, args.rule_name) if not found: print('ERROR: replication rule could not be found!') return 1 elif not altered: print('ERROR: replication rule was found, but could not be changed!') return 1 return 0 @classmethod def alter_rule_by_name(cls, bucket: Bucket, name: str) -> tuple[bool, bool]: """ returns False if rule could not be found """ if not bucket.replication or not bucket.replication.rules: return False, False found = False altered = False new_rules = [] for rule in 
bucket.replication.rules: if rule.name == name: found = True old_dict_form = rule.as_dict() rule = cls.alter_one_rule(rule) if rule is None: altered = True continue if old_dict_form != rule.as_dict(): altered = True new_rules.append(rule) if altered: new_replication_configuration = ReplicationConfiguration( **{ 'rules': new_rules, 'source_key_id': bucket.replication.source_key_id, }, **bucket.replication.get_destination_configuration_as_dict(), ) bucket.update( if_revision_is=bucket.revision, replication=new_replication_configuration, ) return found, altered @classmethod @abstractmethod def alter_one_rule(cls, rule: ReplicationRule) -> ReplicationRule | None: """ return None to delete a rule """ pass class ReplicationDelete(ReplicationRuleChanger): """ Deletes a replication rule. Requires capabilities: - **readReplications** - **writeReplications** """ @classmethod def alter_one_rule(cls, rule: ReplicationRule) -> ReplicationRule | None: """ return None to delete rule """ return None class ReplicationPause(ReplicationRuleChanger): """ Pauses a replication rule. Requires capabilities: - **readReplications** - **writeReplications** """ @classmethod def alter_one_rule(cls, rule: ReplicationRule) -> ReplicationRule | None: """ return None to delete rule """ rule.is_enabled = False return rule class ReplicationUnpause(ReplicationRuleChanger): """ Unpauses a replication rule. Requires capabilities: - **readReplications** - **writeReplications** """ @classmethod def alter_one_rule(cls, rule: ReplicationRule) -> ReplicationRule | None: """ return None to delete rule """ rule.is_enabled = True return rule class ReplicationStatus(Command): """ Inspects files in the source bucket only, or in both the source and destination buckets (potentially from different accounts), and provides detailed replication statistics. Please be aware that only the latest file versions are inspected, so previous file versions are not represented in these statistics. --output-format The "console" output format is meant to be human-readable and is subject to change in any future release. Use "json" for a stable output format with no breaking changes. When piping the "csv" format to a .csv file, it is handy to use the --no-progress flag, which disables the interactive progress output; otherwise that output would also end up in the first line of the target .csv file. --columns Comma-separated list of columns to be shown. The rows are still grouped by _all_ columns, no matter which of them are shown or hidden via the --columns flag.
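For example, a sketch of how ``--columns`` values are interpreted, mirroring the ``re.split`` and underscore normalization in ``_setup_parser`` and ``_run`` below (the column names are hypothetical):

.. code-block:: python

    import re

    value = 'destination replication status, metadata differs'  # hypothetical columns
    columns = re.split(r', ?', value)
    # spaces may be typed instead of underscores; they are normalized back:
    normalized = [column.replace(' ', '_') for column in columns]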
""" @classmethod def _setup_parser(cls, parser): super()._setup_parser(parser) parser.add_argument('source', metavar='SOURCE_BUCKET_NAME') add_normalized_argument(parser, '--rule', metavar='REPLICATION_RULE_NAME', default=None) add_normalized_argument(parser, '--destination-profile') add_normalized_argument(parser, '--dont-scan-destination', action='store_true') add_normalized_argument( parser, '--output-format', default='console', choices=('console', 'json', 'csv') ) add_normalized_argument(parser, '--no-progress', action='store_true') add_normalized_argument( parser, '--columns', default=['all'], type=lambda value: re.split(r', ?', value), metavar='COLUMN ONE,COLUMN TWO' ) def _run(self, args): destination_api = args.destination_profile and _get_b2api_for_profile( args.destination_profile ) try: bucket = self.api.list_buckets(args.source)[0] except IndexError: self._print_stderr(f'ERROR: bucket "{args.source}" not found') return 1 rules = bucket.replication.rules if args.rule: rules = [rule for rule in rules if rule.name == args.rule] if not rules: self._print_stderr( f'ERROR: no replication rule "{args.rule}" set up for bucket "{args.source}"' ) return 1 results = { rule.name: self.get_results_for_rule( bucket=bucket, rule=rule, destination_api=destination_api, scan_destination=not args.dont_scan_destination, quiet=args.no_progress or args.quiet, ) for rule in rules } if args.columns[0] != 'all': results = { rule_name: self.filter_results_columns( rule_results, [column.replace(' ', '_') for column in args.columns ], # allow users to use spaces instead of underscores ) for rule_name, rule_results in results.items() } if args.output_format == 'json': self.output_json(results) elif args.output_format == 'console': self.output_console(results) elif args.output_format == 'csv': self.output_csv(results) else: self._print_stderr(f'ERROR: format "{args.output_format}" is not supported') return 0 @classmethod def get_results_for_rule( cls, bucket: Bucket, rule: ReplicationRule, destination_api: B2Api | None, scan_destination: bool, quiet: bool ) -> list[dict]: monitor = ReplicationMonitor( bucket=bucket, rule=rule, destination_api=destination_api, report=ProgressReport(sys.stdout, quiet), ) report = monitor.scan(scan_destination=scan_destination) return [ { **dataclasses.asdict(result), 'count': count, } for result, count in report.counter_by_status.items() ] @classmethod def filter_results_columns(cls, results: list[dict], columns: list[str]) -> list[dict]: return [{key: result[key] for key in columns} for result in results] @classmethod def to_human_readable(cls, value: Any) -> str: if isinstance(value, Enum): return value.name if isinstance(value, bool): return 'Yes' if value else 'No' if value is None: return '' return str(value) def output_json(self, results: dict[str, list[dict]]) -> None: self._print_json(results) def output_console(self, results: dict[str, list[dict]]) -> None: for rule_name, rule_results in results.items(): self._print(f'Replication "{rule_name}":') rule_results = [ { key.replace('_', '\n'): # split key to minimize column size self.to_human_readable(value) for key, value in result.items() } for result in rule_results ] self._print(tabulate(rule_results, headers='keys', tablefmt='grid')) def output_csv(self, results: dict[str, list[dict]]) -> None: rows = [] for rule_name, rule_results in results.items(): rows += [ { 'rule name': rule_name, **{ key.replace('_', '\n'): # split key to minimize column size self.to_human_readable(value) for key, value in result.items() }, } 
for result in rule_results ] if not rows: return writer = csv.DictWriter(sys.stdout, fieldnames=list(rows[0].keys())) writer.writeheader() writer.writerows(rows) class Version(Command): """ Prints the version number of this tool. """ REQUIRES_AUTH = False @classmethod def _setup_parser(cls, parser): add_normalized_argument(parser, '--short', action='store_true') super()._setup_parser(parser) def _run(self, args): if args.short: self._print(VERSION) else: self._print('b2 command line tool, version', VERSION) return 0 class License(Command): # pragma: no cover """ Prints the license of B2 Command line tool and all libraries shipped with it. """ LICENSE_OUTPUT_FILE = pathlib.Path(__file__).parent.parent / 'licenses_output.txt' REQUIRES_AUTH = False IGNORE_MODULES = {'b2', 'distlib', 'patchelf-wrapper', 'platformdirs'} REQUEST_TIMEOUT_S = 5 # In case of some modules, we provide manual # overrides to the license text extracted by piplicenses. # Thanks to this set, we make sure the module is still used # PTable is used on versions below Python 3.11 MODULES_TO_OVERRIDE_LICENSE_TEXT = {'rst2ansi', 'b2sdk'} LICENSES = { 'argcomplete': 'https://raw.githubusercontent.com/kislyuk/argcomplete/develop/LICENSE.rst', 'atomicwrites': 'https://raw.githubusercontent.com/untitaker/python-atomicwrites/master/LICENSE', 'platformdirs': 'https://raw.githubusercontent.com/platformdirs/platformdirs/main/LICENSE.txt', 'PTable': 'https://raw.githubusercontent.com/jazzband/prettytable/main/LICENSE', 'pipx': 'https://raw.githubusercontent.com/pypa/pipx/main/LICENSE', 'userpath': 'https://raw.githubusercontent.com/ofek/userpath/master/LICENSE.txt', 'future': 'https://raw.githubusercontent.com/PythonCharmers/python-future/master/LICENSE.txt', 'pefile': 'https://raw.githubusercontent.com/erocarrera/pefile/master/LICENSE', 'https://github.com/python/typeshed': 'https://raw.githubusercontent.com/python/typeshed/main/LICENSE', } class NormalizingStringIO(io.StringIO): def write(self, text, *args, **kwargs): super().write(unicodedata.normalize('NFKD', text), *args, **kwargs) def __init__(self, console_tool): super().__init__(console_tool) self.request_session = requests.session() @classmethod def _setup_parser(cls, parser): # these are for building, users should not call it: add_normalized_argument( parser, '--dump', action='store_true', default=False, help=argparse.SUPPRESS ) add_normalized_argument( parser, '--with-packages', action='store_true', default=False, help=argparse.SUPPRESS ) super()._setup_parser(parser) def _run(self, args): if self.LICENSE_OUTPUT_FILE.exists() and not args.dump: self._print(self.LICENSE_OUTPUT_FILE.read_text(encoding='utf8')) return 0 if args.dump: with self.LICENSE_OUTPUT_FILE.open('w', encoding='utf8') as file: self._put_license_text(file, with_packages=args.with_packages) else: stream = self.NormalizingStringIO() self._put_license_text(stream, with_packages=args.with_packages) stream.seek(0) self._print(stream.read()) return 0 def _put_license_text(self, stream: io.StringIO, with_packages: bool = False): if with_packages: self._put_license_text_for_packages(stream) b2_call_name = self.console_tool.b2_binary_name included_sources = get_included_sources() if included_sources: stream.write( f'\n\nThird party libraries modified and included in {b2_call_name} or {b2sdk.__name__}:\n' ) for src in included_sources: stream.write('\n') stream.write(src.name) stream.write('\n') stream.write(src.comment) stream.write('\n') stream.write('Files included for legal compliance reasons:\n') files_table = 
prettytable.PrettyTable(['File name', 'Content'], hrules=prettytable.ALL) for file_name, file_content in src.files.items(): files_table.add_row([file_name, file_content]) stream.write(str(files_table)) stream.write(f'\n\n{b2_call_name} license:\n') b2_license_file_text = (pathlib.Path(__file__).parent.parent / 'LICENSE').read_text(encoding='utf8') stream.write(b2_license_file_text) def _put_license_text_for_packages(self, stream: io.StringIO): license_table = prettytable.PrettyTable( ['Module name', 'License text'], hrules=prettytable.ALL ) summary_table = prettytable.PrettyTable( ['Module name', 'Version', 'License', 'Author', 'URL'], hrules=prettytable.ALL ) licenses = self._get_licenses_dicts() modules_added = set() for module_info in licenses: if module_info['Name'] in self.IGNORE_MODULES: continue summary_table.add_row( [ module_info['Name'], module_info['Version'], module_info['License'].replace(';', '\n'), module_info['Author'], module_info['URL'], ] ) license_table.add_row([module_info['Name'], self._get_single_license(module_info)]) modules_added.add(module_info['Name']) assert not (self.MODULES_TO_OVERRIDE_LICENSE_TEXT - modules_added) b2_call_name = self.console_tool.b2_binary_name stream.write( f'Licenses of all modules used by {b2_call_name}, shipped with it in binary form:\n' ) stream.write(str(license_table)) stream.write( f'\n\nSummary of all modules used by {b2_call_name}, shipped with it in binary form:\n' ) stream.write(str(summary_table)) @classmethod def _get_licenses_dicts(cls) -> list[dict]: assert piplicenses, 'In order to run this command, you need to install the `license` extra: pip install b2[license]' pipdeptree_run = subprocess.run( ["pipdeptree", "--json", "-p", "b2"], capture_output=True, text=True, check=True, ) pipdeptree = json.loads(pipdeptree_run.stdout) used_packages = [dep["package"]['package_name'] for dep in pipdeptree] parser = piplicenses.create_parser() args = parser.parse_args( [ '--format', 'j', '--with-system', '--with-authors', '--with-urls', '--with-license-file', '--packages', *used_packages, ] ) licenses_output = piplicenses.create_output_string(args) licenses = validated_loads(licenses_output) return licenses def _fetch_license_from_url(self, url: str) -> str: response = self.request_session.get(url, timeout=self.REQUEST_TIMEOUT_S) response.raise_for_status() return response.text def _get_single_license(self, module_dict: dict): license_ = module_dict['LicenseText'] module_name = module_dict['Name'] if module_name == 'rst2ansi': # this one module is problematic, we need to extract the license text from its docstring assert license_ == piplicenses.LICENSE_UNKNOWN # let's make sure they didn't fix it license_ = rst2ansi.__doc__ assert 'MIT License' in license_ # let's make sure the license is still there elif module_name == 'b2sdk': license_ = (pathlib.Path(b2sdk.__file__).parent / 'LICENSE').read_text() else: license_url = self.LICENSES.get(module_name) or self.LICENSES.get( module_dict.get('URL') ) if license_url: license_ = self._fetch_license_from_url(license_url) assert license_ != piplicenses.LICENSE_UNKNOWN, module_name return license_ class InstallAutocomplete(Command): """ Installs autocomplete for supported shells. Autocomplete is installed for the current user only and will become available after shell reload. Any existing autocomplete configuration for same executable name will be overwritten. --shell SHELL Shell to install autocomplete for. Autodetected if not specified. 
Manually specify "bash" to force bash autocomplete installation when running under a different shell. .. note:: This command WILL modify your shell configuration file (e.g. ~/.bashrc). """ REQUIRES_AUTH = False @classmethod def _setup_parser(cls, parser): add_normalized_argument(parser, '--shell', choices=SUPPORTED_SHELLS, default=None) super()._setup_parser(parser) def _run(self, args): shell = args.shell or detect_shell() if shell not in SUPPORTED_SHELLS: self._print_stderr( f'ERROR: unsupported shell: {shell}. Supported shells: {SUPPORTED_SHELLS}. Use --shell to specify a target shell manually.' ) return 1 try: autocomplete_install(self.console_tool.b2_binary_name, shell=shell) except AutocompleteInstallError as e: raise CommandError(str(e)) from e self._print(f'Autocomplete successfully installed for {shell}.') self._print( f'Spawn a new shell instance to use it (log in again or just type `{shell}` in your current shell to start a new session inside of the existing session).' ) return 0 class NotificationRulesWarningMixin(Described): """ .. warning:: The Event Notifications feature is in "Private Preview" state and may change without notice. See https://www.backblaze.com/blog/announcing-event-notifications/ for details. """ class NotificationRules(NotificationRulesWarningMixin, Command): """ Bucket notification rules management subcommands. {NotificationRulesWarningMixin} For more information on each subcommand, use ``{NAME} notification-rules SUBCOMMAND --help``. Examples: .. code-block:: {NAME} notification-rules create b2://bucketName/optionalSubPath/ ruleName --event-type "b2:ObjectCreated:*" --webhook-url https://example.com/webhook {NAME} notification-rules list b2://bucketName {NAME} notification-rules update b2://bucketName/newPath/ ruleName --disable --event-type "b2:ObjectCreated:*" --event-type "b2:ObjectHidden:*" {NAME} notification-rules delete b2://bucketName ruleName """ subcommands_registry = ClassRegistry(attr_name='COMMAND_NAME') @NotificationRules.subcommands_registry.register class NotificationRulesList(JSONOptionMixin, NotificationRulesWarningMixin, Command): """ Allows listing bucket notification rules of the given bucket. {NotificationRulesWarningMixin} {JSONOptionMixin} Examples: .. code-block:: {NAME} notification-rules list b2://bucketName Requires capability: - **readBucketNotifications** """ COMMAND_NAME = 'list' @classmethod def _setup_parser(cls, parser): add_b2_uri_argument( parser, help= "B2 URI of the bucket with optional path prefix, e.g.
b2://bucketName or b2://bucketName/optionalSubPath/" ) super()._setup_parser(parser) def _run(self, args): bucket = self.api.get_bucket_by_name(args.B2_URI.bucket_name) rules = sorted( ( rule for rule in bucket.get_notification_rules() if rule["objectNamePrefix"].startswith(args.B2_URI.path) ), key=lambda rule: rule["name"] ) if args.json: self._print_json(rules) else: if rules: self._print(f'Notification rules for {args.B2_URI} :') self._print_human_readable_structure(rules) else: self._print(f'No notification rules for {args.B2_URI}') return 0 class NotificationRulesCreateBase(JSONOptionMixin, NotificationRulesWarningMixin, Command): @classmethod def _validate_secret(cls, value: str) -> str: if not re.match(r'^[a-zA-Z0-9]{32}$', value): raise argparse.ArgumentTypeError( f'the secret has to be exactly 32 alphanumeric characters, got: {value!r}' ) return value @classmethod def setup_rule_fields_parser(cls, parser, creation: bool): add_b2_uri_argument( parser, help= "B2 URI of the bucket with optional path prefix, e.g. b2://bucketName or b2://bucketName/optionalSubPath/" ) parser.add_argument('ruleName', help="Name of the rule") parser.add_argument( '--event-type', action='append', help= "Events scope, e.g., 'b2:ObjectCreated:*'. Can be used multiple times to set multiple scopes.", required=creation ) parser.add_argument( '--webhook-url', help="URL to send the notification to", required=creation ) parser.add_argument( '--sign-secret', help="optional signature key consisting of 32 alphanumeric characters ", type=cls._validate_secret, default=None, ) parser.add_argument( '--custom-header', action='append', help= "Custom header to be sent with the notification. Can be used multiple times to set multiple headers. Format: HEADER_NAME=VALUE" ) parser.add_argument( '--enable', action='store_true', help="Flag to enable the notification rule", default=None ) parser.add_argument( '--disable', action='store_false', help="Flag to disable the notification rule", dest='enable' ) def get_rule_from_args(self, args): custom_headers = None if args.custom_header is not None: custom_headers = {} for header in args.custom_header: try: name, value = header.split('=', 1) except ValueError: name, value = header, '' custom_headers[name] = value rule = { 'name': args.ruleName, 'eventTypes': args.event_type, 'isEnabled': args.enable, 'objectNamePrefix': args.B2_URI.path, 'targetConfiguration': { 'url': args.webhook_url, 'customHeaders': custom_headers, 'hmacSha256SigningSecret': args.sign_secret, }, } return filter_out_empty_values(rule) def print_rule(self, args, rule): if args.json: self._print_json(rule) else: self._print_human_readable_structure(rule) class NotificationRulesUpdateBase(NotificationRulesCreateBase): def _run(self, args): bucket = self.api.get_bucket_by_name(args.B2_URI.bucket_name) rules_by_name = {rule["name"]: rule for rule in bucket.get_notification_rules()} rule = rules_by_name.get(args.ruleName) if not rule: raise CommandError( f'rule with name {args.ruleName!r} does not exist on bucket {bucket.name!r}, ' f'available rules: {sorted(rules_by_name)}' ) rules_by_name[args.ruleName] = override_dict( rule, self.get_rule_from_args(args), ) rules = bucket.set_notification_rules( [notification_rule_response_to_request(rule) for rule in rules_by_name.values()] ) rule = next(rule for rule in rules if rule["name"] == args.ruleName) self.print_rule(args=args, rule=rule) return 0 @NotificationRules.subcommands_registry.register class NotificationRulesCreate(NotificationRulesCreateBase): """ Allows creating 
bucket notification rules for the given bucket. {NotificationRulesWarningMixin} Examples: .. code-block:: {NAME} notification-rules create b2://bucketName/optionalSubPath/ ruleName --event-type "b2:ObjectCreated:*" --webhook-url https://example.com/webhook Requires capability: - **readBucketNotifications** - **writeBucketNotifications** """ COMMAND_NAME = 'create' NEW_RULE_DEFAULTS = { 'isEnabled': True, 'objectNamePrefix': '', 'targetConfiguration': { 'targetType': 'webhook', }, } @classmethod def _setup_parser(cls, parser): cls.setup_rule_fields_parser(parser, creation=True) super()._setup_parser(parser) def _run(self, args): bucket = self.api.get_bucket_by_name(args.B2_URI.bucket_name) rules_by_name = {rule["name"]: rule for rule in bucket.get_notification_rules()} if args.ruleName in rules_by_name: raise CommandError( f'rule with name {args.ruleName!r} already exists on bucket {bucket.name!r}' ) rule = override_dict( self.NEW_RULE_DEFAULTS, self.get_rule_from_args(args), ) rules_by_name[args.ruleName] = rule rules = bucket.set_notification_rules( [ notification_rule_response_to_request(rule) for rule in sorted(rules_by_name.values(), key=lambda r: r["name"]) ] ) rule = next(rule for rule in rules if rule["name"] == args.ruleName) self.print_rule(args=args, rule=rule) return 0 @NotificationRules.subcommands_registry.register class NotificationRulesUpdate(NotificationRulesUpdateBase): """ Allows updating a notification rule of the given bucket. {NotificationRulesWarningMixin} Examples: .. code-block:: {NAME} notification-rules update b2://bucketName/newPath/ ruleName --disable --event-type "b2:ObjectCreated:*" --event-type "b2:ObjectHidden:*" {NAME} notification-rules update b2://bucketName/newPath/ ruleName --enable Requires capability: - **readBucketNotifications** - **writeBucketNotifications** """ COMMAND_NAME = 'update' @classmethod def _setup_parser(cls, parser): cls.setup_rule_fields_parser(parser, creation=False) super()._setup_parser(parser) @NotificationRules.subcommands_registry.register class NotificationRulesEnable(NotificationRulesUpdateBase): """ Allows enabling a notification rule of the given bucket. {NotificationRulesWarningMixin} Examples: .. code-block:: {NAME} notification-rules enable b2://bucketName/ ruleName Requires capability: - **readBucketNotifications** - **writeBucketNotifications** """ COMMAND_NAME = 'enable' @classmethod def _setup_parser(cls, parser): add_b2_uri_argument( parser, help="B2 URI of the bucket to enable the rule for, e.g. b2://bucketName" ) parser.add_argument('ruleName', help="Name of the rule to enable") super()._setup_parser(parser) def get_rule_from_args(self, args): logger.warning("WARNING: ignoring path from %r", args.B2_URI) return {'name': args.ruleName, 'isEnabled': True} @NotificationRules.subcommands_registry.register class NotificationRulesDisable(NotificationRulesUpdateBase): """ Allows disabling a notification rule of the given bucket. {NotificationRulesWarningMixin} Examples: .. code-block:: {NAME} notification-rules disable b2://bucketName/ ruleName Requires capability: - **readBucketNotifications** - **writeBucketNotifications** """ COMMAND_NAME = 'disable' @classmethod def _setup_parser(cls, parser): add_b2_uri_argument( parser, help="B2 URI of the bucket to disable the rule for, e.g.
b2://bucketName" ) parser.add_argument('ruleName', help="Name of the rule to disable") super()._setup_parser(parser) def get_rule_from_args(self, args): logger.warning("WARNING: ignoring path from %r", args.B2_URI) return {'name': args.ruleName, 'isEnabled': False} @NotificationRules.subcommands_registry.register class NotificationRulesDelete(Command): """ Allows deleting a bucket notification rule from the given bucket. Requires capability: - **readBucketNotifications** - **writeBucketNotifications** """ COMMAND_NAME = 'delete' @classmethod def _setup_parser(cls, parser): add_b2_uri_argument( parser, help="B2 URI of the bucket to delete the rule from, e.g. b2://bucketName" ) parser.add_argument('ruleName', help="Name of the rule to delete") super()._setup_parser(parser) def _run(self, args): bucket = self.api.get_bucket_by_name(args.B2_URI.bucket_name) rules_by_name = {rule["name"]: rule for rule in bucket.get_notification_rules()} try: del rules_by_name[args.ruleName] except KeyError: raise CommandError( f'no such rule to delete: {args.ruleName!r}, ' f'available rules: {sorted(rules_by_name.keys())!r}; no rules have been deleted.' ) bucket.set_notification_rules( [notification_rule_response_to_request(rule) for rule in rules_by_name.values()] ) self._print(f'Rule {args.ruleName!r} has been deleted from {args.B2_URI}') return 0 class ConsoleTool: """ Implements the commands available in the B2 command-line tool using the B2Api library. Uses a ``b2sdk.SqliteAccountInfo`` object to keep account data between runs (unless authorization is performed via environment variables). """ def __init__(self, stdout, stderr): self.stdout = stdout self.stderr = stderr self.b2_binary_name = 'b2' def _get_default_escape_cc_setting(self): escape_cc_env_var = os.environ.get(B2_ESCAPE_CONTROL_CHARACTERS, None) if escape_cc_env_var is not None: if int(escape_cc_env_var) in (0, 1): return int(escape_cc_env_var) == 1 else: logger.warning( f"WARNING: invalid value for {B2_ESCAPE_CONTROL_CHARACTERS} environment variable, available options are 0 or 1 - will assume variable is not set" ) return self.stdout.isatty() def run_command(self, argv): signal.signal(signal.SIGINT, keyboard_interrupt_handler) self.b2_binary_name = resolve_b2_bin_call_name(argv) parser = B2.create_parser(name=self.b2_binary_name, b2_binary_name=self.b2_binary_name) AUTOCOMPLETE.cache_and_autocomplete(parser) args = parser.parse_args(argv[1:]) self._setup_logging(args, argv) if args.escape_control_characters is None: args.escape_control_characters = self._get_default_escape_cc_setting() if args.escape_control_characters: # in case any control characters slip through escaping, just delete them self.stdout = NoControlCharactersStdout(self.stdout) self.stderr = NoControlCharactersStdout(self.stderr) kwargs = {} with suppress(AttributeError): kwargs['save_to_buffer_size'] = args.write_buffer_size with suppress(AttributeError): kwargs['check_download_hash'] = not args.skip_hash_verification with suppress(AttributeError): kwargs['max_download_streams_per_file'] = args.max_download_streams_per_file self.api = self._initialize_b2_api(args=args, kwargs=kwargs) b2_command = B2(self) command_class = b2_command.run(args) command = command_class(self) if command.FORBID_LOGGING_ARGUMENTS: logger.info('starting command [%s] (arguments hidden)', command) else: logger.info('starting command [%s] with arguments: %s', command, argv) try: if command_class.REQUIRES_AUTH: auth_ret = self.authorize_from_env() if auth_ret: return auth_ret return command.run(args) except
MissingAccountData as e: logger.exception('ConsoleTool missing account data error') self._print_stderr( f'ERROR: {e} Use: {self.b2_binary_name} authorize-account or provide auth data with ' f'{B2_APPLICATION_KEY_ID_ENV_VAR!r} and {B2_APPLICATION_KEY_ENV_VAR!r} environment variables' ) return 1 except B2Error as e: logger.exception('ConsoleTool command error') self._print_stderr(f'ERROR: {e}') return 1 except KeyboardInterrupt: logger.exception('ConsoleTool command interrupt') self._print_stderr('\nInterrupted. Shutting down...\n') return 1 except Exception: logger.exception('ConsoleTool unexpected exception') raise @classmethod def _initialize_b2_api(cls, args: argparse.Namespace, kwargs: dict) -> B2Api: b2_api = None key_id, key = get_keyid_and_key_from_env_vars() if key_id and key: try: # here we initialize regular b2 api on disk and check whether it matches # the keys from env vars; if they indeed match then there's no need to # initialize in-memory account info because it's already stored on disk b2_api = _get_b2api_for_profile( profile=args.profile, raise_if_does_not_exist=True, **kwargs ) realm = os.environ.get(B2_ENVIRONMENT_ENV_VAR) or 'production' is_same_key_on_disk = b2_api.account_info.is_same_key(key_id, realm) except MissingAccountData: is_same_key_on_disk = False if not is_same_key_on_disk and args.command_class not in ( AuthorizeAccount, ClearAccount ): # when user specifies keys via env variables, we switch to in-memory account info return _get_inmemory_b2api(**kwargs) return b2_api or _get_b2api_for_profile(profile=args.profile, **kwargs) def authorize_from_env(self) -> int: key_id, key = get_keyid_and_key_from_env_vars() if key_id is None and key is None: return 0 if (key_id is None) or (key is None): self._print_stderr( f'Please provide both "{B2_APPLICATION_KEY_ENV_VAR}" and "{B2_APPLICATION_KEY_ID_ENV_VAR}" environment variables or none of them' ) return 1 realm = os.environ.get(B2_ENVIRONMENT_ENV_VAR) if self.api.account_info.is_same_key(key_id, realm or 'production'): return 0 logger.info('authorize-account is being run from env variables') return AuthorizeAccount(self).authorize(key_id, key, realm) def _print(self, *args, **kwargs): print(*args, file=self.stdout, **kwargs) def _print_stderr(self, *args, **kwargs): print(*args, file=self.stderr, **kwargs) @classmethod def _setup_logging(cls, args, argv): if args.log_config and (args.verbose or args.debug_logs): raise ValueError('Please provide either --log-config or --verbose/--debug-logs') if args.log_config: logging.config.fileConfig(args.log_config) elif args.verbose or args.debug_logs: # set log level to DEBUG for ALL loggers (even those not belonging to B2), but without any handlers, # those will be added as needed (file and/or stderr) logging.basicConfig(level=logging.DEBUG, handlers=[]) else: logger.setLevel(logging.CRITICAL + 1) # No logs!
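# Note: the basicConfig(level=DEBUG, handlers=[]) branch above deliberately attaches no handlers; records from every logger propagate to the root logger and are only emitted once the stderr and/or file handlers below are attached.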
if args.verbose: formatter = logging.Formatter('%(levelname)s:%(name)s:%(message)s') handler = logging.StreamHandler() handler.setFormatter(formatter) # logs from ALL loggers sent to stderr should be formatted this way logging.root.addHandler(handler) if args.debug_logs: formatter = logging.Formatter( '%(asctime)s\t%(process)d\t%(thread)d\t%(name)s\t%(levelname)s\t%(message)s' ) formatter.converter = time.gmtime handler = logging.FileHandler('b2_cli.log') handler.setFormatter(formatter) # logs from ALL loggers sent to the log file should be formatted this way logging.root.addHandler(handler) if not args.debug_logs and not args.verbose: warnings.showwarning = lambda message, category, *arg_, **_: print( f'{category.__name__}: {message}', file=sys.stderr ) logger.info(r'// %s %s %s \\', SEPARATOR, VERSION.center(8), SEPARATOR) logger.debug('platform is %s', platform.platform()) logger.debug( 'Python version is %s %s', platform.python_implementation(), sys.version.replace('\n', ' ') ) logger.debug('b2sdk version is %s', b2sdk_version) logger.debug('locale is %s', locale.getlocale()) logger.debug('filesystem encoding is %s', sys.getfilesystemencoding()) # used by Sphinx get_parser = functools.partial(B2.create_parser, for_docs=True) def main(): ct = ConsoleTool(stdout=sys.stdout, stderr=sys.stderr) exit_status = ct.run_command(sys.argv) logger.info('\\\\ %s %s %s //', SEPARATOR, ('exit=%s' % exit_status).center(8), SEPARATOR) # I haven't tracked down the root cause yet, but in Python 2.7, the futures # packages is hanging on exit sometimes, waiting for a thread to finish. # This happens when using sync to upload files. sys.stdout.flush() sys.stderr.flush() logging.shutdown() os._exit(exit_status) if __name__ == '__main__': main() B2_Command_Line_Tool-3.19.1/b2/_internal/json_encoder.py000066400000000000000000000020031461201031300227340ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/json_encoder.py # # Copyright 2020 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import json from enum import Enum from b2sdk.v2 import Bucket, DownloadVersion, FileIdAndName, FileVersion class B2CliJsonEncoder(json.JSONEncoder): """ Makes it possible to serialize b2sdk objects (specifically bucket['options'] set and FileVersionInfo/FileIdAndName) to json. >>> json.dumps(set([1,2,3,'a','b','c']), cls=json_encoder.B2CliJsonEncoder) '[1, 2, 3, "c", "b", "a"]' >>> """ def default(self, obj): if isinstance(obj, set): return list(obj) elif isinstance(obj, (DownloadVersion, FileVersion, FileIdAndName, Bucket)): return obj.as_dict() elif isinstance(obj, Enum): return obj.value return super().default(obj) B2_Command_Line_Tool-3.19.1/b2/_internal/version.py000066400000000000000000000006631461201031300217630ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/version.py # # Copyright 2019 Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### try: from importlib.metadata import version except ModuleNotFoundError: from importlib_metadata import version VERSION = version('b2') B2_Command_Line_Tool-3.19.1/b2/_internal/version_listing.py000066400000000000000000000016261461201031300235140ustar00rootroot00000000000000###################################################################### # # File: b2/_internal/version_listing.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import pathlib import re from typing import List RE_VERSION = re.compile(r'[_]*b2v(\d+)') def get_versions() -> List[str]: return [path.name for path in sorted(pathlib.Path(__file__).parent.glob('*b2v*'))] def get_int_version(version: str) -> int: match = RE_VERSION.match(version) assert match, f'Version {version} does not match pattern {RE_VERSION.pattern}' return int(match.group(1)) CLI_VERSIONS = get_versions() UNSTABLE_CLI_VERSION = max(CLI_VERSIONS, key=get_int_version) LATEST_STABLE_VERSION = max( [elem for elem in CLI_VERSIONS if not elem.startswith('_')], key=get_int_version ) B2_Command_Line_Tool-3.19.1/changelog.d/000077500000000000000000000000001461201031300176125ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/changelog.d/.gitkeep000066400000000000000000000000001461201031300212310ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/contrib/000077500000000000000000000000001461201031300171015ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/contrib/color-b2-logs.sh000077500000000000000000000007461461201031300220300ustar00rootroot00000000000000#!/bin/bash -eu awk -F '\t' '{print $1 " " $4 " " $5 " " $6}' | colorex --green=DEBUG \ --bgreen=INFO \ --bred=ERROR \ --byellow=WARNING \ --bmagenta='calling [\w\.]+' \ --bblue='INFO // =+ [0-9\.]+ =+ \\' \ --bblue='INFO // =+ [0-9\.]+ =+ \\' \ --bblue='starting command .* with arguments:' \ --bblue='starting command .* \(arguments hidden\)' \ --red=Traceback \ --green='\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d,\d\d\d' \ --cyan='b2\.sync' B2_Command_Line_Tool-3.19.1/contrib/debug_logs.ini000066400000000000000000000013101461201031300217070ustar00rootroot00000000000000############################################################ [loggers] keys=root,b2 [logger_root] level=DEBUG handlers=fileHandler [logger_b2] level=DEBUG handlers=fileHandler qualname=b2 propagate=0 ############################################################ [handlers] keys=fileHandler [handler_fileHandler] class=logging.handlers.TimedRotatingFileHandler level=DEBUG formatter=simpleFormatter args=('b2_cli.log', 'midnight') ############################################################ [formatters] keys=simpleFormatter [formatter_simpleFormatter] format=%(asctime)s %(process)d %(thread)d %(name)s %(levelname)s %(message)s datefmt= ############################################################ B2_Command_Line_Tool-3.19.1/contrib/macos/000077500000000000000000000000001461201031300202035ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/contrib/macos/entitlements.plist000066400000000000000000000005361461201031300237770ustar00rootroot00000000000000 com.apple.security.cs.allow-unsigned-executable-memory com.apple.security.cs.disable-library-validation 
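A quick sketch of how ``version_listing.py`` above resolves the latest stable and unstable CLI versions (the directory names below are hypothetical; the real list comes from globbing ``*b2v*`` next to the module, and an underscore prefix marks a version as unstable):

.. code-block:: python

    import re

    RE_VERSION = re.compile(r'[_]*b2v(\d+)')

    def get_int_version(version: str) -> int:
        match = RE_VERSION.match(version)
        assert match, f'Version {version} does not match pattern {RE_VERSION.pattern}'
        return int(match.group(1))

    cli_versions = ['b2v3', 'b2v4', '_b2v5']  # hypothetical directory names

    unstable = max(cli_versions, key=get_int_version)  # '_b2v5'
    stable = max(
        [v for v in cli_versions if not v.startswith('_')], key=get_int_version
    )  # 'b2v4'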
B2_Command_Line_Tool-3.19.1/doc/000077500000000000000000000000001461201031300162065ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/doc/bash_completion.md000066400000000000000000000002601461201031300216740ustar00rootroot00000000000000Install bash completion by running: ```sh b2 install-autocomplete ``` For support of other shells see https://pypi.org/project/argcomplete/#activating-global-completion . B2_Command_Line_Tool-3.19.1/doc/source/000077500000000000000000000000001461201031300175065ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/doc/source/commands.rst000066400000000000000000000002301461201031300220340ustar00rootroot00000000000000######################################### Commands ######################################### .. toctree:: :maxdepth: 2 :glob: subcommands/* B2_Command_Line_Tool-3.19.1/doc/source/conf.py000066400000000000000000000164041461201031300210120ustar00rootroot00000000000000###################################################################### # # File: doc/source/conf.py # # Copyright 2019, Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### # -*- coding: utf-8 -*- # # B2_Command_Line_Tool documentation build configuration file, created by # sphinx-quickstart on Fri Oct 20 18:27:20 2017. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import datetime import importlib import os import re import sys import textwrap from os import path sys.path.insert(0, os.path.abspath('../..')) from b2._internal.version import VERSION from b2._internal.version_listing import LATEST_STABLE_VERSION B2 = importlib.import_module(f'b2._internal.{LATEST_STABLE_VERSION}.registry').B2 # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'sphinx.ext.coverage', 'sphinxarg.ext' ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. project = 'B2_Command_Line_Tool' year = datetime.date.today().strftime("%Y") author = 'Backblaze' copyright = f'{year}, {author}' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = VERSION.rsplit('.', 1)[0] # The full version, including alpha/beta/rc tags. release = VERSION # The language for content autogenerated by Sphinx. 
Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = os.environ.get('B2_SPHINX_TODO', False) and True # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { 'prev_next_buttons_location': 'both', 'collapse_navigation': True, } autodoc_default_options = { 'member-order': 'bysource', 'exclude-members': '__weakref__, _abc_cache, _abc_negative_cache, _abc_negative_cache_version, _abc_registry, _abc_impl', 'members': True, 'undoc-members': True, } # yapf: disable # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". #html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # This is required for the alabaster theme # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars html_sidebars = { '**': [ 'about.html', 'navigation.html', 'relations.html', # needs 'show_related': True theme option to display 'searchbox.html', 'donate.html', ] } # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. htmlhelp_basename = 'B2_Command_Line_Tooldoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ( master_doc, 'B2_Command_Line_Tool.tex', 'B2\\_Command\\_Line\\_Tool Documentation', 'Backblaze', 'manual' ), ] # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'b2_command_line_tool', 'B2_Command_Line_Tool Documentation', [author], 1) ] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, 'B2_Command_Line_Tool', 'B2_Command_Line_Tool Documentation', author, 'B2_Command_Line_Tool', 'One line description of project.', 'Miscellaneous' ), ] # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'python3': ('https://docs.python.org/3', None)} white_spaces_start = re.compile(r'^\s*') def setup(_): """ Building docs requires dumping the main CLI help text to an .rst file before the actual build. This is the most reasonable way to piggy back that behaviour. Checking if the new file contents would the same as the old one (if any) is important, so that the automatic file-watcher/doc-builder doesn't fall into an endless loop. """ main_help_text = str(B2.lazy_get_description(NAME='b2')) main_help_text = textwrap.dedent(main_help_text) main_help_path = path.join(path.dirname(__file__), 'main_help.rst') if path.exists(main_help_path): with open(main_help_path) as main_help_file: if main_help_file.read() == main_help_text: return with open(main_help_path, 'w') as main_help_file: main_help_file.write(main_help_text) B2_Command_Line_Tool-3.19.1/doc/source/index.rst000066400000000000000000000011561461201031300213520ustar00rootroot00000000000000.. note:: **Event Notifications** feature is now in **Private Preview**. See https://www.backblaze.com/blog/announcing-event-notifications/ for details. ######################################### Overview ######################################### .. include:: main_help.rst ######################################### Documentation index ######################################### .. toctree:: :maxdepth: 2 :glob: quick_start commands replication ######################################### Indices and tables ######################################### * :ref:`genindex` * :ref:`modindex` * :ref:`search` B2_Command_Line_Tool-3.19.1/doc/source/quick_start.rst000066400000000000000000000037401461201031300225750ustar00rootroot00000000000000.. _quick_start: ######################## Quick Start Guide ######################## .. _prepare_b2cli: *********************** Prepare B2 cli *********************** .. code-block:: sh $ b2 authorize-account 4ab123456789 001aabbccddeeff123456789012345678901234567 Using https://api.backblazeb2.com .. tip:: Get credentials from `B2 website `_ .. warning:: Local users might be able to access your process list and read command arguments. To avoid exposing credentials, you can provide application key ID and application key using environment variables ``B2_APPLICATION_KEY_ID`` and ``B2_APPLICATION_KEY`` respectively. Those will be picked up automatically, so after defining those you'll just need to run ``b2 authorize-account`` with no extra parameters. .. code-block:: sh $ export B2_APPLICATION_KEY_ID="$(`. *********************** Automatic setup *********************** Setup replication ================= .. code-block:: sh $ b2 replication-setup --destination-profile myprofile2 my-bucket my-bucket2 You can optionally choose source rule priority and source rule name. See :ref:`replication-setup command `. .. note:: ``replication-setup`` will reuse or provision a source key with no prefix and full reading capabilities and a destination key with no prefix and full writing capabilities .. _replication_manual_setup: *************** Manual setup *************** Setup source key ================ .. 
code-block:: sh $ b2 create-key my-bucket-rplsrc readFiles,readFileLegalHolds,readFileRetentions 0014ab1234567890000000123 K001ZA12345678901234567890ABCDE Setup source replication ======================== .. code-block:: sh $ b2 update-bucket --replication '{ "asReplicationSource": { "replicationRules": [ { "destinationBucketId": "85644d98debc657d880b0e1e", "fileNamePrefix": "files-to-share/", "includeExistingFiles": false, "isEnabled": true, "priority": 128, "replicationRuleName": "my-replication-rule-name" } ], "sourceApplicationKeyId": "0014ab1234567890000000123" } }' my-bucket Setup destination key ===================== .. code-block:: sh $ b2 create-key --profile myprofile2 my-bucket-rpldst writeFiles,writeFileLegalHolds,writeFileRetentions,deleteFiles 0024ab2345678900000000234 K001YYABCDE12345678901234567890 Setup destination replication ============================= .. code-block:: sh $ b2 update-bucket --profile myprofile2 --replication '{ "asReplicationDestination": { "sourceToDestinationKeyMapping": { "0014ab1234567890000000123": "0024ab2345678900000000234" } } }' my-bucket B2_Command_Line_Tool-3.19.1/doc/source/subcommands/000077500000000000000000000000001461201031300220215ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/doc/source/subcommands/authorize_account.rst000066400000000000000000000002471461201031300263040ustar00rootroot00000000000000Authorize-account command ************************* .. argparse:: :module: b2._internal.console_tool :func: get_parser :prog: b2 :path: authorize-account B2_Command_Line_Tool-3.19.1/doc/source/subcommands/cancel_all_unfinished_large_files.rst000066400000000000000000000003271461201031300314020ustar00rootroot00000000000000Cancel-all-unfinished-large-files command ***************************************** .. argparse:: :module: b2._internal.console_tool :func: get_parser :prog: b2 :path: cancel-all-unfinished-large-files B2_Command_Line_Tool-3.19.1/doc/source/subcommands/cancel_large_file.rst000066400000000000000000000002471461201031300261540ustar00rootroot00000000000000Cancel-large-file command ************************* .. argparse:: :module: b2._internal.console_tool :func: get_parser :prog: b2 :path: cancel-large-file B2_Command_Line_Tool-3.19.1/doc/source/subcommands/cat.rst000066400000000000000000000002021461201031300233140ustar00rootroot00000000000000Cat command **************** .. argparse:: :module: b2._internal.console_tool :func: get_parser :prog: b2 :path: cat B2_Command_Line_Tool-3.19.1/doc/source/subcommands/clear_account.rst000066400000000000000000000002331461201031300253530ustar00rootroot00000000000000Clear-account command ********************* .. argparse:: :module: b2._internal.console_tool :func: get_parser :prog: b2 :path: clear-account B2_Command_Line_Tool-3.19.1/doc/source/subcommands/copy_file_by_id.rst000066400000000000000000000002411461201031300256670ustar00rootroot00000000000000Copy-file-by-id command *********************** .. argparse:: :module: b2._internal.console_tool :func: get_parser :prog: b2 :path: copy-file-by-id B2_Command_Line_Tool-3.19.1/doc/source/subcommands/create_bucket.rst000066400000000000000000000002331461201031300253510ustar00rootroot00000000000000Create-bucket command ********************* .. argparse:: :module: b2._internal.console_tool :func: get_parser :prog: b2 :path: create-bucket B2_Command_Line_Tool-3.19.1/doc/source/subcommands/create_key.rst000066400000000000000000000002221461201031300246620ustar00rootroot00000000000000Create-key command ****************** .. 
argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: create-key

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/delete_bucket.rst

Delete-bucket command
*********************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: delete-bucket

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/delete_file_version.rst

Delete-file-version command
***************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: delete-file-version

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/delete_key.rst

Delete-key command
******************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: delete-key

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/download_file.rst

Download-file command
***************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: download-file

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/download_file_by_id.rst

Download-file-by-id command
***************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: download-file-by-id

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/download_file_by_name.rst

Download-file-by-name command
*****************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: download-file-by-name

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/file_info.rst

File-info command
*********************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: file-info

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/get_account_info.rst

Get-account-info command
************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: get-account-info

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/get_bucket.rst

Get-bucket command
******************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: get-bucket

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/get_download_auth.rst

Get-download-auth command
*************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: get-download-auth

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/get_download_url_with_auth.rst

Get-download-url-with-auth command
**********************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: get-download-url-with-auth

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/get_file_info.rst

Get-file-info command
*********************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: get-file-info

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/get_url.rst

Get-url command
****************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: get-url

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/hide_file.rst

Hide-file command
*****************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: hide-file

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/install_autocomplete.rst

install-autocomplete command
****************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: install-autocomplete

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/list_buckets.rst

List-buckets command
********************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: list-buckets

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/list_keys.rst

List-keys command
*****************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: list-keys

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/list_parts.rst

List-parts command
******************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: list-parts

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/list_unfinished_large_files.rst

List-unfinished-large-files command
***********************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: list-unfinished-large-files

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/ls.rst

Ls command
**********

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: ls

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/make_friendly_url.rst

Make-friendly-url command
*************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: make-friendly-url

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/make_url.rst

Make-url command
****************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: make-url

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/replication-setup.rst

.. _replication_setup_command:

replication-setup command
*************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: replication-setup

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/rm.rst

Rm command
**********

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: rm

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/sync.rst

Sync command
************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: sync

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/update_bucket.rst

Update-bucket command
*********************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: update-bucket

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/update_file_legal_hold.rst

Update-file-legal-hold command
******************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: update-file-legal-hold

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/update_file_retention.rst

Update-file-retention command
*****************************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: update-file-retention

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/upload_file.rst

Upload-file command
*******************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: upload-file

B2_Command_Line_Tool-3.19.1/doc/source/subcommands/version.rst

Version command
***************

.. argparse::
    :module: b2._internal.console_tool
    :func: get_parser
    :prog: b2
    :path: version

B2_Command_Line_Tool-3.19.1/noxfile.py

######################################################################
#
# File: noxfile.py
#
# Copyright 2020 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################
from __future__ import annotations

import datetime
import hashlib
import os
import pathlib
import platform
import re
import string
import subprocess

import nox

# Required for PDM to use nox's virtualenvs
os.environ["PDM_IGNORE_SAVED_PYTHON"] = "1"
os.environ["PDM_NO_LOCK"] = "1"

UPSTREAM_REPO_URL = 'git@github.com:Backblaze/B2_Command_Line_Tool.git'

CI = os.environ.get('CI') is not None
CD = CI and (os.environ.get('CD') is not None)
INSTALL_SDK_FROM = os.environ.get('INSTALL_SDK_FROM')
NO_STATICX = os.environ.get('NO_STATICX') is not None
NOX_PYTHONS = os.environ.get('NOX_PYTHONS')

PYTHON_VERSIONS = [
    'pypy3.9',
    'pypy3.10',
    '3.7',
    '3.8',
    '3.9',
    '3.10',
    '3.11',
    '3.12',
] if NOX_PYTHONS is None else NOX_PYTHONS.split(',')


def _detect_python_nox_id() -> str:
    major, minor, *_ = platform.python_version_tuple()
    python_nox_id = f"{major}.{minor}"
    if platform.python_implementation() == 'PyPy':
        python_nox_id = f"pypy{python_nox_id}"
    return python_nox_id


if CI and not NOX_PYTHONS:
    # this is done to allow it to work even if `nox -p` was passed to nox
    PYTHON_VERSIONS = [_detect_python_nox_id()]
    print(f"CI job mode; using provided interpreter only; PYTHON_VERSIONS={PYTHON_VERSIONS!r}")

PYTHON_DEFAULT_VERSION = PYTHON_VERSIONS[-1]

PY_PATHS = ['b2', 'test', 'noxfile.py']

DOCKER_TEMPLATE = pathlib.Path('./Dockerfile.template')

SYSTEM = platform.system().lower()

WINDOWS_TIMESTAMP_SERVER = 'http://timestamp.digicert.com'
WINDOWS_SIGNTOOL_PATH = 'C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe'

nox.options.reuse_existing_virtualenvs = not CI
nox.options.sessions = [
    'lint',
    'test',
]

PYTEST_GLOBAL_ARGS = []
if CI:
    PYTEST_GLOBAL_ARGS.append("-vv")


def pdm_install(
    session: nox.Session, *groups: str, dev: bool = True, editable: bool = False
) -> None:
    args = []
    if not dev:
        args.append('--prod')
    if not editable:
        args.append('--no-editable')
    for group in groups:
        args.extend(['--group', group])
    session.run('pdm', 'install', *args, external=True)


def github_output(name, value, *, secret=False):
    gh_output_path = os.environ.get('GITHUB_OUTPUT')
    if secret:
        print(f"::add-mask::{value}")
    if gh_output_path:
        with open(gh_output_path, "a") as file:
            file.write(f"{name}={value}\n")
    else:
        print(f"github_output {name}={'******' if secret else value}")


def get_version_key(path: pathlib.Path) -> int:
    version_name = path.name

    # There is no version 0, thus we can assign it to the element starting with an underscore.
    if version_name.startswith('_'):
        return 0

    version_match = re.match(r'[_]*b2v(\d+)', version_name)
    assert version_match, f'Version {version_name} does not match the B2 CLI version pattern.'
    version_number = int(version_match.group(1))
    return version_number


def get_versions() -> list[str]:
    """
    "Almost" a copy of b2/_internal/version_listing.py:get_versions(), because
    importing the file directly seems impossible from the noxfile.
    """
    # This sorting ensures that:
    # - the first element is the latest unstable version (starts with an underscore)
    # - the last element is the latest stable version (highest version number)
    return [
        path.name for path in sorted(
            (pathlib.Path(__file__).parent / 'b2' / '_internal').glob('*b2v*'),
            key=get_version_key,
        )
    ]
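
# A hedged illustration of the ordering contract above (the directory names
# here are invented; the real set depends on what exists under b2/_internal):
#
#   >>> names = [pathlib.Path(n) for n in ('b2v4', 'b2v3', '_b2v4')]
#   >>> [p.name for p in sorted(names, key=get_version_key)]
#   ['_b2v4', 'b2v3', 'b2v4']
#
# The underscore-prefixed (unstable) entry maps to key 0 and sorts first, while
# the highest stable version number sorts last. A plain string sort would not
# give this order once two-digit versions appear ('b2v10' sorts before 'b2v9').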
""" # This sorting ensures that: # - the first element is the latest unstable version (starts with an underscore) # - the last element is the latest stable version (highest version number) return [ path.name for path in sorted( (pathlib.Path(__file__).parent / 'b2' / '_internal').glob('*b2v*'), key=get_version_key, ) ] @nox.session(name='format', python=PYTHON_DEFAULT_VERSION) def format_(session): """Lint the code and apply fixes in-place whenever possible.""" pdm_install(session, 'format') # TODO: incremental mode for yapf session.run('yapf', '--in-place', '--parallel', '--recursive', *PY_PATHS) session.run('ruff', 'check', '--fix', *PY_PATHS) # session.run( # 'docformatter', # '--in-place', # '--recursive', # '--wrap-summaries=100', # '--wrap-descriptions=100', # *PY_PATHS, # ) @nox.session(python=PYTHON_DEFAULT_VERSION) def lint(session): """Run linters in readonly mode.""" pdm_install(session, 'lint', 'doc', 'full', 'license') session.run('yapf', '--diff', '--parallel', '--recursive', *PY_PATHS) session.run('ruff', 'check', *PY_PATHS) # session.run( # 'docformatter', # '--check', # '--recursive', # '--wrap-summaries=100', # '--wrap-descriptions=100', # *PY_PATHS, # ) session.run('pytest', 'test/static', *PYTEST_GLOBAL_ARGS) session.run('liccheck', '-s', 'pyproject.toml') session.run('pdm', 'lock', '--check', external=True) @nox.session(python=PYTHON_VERSIONS) def unit(session): """Run unit tests.""" pdm_install(session, 'test') command = [ 'pytest', '-n', 'auto', '--cov=b2', '--cov-branch', '--cov-report=xml', '--doctest-modules', *PYTEST_GLOBAL_ARGS, *session.posargs, 'test/unit', ] versions = get_versions() session.run(*command, '--cli', versions[0]) command.append('--cov-append') if not session.posargs: session.notify('cover') for cli_version in versions[1:]: session.run(*command, '--cli', cli_version) def run_integration_test(session, pytest_posargs): """Run integration tests.""" pdm_install(session, 'license', 'test') command = [ 'pytest', 'test/integration', '-s', '-n', '2' if CI else 'auto', '--log-level', 'INFO', '-W', 'ignore::DeprecationWarning:rst2ansi.visitor:', *PYTEST_GLOBAL_ARGS, *pytest_posargs, ] # sut can be provided explicitly (like in docker) or like `"--sut=path/b2"`. provided_sut = any('--sut' in elem for elem in pytest_posargs) # If `sut` was provided, we just run this one. # If not, we're running the test on all known versions. if provided_sut: session.run(*command) else: versions = get_versions() for cli_version in versions: # If we're in a virtualenv, we want to extract the path to the executable # that's installed in the virtualenv. This may not be elegant but shutil # gives us a cross-platform solution out of the box. 

@nox.session(python=PYTHON_VERSIONS)
def integration(session):
    """Run integration tests."""
    run_integration_test(session, session.posargs)


@nox.session(python=PYTHON_VERSIONS)
def test(session):
    """Run all tests."""
    if session.python:
        session.notify(f'unit-{session.python}')
        session.notify(f'integration-{session.python}')
    else:
        session.notify('unit')
        session.notify('integration')


@nox.session(python=PYTHON_DEFAULT_VERSION)
def cleanup_buckets(session):
    """Remove buckets from previous test runs."""
    pdm_install(session, 'test')
    session.run(
        'pytest', '-s', '-x', *PYTEST_GLOBAL_ARGS, *session.posargs,
        'test/integration/cleanup_buckets.py'
    )


@nox.session
def cover(session):
    """Perform coverage analysis."""
    pdm_install(session, 'test')
    session.run('coverage', 'report', '--fail-under=75', '--show-missing', '--skip-covered')
    session.run('coverage', 'erase')


@nox.session(python=PYTHON_DEFAULT_VERSION)
def build(session):
    """Build the distribution."""
    session.run('nox', '-s', 'dump_license', '-fb', 'venv', external=True)
    session.run('pdm', 'build', external=True)

    # Paths have to be specified with unix-style slashes, even on Windows,
    # otherwise glob won't find the files on Windows in action-gh-release.
    github_output('asset_path', 'dist/*')

    version = os.environ['GITHUB_REF'].replace('refs/tags/v', '')
    github_output('version', version)


@nox.session(python=PYTHON_DEFAULT_VERSION)
def dump_license(session: nox.Session):
    pdm_install(session, 'license', editable=True)
    session.run('b2', 'license', '--dump', '--with-packages')


@nox.session(python=PYTHON_DEFAULT_VERSION)
def bundle(session: nox.Session):
    """Bundle the distribution."""
    # We're running dump_license in another session because:
    # 1. `b2 license --dump` dumps the licence where the module is installed.
    # 2. We don't want to install b2 as an editable module in the current session,
    #    because that would make `b2 versions` show the versions as editable.
    session.run('nox', '-s', 'dump_license', '-fb', 'venv', external=True)
    pdm_install(session, 'bundle', 'full')

    template_spec = string.Template(pathlib.Path('b2.spec.template').read_text())
    versions = get_versions()
    # It is assumed that the last element will be the "latest stable".
    for binary_name, version in [('b2', versions[-1])] + list(zip(versions, versions)):
        spec = template_spec.safe_substitute({
            'VERSION': version,
            'NAME': binary_name,
        })
        pathlib.Path(f'{binary_name}.spec').write_text(spec)

        session.run('pyinstaller', *session.posargs, f'{binary_name}.spec')

        if SYSTEM == 'linux' and not NO_STATICX:
            session.run(
                'staticx', '--no-compress', '--strip', '--loglevel', 'INFO',
                f'dist/{binary_name}', f'dist/{binary_name}-static'
            )
            session.run(
                'mv', '-f', f'dist/{binary_name}-static', f'dist/{binary_name}', external=True,
            )

    # Paths have to be specified with unix-style slashes, even on Windows,
    # otherwise glob won't find the files on Windows in action-gh-release.
    github_output('asset_path', 'dist/*')

    # Note: this should pick the shortest-named executable from the directory.
    # But, for a yet unknown reason, `./dist/b2` doesn't play well with `--sut` and the autocomplete.
    # For this reason, we're returning the "latest stable version" here instead.
    # The current implementation works fine up until version 10, when it will break.
    # By that time, we should have come back to picking the shortest-named binary (`b2`).
    executable = max(
        str(path) for path in pathlib.Path('dist').glob('*') if not path.name.startswith('_')
    )
    github_output('sut_path', executable)
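
# Why the max() pick above stops working at version 10: the dist/ paths are
# compared as plain strings, and lexicographically '9' > '1', e.g.:
#
#   >>> max(['dist/b2v9', 'dist/b2v10'])
#   'dist/b2v9'
#
# (a hedged illustration; the actual names depend on the generated spec files)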

@nox.session(python=False)
def sign(session):
    """Sign the bundled distribution (macOS and Windows only)."""

    def sign_windows(cert_file, cert_password):
        session.run('certutil', '-f', '-p', cert_password, '-importpfx', cert_file)
        for binary_name in ['b2'] + get_versions():
            session.run(
                WINDOWS_SIGNTOOL_PATH, 'sign',
                '/f', cert_file,
                '/p', cert_password,
                '/tr', WINDOWS_TIMESTAMP_SERVER,
                '/td', 'sha256',
                '/fd', 'sha256',
                f'dist/{binary_name}.exe',
                external=True,
            )
            session.run(
                WINDOWS_SIGNTOOL_PATH, 'verify', '/pa', '/all', f'dist/{binary_name}.exe',
                external=True,
            )

    if SYSTEM == 'windows':
        try:
            certificate_file, certificate_password = session.posargs
        except ValueError:
            session.error('pass the certificate file and the password as positional arguments')
            return
        sign_windows(certificate_file, certificate_password)
    elif SYSTEM == 'linux':
        session.log('signing is not supported for Linux')
    else:
        session.error(f'unrecognized platform: {SYSTEM}')

    # Append the OS name to all the binaries.
    for asset in pathlib.Path('dist').glob('*'):
        name = asset.stem
        ext = asset.suffix
        asset_path = f'dist/{name}-{SYSTEM}{ext}'
        session.run('mv', '-f', asset, asset_path, external=True)

    # Paths have to be specified with unix-style slashes, even on Windows,
    # otherwise glob won't find the files on Windows in action-gh-release.
    github_output('asset_path', 'dist/*')


def _calculate_hashes(
    file_path: pathlib.Path,
    algorithms: list[str],
) -> list[hashlib._Hash]:  # noqa
    read_size = 1024 * 1024
    hash_structures = [hashlib.new(algo) for algo in algorithms]

    with open(file_path, 'rb') as f:
        while True:
            buffer = f.read(read_size)
            if not buffer:
                break
            for hash_struct in hash_structures:
                hash_struct.update(buffer)

    return hash_structures


def _save_hashes(output_file: pathlib.Path, hashes: list[hashlib._Hash]) -> None:  # noqa
    longest_algo_name = max(len(elem.name) for elem in hashes)
    line_format = '{algo:<%s} {hash_value}' % longest_algo_name
    output_lines = []

    for hash_struct in hashes:
        hash_value = hash_struct.hexdigest()
        output_lines.append(line_format.format(algo=hash_struct.name, hash_value=hash_value))

    output_file.write_bytes('\n'.join(output_lines).encode('ascii'))


@nox.session(python=PYTHON_DEFAULT_VERSION)
def make_dist_digest(_session):
    wanted_algos = ['sha256', 'sha512', 'sha3_256', 'sha3_512']
    available_algos = [algo for algo in wanted_algos if algo in hashlib.algorithms_available]

    directory = pathlib.Path('dist')
    glob_match = '*'
    hashes_file_suffix = '_hashes'

    did_find_any_file = False
    for dist_file in directory.glob(glob_match):
        if dist_file.stem.endswith(hashes_file_suffix):
            continue

        hashes_list = _calculate_hashes(dist_file, available_algos)

        output_file = dist_file.with_stem(dist_file.name + hashes_file_suffix).with_suffix('.txt')
        _save_hashes(output_file, hashes_list)

        did_find_any_file = True

    if not did_find_any_file:
        raise RuntimeError(
            f'No file found in {str(directory / glob_match)}, but was expected to find some.'
        )
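
# A minimal usage sketch of the digest helpers above (file names invented for
# illustration; the requested algorithms are filtered against
# hashlib.algorithms_available before use):
#
#   hashes = _calculate_hashes(pathlib.Path('dist/b2'), ['sha256', 'sha512'])
#   _save_hashes(pathlib.Path('dist/b2_hashes.txt'), hashes)
#
# _save_hashes() writes one "<algo> <hexdigest>" line per algorithm, with the
# algorithm column padded so the digests line up.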

@nox.session(python=PYTHON_DEFAULT_VERSION)
def doc(session):
    """Build the documentation."""
    pdm_install(session, 'doc')
    session.cd('doc')
    sphinx_args = ['-b', 'html', '-T', '-W', 'source', 'build/html']
    session.run('rm', '-rf', 'build', external=True)

    if not session.interactive:
        session.run('sphinx-build', *sphinx_args)
        # session.notify('doc_cover')  # disabled due to https://github.com/sphinx-doc/sphinx/issues/11678
    else:
        sphinx_args[-2:-2] = [
            '-E', '--open-browser', '--watch', '../b2', '--ignore', '*.pyc', '--ignore', '*~'
        ]
        session.run('sphinx-autobuild', *sphinx_args)


@nox.session
def doc_cover(session):
    """
    Perform coverage analysis for the documentation.

    At the time of writing, the B2 CLI does not have object documentation, hence this always
    returns "0 out of 0 objects", which errors out in Sphinx 7.2
    (https://github.com/sphinx-doc/sphinx/issues/11678).
    """
    pdm_install(session, 'doc')
    session.cd('doc')
    sphinx_args = ['-b', 'coverage', '-T', '-W', 'source', 'build/coverage']
    session.run('sphinx-build', *sphinx_args)


def _read_readme_name_and_description() -> tuple[str, str]:
    """
    Get the name and the description from the readme.

    The first line is assumed to be the project name, the second contains the list of all
    the different checks, and the third one and the following contain some description.
    We assume that the description can be multiline, and that it ends with an empty line.

    An example of the content from README.md can look like this:

    ..note:
        # B2 Command Line Tool
        [![Continuous Integration](https://github.com/Backblaze/B2_Command_Line_Tool/ ... (a very long line)
        (a few empty lines)
        The command-line tool that gives easy access to all of the capabilities of B2 Cloud Storage.
        This program provides command-line access to the B2 service.

    From this we should parse "B2 Command Line Tool" as the name and
    "The command-line tool that gives easy access to all of the capabilities of B2 Cloud Storage."
    as the description.
    """
    with open('README.md') as f:
        non_empty_lines = 0
        full_name = None
        description_parts = []

        for line_with_ends in f.readlines():
            line = line_with_ends.strip()

            if len(line) == 0:
                # If we found an empty line after we got anything for our description – finish.
                if len(description_parts) > 0:
                    break
                continue

            non_empty_lines += 1

            if non_empty_lines == 1:
                # A Markdown header starts with "# "; we strip everything up to the first space.
                full_name = line.split(' ', maxsplit=1)[1]

            if non_empty_lines < 3:
                continue

            description_parts.append(line)

    return full_name, ' '.join(description_parts)

@nox.session(python=PYTHON_DEFAULT_VERSION)
def generate_dockerfile(session):
    """Generate a Dockerfile from Dockerfile.template."""
    build(session)
    pdm_install(session)

    # This string is like `b2 command line tool, version `
    version = session.run('b2', 'version', '--short', silent=True).strip()
    dist_path = 'dist'

    full_name, description = _read_readme_name_and_description()
    vcs_ref = session.run("git", "rev-parse", "HEAD", external=True, silent=True).strip()
    built_distribution = list(pathlib.Path('.').glob(f'{dist_path}/*'))[0]
    template_mapping = dict(
        python_version=PYTHON_DEFAULT_VERSION,
        vendor='Backblaze',
        name=full_name,
        description=description,
        version=version,
        url='https://www.backblaze.com',
        # TODO: consider fetching it from `git ls-remote --get-url origin`
        vcs_url='https://github.com/Backblaze/B2_Command_Line_Tool',
        vcs_ref=vcs_ref,
        build_date=datetime.datetime.utcnow().isoformat(),
        tar_path=dist_path,
        tar_name=built_distribution.name,
    )

    template_file = DOCKER_TEMPLATE.read_text()
    template = string.Template(template_file)
    dockerfile = template.substitute(template_mapping)
    pathlib.Path('./Dockerfile').write_text(dockerfile)


def run_docker_tests(session, image_tag):
    """Run integration tests against a docker image."""
    user_id = session.run('id', '-u', silent=True, external=True).strip()
    group_id = session.run('id', '-g', silent=True, external=True).strip()
    docker_run_cmd = f"docker run -i --user {user_id}:{group_id} -v /tmp:/tmp:rw --env-file ENVFILE"
    run_integration_test(
        session, [
            "--sut",
            f"{docker_run_cmd} {image_tag}",
            "--env-file-cmd-placeholder",
            "ENVFILE",
        ]
    )
    for binary_name in get_versions():
        run_integration_test(
            session, [
                "--sut",
                f"{docker_run_cmd} --entrypoint {binary_name} {image_tag}",
                "--env-file-cmd-placeholder",
                "ENVFILE",
            ]
        )


@nox.session(python=PYTHON_DEFAULT_VERSION)
def docker_test(session):
    """Run integration tests against a docker image."""
    if session.posargs:
        image_tag = session.posargs[0]
    else:
        raise ValueError('Provide -- {docker_image_tag}')
    run_docker_tests(session, image_tag)


@nox.session(python=PYTHON_DEFAULT_VERSION)
def build_and_test_docker(session):
    """
    For running locally; CI uses a different set of sessions.
    """
    test_image_tag = 'b2:test'
    generate_dockerfile(session)
    session.run('docker', 'build', '-t', test_image_tag, '.', external=True)
    run_docker_tests(session, test_image_tag)


@nox.session(python=PYTHON_DEFAULT_VERSION)
def make_release_commit(session):
    """
    Runs `towncrier build`, commits changes, and tags; all that is left to do is pushing.
    """
    if session.posargs:
        version = session.posargs[0]
    else:
        session.error('Provide -- {release_version} (X.Y.Z - without leading "v")')

    requirements = session.run('pdm', 'export', '--no-hashes', silent=True)
    # If the b2sdk requirement points to git, it won't have a version definition (b2sdk==).
    assert ('b2sdk==' in requirements) and (
        'git+' not in requirements
    ), 'release version must depend on a released b2sdk version'

    if not re.match(r'^\d+\.\d+\.\d+$', version):
        session.error(
            f'Provided version="{version}". Version must be of the form X.Y.Z where '
            f'X, Y and Z are integers'
        )

    local_changes = subprocess.check_output(['git', 'diff', '--stat'])
    if local_changes:
        session.error('Uncommitted changes detected')

    current_branch = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode().strip()
    if current_branch != 'master':
        session.log('WARNING: releasing from a branch different than master')

    pdm_install(session, 'release')
    session.run('towncrier', 'build', '--yes', '--version', version)
    session.log(
        f'CHANGELOG updated, changes ready to commit and push\n'
        f'    git remote add upstream {UPSTREAM_REPO_URL!r} 2>/dev/null || git remote get-url upstream\n'
        f'    git commit -m "release {version}"\n'
        f'    git tag v{version}\n'
        f'    git push upstream v{version}\n'
        f'    git push upstream {current_branch}'
    )
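
# The version gate above in a nutshell (values are examples only):
#
#   >>> bool(re.match(r'^\d+\.\d+\.\d+$', '3.19.1'))
#   True
#   >>> bool(re.match(r'^\d+\.\d+\.\d+$', 'v3.19.1'))   # leading "v" is rejected
#   False
#   >>> bool(re.match(r'^\d+\.\d+\.\d+$', '3.19'))      # patch part is required
#   False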

def load_allowed_change_types(
    project_toml: pathlib.Path = pathlib.Path('./pyproject.toml')
) -> set[str]:
    """
    Load the list of allowed change types from the pyproject.toml file.
    """
    import tomllib
    configuration = tomllib.loads(project_toml.read_text())
    return set(entry['directory'] for entry in configuration['tool']['towncrier']['type'])


def is_changelog_filename_valid(filename: str, allowed_change_types: set[str]) -> tuple[bool, str]:
    """
    Validates whether the given filename matches our rules and, if it doesn't,
    provides information about why.
    """
    error_reasons = []
    wanted_extension = 'md'

    try:
        description, change_type, extension = filename.rsplit('.', maxsplit=2)
    except ValueError:
        # Not enough values to unpack.
        return False, "Doesn't follow the \"<description>.<change_type>.md\" pattern."

    # Check whether the filename ends with .md.
    if extension != wanted_extension:
        error_reasons.append(f"Doesn't end with {wanted_extension} extension.")

    # Check whether the change type is valid.
    if change_type not in allowed_change_types:
        error_reasons.append(
            f"Change type '{change_type}' doesn't match allowed types: {allowed_change_types}."
        )

    # Check whether the description makes sense.
    try:
        int(description)
    except ValueError:
        if description[0] != '+':
            error_reasons.append("Doesn't start with a number nor a plus sign.")

    return len(error_reasons) == 0, ' / '.join(error_reasons) if error_reasons else ''


def is_changelog_entry_valid(file_content: str) -> tuple[bool, str]:
    """
    We expect the changelog entry to be a valid sentence in the English language.
    This includes, but is not limited to, a capital letter at the start and a
    full-stop character at the end.

    Note: to do this "properly", tools like `nltk` and `spacy` should be used.
    """
    error_reasons = []

    # Check whether the first character is a capital letter.
    # Not allowing special characters nor numbers at the very start.
    if not file_content[0].isalpha() or not file_content[0].isupper():
        error_reasons.append('The first character is not a capital letter.')

    # Check if the last character is a full-stop character.
    if file_content.strip()[-1] != '.':
        error_reasons.append('The last character is not a full-stop character.')

    return len(error_reasons) == 0, ' / '.join(error_reasons) if error_reasons else ''
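
# Hedged examples of the two validators above (file names and change types are
# invented; the real types come from [tool.towncrier] in pyproject.toml):
#
#   >>> is_changelog_filename_valid('123.fixed.md', {'fixed', 'added'})
#   (True, '')
#   >>> is_changelog_filename_valid('+freeform.added.md', {'fixed', 'added'})
#   (True, '')
#   >>> is_changelog_filename_valid('readme.md', {'fixed', 'added'})[0]
#   False
#   >>> is_changelog_entry_valid('Fixed a crash on empty buckets.')
#   (True, '')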

@nox.session(python=PYTHON_DEFAULT_VERSION)
def towncrier_check(session):
    """
    Check whether all the entries in changelog.d follow the expected naming convention,
    as well as some basic rules as to their format.
    """
    expected_non_md_files = {'.gitkeep'}
    allowed_change_types = load_allowed_change_types()

    is_error = False
    for filename in pathlib.Path('./changelog.d/').glob('*'):
        # If that's an expected file, it's all right.
        if filename.name in expected_non_md_files:
            continue

        # Check whether the file matches the expected pattern.
        is_valid, error_message = is_changelog_filename_valid(filename.name, allowed_change_types)
        if not is_valid:
            session.log(f"File {filename.name} doesn't match the expected pattern: {error_message}")
            is_error = True
            continue

        # Check whether the file isn't too big.
        if filename.lstat().st_size > 16 * 1024:
            session.log(
                f'File {filename.name} content is too big – it should be smaller than 16kB.'
            )
            is_error = True
            continue

        # Check whether the file can be loaded as a UTF-8 file.
        try:
            file_content = filename.read_text(encoding='utf-8')
        except UnicodeDecodeError:
            session.log(f'File {filename.name} is not a valid UTF-8 file.')
            is_error = True
            continue

        # Check whether the content of the file is valid at all.
        is_valid, error_message = is_changelog_entry_valid(file_content)
        if not is_valid:
            session.log(f'File {filename.name} is not a valid changelog entry: {error_message}')
            is_error = True
            continue

    if is_error:
        session.error(
            'Found errors in the changelog.d directory. Check the logs above for more information.'
        )

B2_Command_Line_Tool-3.19.1/pdm.lock

# This file is @generated by PDM.
# It is not intended for manual editing.

[metadata]
groups = ["default", "bundle", "doc", "format", "full", "license", "lint", "release", "test"]
strategy = ["cross_platform", "inherit_metadata"]
lock_version = "4.4.1"
content_hash = "sha256:c41ed236c40d0db7d7eeaf871f99aad2ff11e96179c48761a035ff3e43f314c9"

[[package]]
name = "alabaster"
version = "0.7.16"
requires_python = ">=3.9"
summary = "A light, configurable Sphinx theme"
groups = ["doc"]
marker = "python_version >= \"3.9\""
files = [
    {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"},
    {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"},
]

[[package]]
name = "altgraph"
version = "0.17.4"
summary = "Python graph (network) package"
groups = ["bundle"]
marker = "python_version < \"3.13\""
files = [
    {file = "altgraph-0.17.4-py2.py3-none-any.whl", hash = "sha256:642743b4750de17e655e6711601b077bc6598dbfa3ba5fa2b2a35ce12b508dff"},
    {file = "altgraph-0.17.4.tar.gz", hash = "sha256:1b5afbb98f6c4dcadb2e2ae6ab9fa994bbb8c1d75f4fa96d340f9437ae454406"},
]

[[package]]
name = "annotated-types"
version = "0.5.0"
requires_python = ">=3.7"
summary = "Reusable constraint types to use with typing.Annotated"
groups = ["full"]
dependencies = [
    "typing-extensions>=4.0.0; python_version < \"3.9\"",
]
files = [
    {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"},
    {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"},
]

[[package]]
name = "argcomplete"
version = "3.1.2"
requires_python = ">=3.6"
summary = "Bash tab completion for argparse"
groups = ["default"]
dependencies = [
    "importlib-metadata<7,>=0.23; python_version < \"3.8\"",
]
files = [
    {file = "argcomplete-3.1.2-py3-none-any.whl", hash = "sha256:d97c036d12a752d1079f190bc1521c545b941fda89ad85d15afa909b4d1b9a99"},
    {file = "argcomplete-3.1.2.tar.gz", hash = "sha256:d5d1e5efd41435260b8f85673b74ea2e883affcbec9f4230c582689e8e78251b"},
]

[[package]]
name = "arrow"
version = "1.2.3"
requires_python = ">=3.6"
summary = "Better dates & times for Python"
groups = ["default"]
dependencies =
[ "python-dateutil>=2.7.0", "typing-extensions; python_version < \"3.8\"", ] files = [ {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, ] [[package]] name = "atomicwrites" version = "1.4.1" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" summary = "Atomic file writes." groups = ["lint", "test"] marker = "sys_platform == \"win32\"" files = [ {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, ] [[package]] name = "attrs" version = "23.2.0" requires_python = ">=3.7" summary = "Classes Without Boilerplate" groups = ["lint", "test"] dependencies = [ "importlib-metadata; python_version < \"3.8\"", ] files = [ {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [[package]] name = "b2sdk" version = "2.1.0" requires_python = ">=3.7" summary = "Backblaze B2 SDK" groups = ["default"] dependencies = [ "importlib-metadata>=3.3.0; python_version < \"3.8\"", "logfury<2.0.0,>=1.0.1", "requests<3.0.0,>=2.9.1", "typing-extensions>=4.7.1; python_version < \"3.12\"", ] files = [ {file = "b2sdk-2.1.0-py3-none-any.whl", hash = "sha256:c88c9ca94034b5c490884280f921df10a3fa98a757eccca3fb57fb257fb04bde"}, {file = "b2sdk-2.1.0.tar.gz", hash = "sha256:39116cc539ffa09c45eb9802b96416efafd255698d514303bdf3b7f7cf105f3f"}, ] [[package]] name = "babel" version = "2.14.0" requires_python = ">=3.7" summary = "Internationalization utilities" groups = ["doc"] marker = "python_version >= \"3.9\"" files = [ {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, ] [[package]] name = "backoff" version = "2.1.2" requires_python = ">=3.7,<4.0" summary = "Function decoration for backoff and retry" groups = ["test"] files = [ {file = "backoff-2.1.2-py3-none-any.whl", hash = "sha256:b135e6d7c7513ba2bfd6895bc32bc8c66c6f3b0279b4c6cd866053cfd7d3126b"}, {file = "backoff-2.1.2.tar.gz", hash = "sha256:407f1bc0f22723648a8880821b935ce5df8475cf04f7b6b5017ae264d30f6069"}, ] [[package]] name = "certifi" version = "2024.2.2" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." groups = ["default", "doc"] files = [ {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] name = "charset-normalizer" version = "3.3.2" requires_python = ">=3.7.0" summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
groups = ["default", "doc"] files = [ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, 
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] name = "click" version = "8.1.7" requires_python = ">=3.7" summary = "Composable command line interface toolkit" groups = ["release"] marker = "python_version >= \"3.8\"" dependencies = [ "colorama; platform_system == \"Windows\"", ] files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [[package]] name = "colorama" version = "0.4.6" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" summary = "Cross-platform colored terminal text." groups = ["default", "doc", "lint", "release", "test"] marker = "sys_platform == \"win32\" or platform_system == \"Windows\" or python_version >= \"3.9\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] name = "coverage" version = "7.2.7" requires_python = ">=3.7" summary = "Code coverage measurement for Python" groups = ["test"] files = [ {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, {file = 
"coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, {file = 
"coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [[package]] name = "coverage" version = "7.2.7" extras = ["toml"] requires_python = ">=3.7" summary = "Code coverage measurement for Python" groups = ["test"] dependencies = [ "coverage==7.2.7", "tomli; python_full_version <= \"3.11.0a6\"", ] files = [ {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [[package]] name = "docutils" version = "0.18.1" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" summary = "Docutils -- Python Documentation Utilities" groups = ["default", "doc"] files = [ {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"}, {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, ] [[package]] name = "execnet" version = "2.0.2" requires_python = ">=3.7" summary = "execnet: rapid multi-Python 
deployment" groups = ["test"] files = [ {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, ] [[package]] name = "greenlet" version = "3.0.3" requires_python = ">=3.7" summary = "Lightweight in-process concurrent programming" groups = ["doc"] marker = "(platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version >= \"3.9\"" files = [ {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, 
{file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, ] [[package]] name = "idna" version = "3.6" requires_python = ">=3.5" summary = "Internationalized Domain Names in Applications (IDNA)" groups = ["default", "doc"] files = [ {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] name = "imagesize" version = "1.4.1" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" summary = "Getting image size from png/jpeg/jpeg2000/gif file" groups = ["doc"] marker = "python_version >= \"3.9\"" files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] [[package]] name = "importlib-metadata" version = 
"6.7.0" requires_python = ">=3.7" summary = "Read metadata from Python packages" groups = ["bundle", "default", "doc", "full", "lint", "test"] marker = "python_version < \"3.10\"" dependencies = [ "typing-extensions>=3.6.4; python_version < \"3.8\"", "zipp>=0.5", ] files = [ {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, ] [[package]] name = "importlib-resources" version = "6.3.0" requires_python = ">=3.8" summary = "Read resources from Python packages" groups = ["release"] marker = "python_version < \"3.10\" and python_version >= \"3.8\"" dependencies = [ "zipp>=3.1.0; python_version < \"3.10\"", ] files = [ {file = "importlib_resources-6.3.0-py3-none-any.whl", hash = "sha256:783407aa1cd05550e3aa123e8f7cfaebee35ffa9cb0242919e2d1e4172222705"}, {file = "importlib_resources-6.3.0.tar.gz", hash = "sha256:166072a97e86917a9025876f34286f549b9caf1d10b35a1b372bffa1600c6569"}, ] [[package]] name = "incremental" version = "22.10.0" summary = "\"A small library that versions your Python projects.\"" groups = ["release"] marker = "python_version >= \"3.8\"" files = [ {file = "incremental-22.10.0-py2.py3-none-any.whl", hash = "sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51"}, {file = "incremental-22.10.0.tar.gz", hash = "sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0"}, ] [[package]] name = "iniconfig" version = "2.0.0" requires_python = ">=3.7" summary = "brain-dead simple config-ini parsing" groups = ["lint", "test"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] name = "jinja2" version = "3.1.3" requires_python = ">=3.7" summary = "A very fast and expressive template engine." 
groups = ["doc", "release"] marker = "python_version >= \"3.8\"" dependencies = [ "MarkupSafe>=2.0", ] files = [ {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [[package]] name = "liccheck" version = "0.9.2" requires_python = ">=3.5" summary = "Check python packages from requirement.txt and report issues" groups = ["lint"] dependencies = [ "semantic-version>=2.7.0", "toml", ] files = [ {file = "liccheck-0.9.2-py2.py3-none-any.whl", hash = "sha256:15cbedd042515945fe9d58b62e0a5af2f2a7795def216f163bb35b3016a16637"}, {file = "liccheck-0.9.2.tar.gz", hash = "sha256:bdc2190f8e95af3c8f9c19edb784ba7d41ecb2bf9189422eae6112bf84c08cd5"}, ] [[package]] name = "livereload" version = "2.6.3" summary = "Python LiveReload is an awesome tool for web developers" groups = ["doc"] marker = "python_version >= \"3.9\"" dependencies = [ "six", "tornado; python_version > \"2.7\"", ] files = [ {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"}, {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, ] [[package]] name = "logfury" version = "1.0.1" summary = "('Toolkit for responsible, low-boilerplate logging of library method calls',)" groups = ["default"] files = [ {file = "logfury-1.0.1-py3-none-any.whl", hash = "sha256:b4f04be1701a1df644afc3384d6167d64c899f8036b7eefc3b6c570c6a9b290b"}, {file = "logfury-1.0.1.tar.gz", hash = "sha256:130a5daceab9ad534924252ddf70482aa2c96662b3a3825a7d30981d03b76a26"}, ] [[package]] name = "macholib" version = "1.16.3" summary = "Mach-O header analysis and editing" groups = ["bundle"] marker = "sys_platform == \"darwin\" and python_version < \"3.13\"" dependencies = [ "altgraph>=0.17", ] files = [ {file = "macholib-1.16.3-py2.py3-none-any.whl", hash = "sha256:0e315d7583d38b8c77e815b1ecbdbf504a8258d8b3e17b61165c6feb60d18f2c"}, {file = "macholib-1.16.3.tar.gz", hash = "sha256:07ae9e15e8e4cd9a788013d81f5908b3609aa76f9b1421bae9c4d7606ec86a30"}, ] [[package]] name = "markupsafe" version = "2.1.5" requires_python = ">=3.7" summary = "Safely add untrusted strings to HTML/XML markup." 
groups = ["doc", "release"] marker = "python_version >= \"3.8\"" files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] name = "more-itertools" version = "8.13.0" requires_python = ">=3.5" summary = "More routines for operating on iterables, beyond itertools" groups = ["test"] files = [ {file = "more-itertools-8.13.0.tar.gz", hash = "sha256:a42901a0a5b169d925f6f217cd5a190e32ef54360905b9c39ee7db5313bfec0f"}, {file = "more_itertools-8.13.0-py3-none-any.whl", hash = "sha256:c5122bffc5f104d37c1626b8615b511f3427aa5389b94d61e5ef8236bfbc3ddb"}, ] [[package]] name = "packaging" version = "24.0" requires_python = ">=3.7" summary = "Core utilities for Python packages" groups = ["bundle", "doc", "lint", "test"] files = [ {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "patchelf-wrapper" version = "1.2.0" summary = "A wrapper for patchelf" groups = ["bundle"] marker = "platform_system == \"Linux\"" files = [ {file = "patchelf-wrapper-1.2.0.tar.gz", hash = "sha256:67c8802e7c3e79eaa28811ce5e3245e8264b54e16afd450a3b8d727bc0dbccb8"}, ] [[package]] name = "pefile" version = "2023.2.7" requires_python = ">=3.6.0" summary = "Python PE parsing module" groups = 
["bundle"] marker = "sys_platform == \"win32\" and python_version < \"3.13\"" files = [ {file = "pefile-2023.2.7-py3-none-any.whl", hash = "sha256:da185cd2af68c08a6cd4481f7325ed600a88f6a813bad9dea07ab3ef73d8d8d6"}, {file = "pefile-2023.2.7.tar.gz", hash = "sha256:82e6114004b3d6911c77c3953e3838654b04511b8b66e8583db70c65998017dc"}, ] [[package]] name = "pexpect" version = "4.8.0" summary = "Pexpect allows easy control of interactive console applications." groups = ["test"] dependencies = [ "ptyprocess>=0.5", ] files = [ {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, ] [[package]] name = "phx-class-registry" version = "4.0.6" requires_python = ">=3" summary = "Factory+Registry pattern for Python classes" groups = ["default"] files = [ {file = "phx-class-registry-4.0.6.tar.gz", hash = "sha256:66e9818de0a9d62e8cfe311587fcd3853ba941b71c11a7a73e5808d6550db125"}, {file = "phx_class_registry-4.0.6-py3-none-any.whl", hash = "sha256:90f8c44b9840ac1cb350876157669bf0d1f9d3be614a4f21d739a219a0640601"}, ] [[package]] name = "pip" version = "23.3.2" requires_python = ">=3.7" summary = "The PyPA recommended tool for installing Python packages." groups = ["license"] files = [ {file = "pip-23.3.2-py3-none-any.whl", hash = "sha256:5052d7889c1f9d05224cd41741acb7c5d6fa735ab34e339624a614eaaa7e7d76"}, {file = "pip-23.3.2.tar.gz", hash = "sha256:7fd9972f96db22c8077a1ee2691b172c8089b17a5652a44494a9ecb0d78f9149"}, ] [[package]] name = "pip-licenses" version = "4.3.4" requires_python = "~=3.8" summary = "Dump the software license list of Python packages installed with pip." groups = ["license"] marker = "python_version >= \"3.9\"" dependencies = [ "prettytable>=2.3.0", ] files = [ {file = "pip-licenses-4.3.4.tar.gz", hash = "sha256:9c6c9c3252b976d08735bdffb0eb4c5eaa50dfd46f5e075532c0248ffe94fed1"}, {file = "pip_licenses-4.3.4-py3-none-any.whl", hash = "sha256:85706ec30781076eb611fed3934f27a1f18437d3211f747567cd3c4e943fce1b"}, ] [[package]] name = "pipdeptree" version = "2.16.2" requires_python = ">=3.8" summary = "Command line utility to show dependency tree of packages." groups = ["license"] marker = "python_version >= \"3.9\"" dependencies = [ "pip>=23.1.2", ] files = [ {file = "pipdeptree-2.16.2-py3-none-any.whl", hash = "sha256:4b60a20f632aa3449880141d1cd0bc99cb5f93ed46d54d689fd1c9b95f0e53d0"}, {file = "pipdeptree-2.16.2.tar.gz", hash = "sha256:96ecde8e6f40c95998491a385e4af56d387f94ff7d3b8f209aa34982a721bc43"}, ] [[package]] name = "platformdirs" version = "4.0.0" requires_python = ">=3.7" summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
groups = ["default"] dependencies = [ "typing-extensions>=4.7.1; python_version < \"3.8\"", ] files = [ {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, ] [[package]] name = "pluggy" version = "1.2.0" requires_python = ">=3.7" summary = "plugin and hook calling mechanisms for python" groups = ["lint", "test"] dependencies = [ "importlib-metadata>=0.12; python_version < \"3.8\"", ] files = [ {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] [[package]] name = "prettytable" version = "3.10.0" requires_python = ">=3.8" summary = "A simple Python library for easily displaying tabular data in a visually appealing ASCII table format" groups = ["license"] marker = "python_version >= \"3.9\"" dependencies = [ "wcwidth", ] files = [ {file = "prettytable-3.10.0-py3-none-any.whl", hash = "sha256:6536efaf0757fdaa7d22e78b3aac3b69ea1b7200538c2c6995d649365bddab92"}, {file = "prettytable-3.10.0.tar.gz", hash = "sha256:9665594d137fb08a1117518c25551e0ede1687197cf353a4fdc78d27e1073568"}, ] [[package]] name = "ptyprocess" version = "0.7.0" summary = "Run a subprocess in a pseudo terminal" groups = ["test"] files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] [[package]] name = "py" version = "1.11.0" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" summary = "library with cross-python path, ini-parsing, io, code, log facilities" groups = ["lint", "test"] files = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] [[package]] name = "pydantic" version = "2.5.3" requires_python = ">=3.7" summary = "Data validation using Python type hints" groups = ["full"] dependencies = [ "annotated-types>=0.4.0", "importlib-metadata; python_version == \"3.7\"", "pydantic-core==2.14.6", "typing-extensions>=4.6.1", ] files = [ {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, ] [[package]] name = "pydantic-core" version = "2.14.6" requires_python = ">=3.7" summary = "" groups = ["full"] dependencies = [ "typing-extensions!=4.7.0,>=4.6.0", ] files = [ {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, 
{file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, {file = 
"pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", 
hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, {file = 
"pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, ] [[package]] name = "pyelftools" version = "0.31" summary = "Library for analyzing ELF files and DWARF debugging information" groups = ["bundle"] marker = "platform_system == \"Linux\"" files = [ {file = "pyelftools-0.31-py3-none-any.whl", hash = "sha256:f52de7b3c7e8c64c8abc04a79a1cf37ac5fb0b8a49809827130b858944840607"}, {file = "pyelftools-0.31.tar.gz", hash = "sha256:c774416b10310156879443b81187d182d8d9ee499660380e645918b50bc88f99"}, ] [[package]] name = "pygments" version = "2.17.2" requires_python = ">=3.7" summary = "Pygments is a syntax highlighting package written in Python." groups = ["doc"] marker = "python_version >= \"3.9\"" files = [ {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [[package]] name = "pyinstaller" version = "5.13.2" requires_python = "<3.13,>=3.7" summary = "PyInstaller bundles a Python application and all its dependencies into a single package." 
groups = ["bundle"] marker = "python_version < \"3.13\"" dependencies = [ "altgraph", "importlib-metadata>=1.4; python_version < \"3.8\"", "macholib>=1.8; sys_platform == \"darwin\"", "pefile>=2022.5.30; sys_platform == \"win32\"", "pyinstaller-hooks-contrib>=2021.4", "pywin32-ctypes>=0.2.1; sys_platform == \"win32\"", "setuptools>=42.0.0", ] files = [ {file = "pyinstaller-5.13.2-py3-none-macosx_10_13_universal2.whl", hash = "sha256:16cbd66b59a37f4ee59373a003608d15df180a0d9eb1a29ff3bfbfae64b23d0f"}, {file = "pyinstaller-5.13.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8f6dd0e797ae7efdd79226f78f35eb6a4981db16c13325e962a83395c0ec7420"}, {file = "pyinstaller-5.13.2-py3-none-manylinux2014_i686.whl", hash = "sha256:65133ed89467edb2862036b35d7c5ebd381670412e1e4361215e289c786dd4e6"}, {file = "pyinstaller-5.13.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:7d51734423685ab2a4324ab2981d9781b203dcae42839161a9ee98bfeaabdade"}, {file = "pyinstaller-5.13.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:2c2fe9c52cb4577a3ac39626b84cf16cf30c2792f785502661286184f162ae0d"}, {file = "pyinstaller-5.13.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c63ef6133eefe36c4b2f4daf4cfea3d6412ece2ca218f77aaf967e52a95ac9b8"}, {file = "pyinstaller-5.13.2-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:aadafb6f213549a5906829bb252e586e2cf72a7fbdb5731810695e6516f0ab30"}, {file = "pyinstaller-5.13.2-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:b2e1c7f5cceb5e9800927ddd51acf9cc78fbaa9e79e822c48b0ee52d9ce3c892"}, {file = "pyinstaller-5.13.2-py3-none-win32.whl", hash = "sha256:421cd24f26144f19b66d3868b49ed673176765f92fa9f7914cd2158d25b6d17e"}, {file = "pyinstaller-5.13.2-py3-none-win_amd64.whl", hash = "sha256:ddcc2b36052a70052479a9e5da1af067b4496f43686ca3cdda99f8367d0627e4"}, {file = "pyinstaller-5.13.2-py3-none-win_arm64.whl", hash = "sha256:27cd64e7cc6b74c5b1066cbf47d75f940b71356166031deb9778a2579bb874c6"}, {file = "pyinstaller-5.13.2.tar.gz", hash = "sha256:c8e5d3489c3a7cc5f8401c2d1f48a70e588f9967e391c3b06ddac1f685f8d5d2"}, ] [[package]] name = "pyinstaller-hooks-contrib" version = "2024.3" requires_python = ">=3.7" summary = "Community maintained hooks for PyInstaller" groups = ["bundle"] dependencies = [ "importlib-metadata>=4.6; python_version < \"3.10\"", "packaging>=22.0", "setuptools>=42.0.0", ] files = [ {file = "pyinstaller-hooks-contrib-2024.3.tar.gz", hash = "sha256:d18657c29267c63563a96b8fc78db6ba9ae40af6702acb2f8c871df12c75b60b"}, {file = "pyinstaller_hooks_contrib-2024.3-py2.py3-none-any.whl", hash = "sha256:6701752d525e1f4eda1eaec2c2affc206171e15c7a4e188a152fcf3ed3308024"}, ] [[package]] name = "pytest" version = "6.2.5" requires_python = ">=3.6" summary = "pytest: simple powerful testing with Python" groups = ["lint", "test"] dependencies = [ "atomicwrites>=1.0; sys_platform == \"win32\"", "attrs>=19.2.0", "colorama; sys_platform == \"win32\"", "importlib-metadata>=0.12; python_version < \"3.8\"", "iniconfig", "packaging", "pluggy<2.0,>=0.12", "py>=1.8.2", "toml", ] files = [ {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, ] [[package]] name = "pytest-cov" version = "3.0.0" requires_python = ">=3.6" summary = "Pytest plugin for measuring coverage." 
groups = ["test"] dependencies = [ "coverage[toml]>=5.2.1", "pytest>=4.6", ] files = [ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, ] [[package]] name = "pytest-forked" version = "1.4.0" requires_python = ">=3.6" summary = "run tests in isolated forked subprocesses" groups = ["test"] dependencies = [ "py", "pytest>=3.10", ] files = [ {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, ] [[package]] name = "pytest-xdist" version = "2.5.0" requires_python = ">=3.6" summary = "pytest xdist plugin for distributed testing and loop-on-failing modes" groups = ["test"] dependencies = [ "execnet>=1.1", "pytest-forked", "pytest>=6.2.0", ] files = [ {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, ] [[package]] name = "python-dateutil" version = "2.9.0.post0" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" summary = "Extensions to the standard Python datetime module" groups = ["default"] dependencies = [ "six>=1.5", ] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [[package]] name = "pywin32-ctypes" version = "0.2.2" requires_python = ">=3.6" summary = "A (partial) reimplementation of pywin32 using ctypes/cffi" groups = ["bundle"] marker = "sys_platform == \"win32\" and python_version < \"3.13\"" files = [ {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, ] [[package]] name = "requests" version = "2.31.0" requires_python = ">=3.7" summary = "Python HTTP for Humans." groups = ["default", "doc"] dependencies = [ "certifi>=2017.4.17", "charset-normalizer<4,>=2", "idna<4,>=2.5", "urllib3<3,>=1.21.1", ] files = [ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [[package]] name = "rst2ansi" version = "0.1.5" summary = "A rst converter to ansi-decorated console output" groups = ["default"] files = [ {file = "rst2ansi-0.1.5-py3-none-any.whl", hash = "sha256:b2cf192e38975918d07540bba7d673550cd7d28ca7443410984e22d5ab058fb3"}, {file = "rst2ansi-0.1.5.tar.gz", hash = "sha256:1b17fb9a628d40f57933ad1a3aa952346444be069469508e73e95060da33fe6f"}, ] [[package]] name = "ruff" version = "0.0.272" requires_python = ">=3.7" summary = "An extremely fast Python linter, written in Rust." 
groups = ["format", "lint"] files = [ {file = "ruff-0.0.272-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:ae9b57546e118660175d45d264b87e9b4c19405c75b587b6e4d21e6a17bf4fdf"}, {file = "ruff-0.0.272-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:1609b864a8d7ee75a8c07578bdea0a7db75a144404e75ef3162e0042bfdc100d"}, {file = "ruff-0.0.272-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee76b4f05fcfff37bd6ac209d1370520d509ea70b5a637bdf0a04d0c99e13dff"}, {file = "ruff-0.0.272-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48eccf225615e106341a641f826b15224b8a4240b84269ead62f0afd6d7e2d95"}, {file = "ruff-0.0.272-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:677284430ac539bb23421a2b431b4ebc588097ef3ef918d0e0a8d8ed31fea216"}, {file = "ruff-0.0.272-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9c4bfb75456a8e1efe14c52fcefb89cfb8f2a0d31ed8d804b82c6cf2dc29c42c"}, {file = "ruff-0.0.272-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86bc788245361a8148ff98667da938a01e1606b28a45e50ac977b09d3ad2c538"}, {file = "ruff-0.0.272-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b2ea68d2aa69fff1b20b67636b1e3e22a6a39e476c880da1282c3e4bf6ee5a"}, {file = "ruff-0.0.272-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd2bbe337a3f84958f796c77820d55ac2db1e6753f39d1d1baed44e07f13f96d"}, {file = "ruff-0.0.272-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d5a208f8ef0e51d4746930589f54f9f92f84bb69a7d15b1de34ce80a7681bc00"}, {file = "ruff-0.0.272-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:905ff8f3d6206ad56fcd70674453527b9011c8b0dc73ead27618426feff6908e"}, {file = "ruff-0.0.272-py3-none-musllinux_1_2_i686.whl", hash = "sha256:19643d448f76b1eb8a764719072e9c885968971bfba872e14e7257e08bc2f2b7"}, {file = "ruff-0.0.272-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:691d72a00a99707a4e0b2846690961157aef7b17b6b884f6b4420a9f25cd39b5"}, {file = "ruff-0.0.272-py3-none-win32.whl", hash = "sha256:dc406e5d756d932da95f3af082814d2467943631a587339ee65e5a4f4fbe83eb"}, {file = "ruff-0.0.272-py3-none-win_amd64.whl", hash = "sha256:a37ec80e238ead2969b746d7d1b6b0d31aa799498e9ba4281ab505b93e1f4b28"}, {file = "ruff-0.0.272-py3-none-win_arm64.whl", hash = "sha256:06b8ee4eb8711ab119db51028dd9f5384b44728c23586424fd6e241a5b9c4a3b"}, {file = "ruff-0.0.272.tar.gz", hash = "sha256:273a01dc8c3c4fd4c2af7ea7a67c8d39bb09bce466e640dd170034da75d14cab"}, ] [[package]] name = "sadisplay" version = "0.4.9" summary = "SqlAlchemy schema display script" groups = ["doc"] marker = "python_version >= \"3.9\"" dependencies = [ "SQLAlchemy>=0.5", ] files = [ {file = "sadisplay-0.4.9-py2.py3-none-any.whl", hash = "sha256:bf456f582b8f5da19fedef7a9afe969b49231d79724710bc7d35c9439f44c2fc"}, {file = "sadisplay-0.4.9.tar.gz", hash = "sha256:af67160f89123886ab42b247262862bfcde0a3c236229ecdd59de0a1e8e35d96"}, ] [[package]] name = "semantic-version" version = "2.10.0" requires_python = ">=2.7" summary = "A library implementing the 'SemVer' scheme." 
groups = ["lint"] files = [ {file = "semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177"}, {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, ] [[package]] name = "setuptools" version = "68.0.0" requires_python = ">=3.7" summary = "Easily download, build, install, upgrade, and uninstall Python packages" groups = ["bundle", "default", "lint"] files = [ {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, ] [[package]] name = "six" version = "1.16.0" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" summary = "Python 2 and 3 compatibility utilities" groups = ["default", "doc"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] [[package]] name = "snowballstemmer" version = "2.2.0" summary = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." groups = ["doc"] marker = "python_version >= \"3.9\"" files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] [[package]] name = "sphinx" version = "7.2.6" requires_python = ">=3.9" summary = "Python documentation generator" groups = ["doc"] marker = "python_version >= \"3.9\"" dependencies = [ "Jinja2>=3.0", "Pygments>=2.14", "alabaster<0.8,>=0.7", "babel>=2.9", "colorama>=0.4.5; sys_platform == \"win32\"", "docutils<0.21,>=0.18.1", "imagesize>=1.3", "importlib-metadata>=4.8; python_version < \"3.10\"", "packaging>=21.0", "requests>=2.25.0", "snowballstemmer>=2.0", "sphinxcontrib-applehelp", "sphinxcontrib-devhelp", "sphinxcontrib-htmlhelp>=2.0.0", "sphinxcontrib-jsmath", "sphinxcontrib-qthelp", "sphinxcontrib-serializinghtml>=1.1.9", ] files = [ {file = "sphinx-7.2.6-py3-none-any.whl", hash = "sha256:1e09160a40b956dc623c910118fa636da93bd3ca0b9876a7b3df90f07d691560"}, {file = "sphinx-7.2.6.tar.gz", hash = "sha256:9a5160e1ea90688d5963ba09a2dcd8bdd526620edbb65c328728f1b2228d5ab5"}, ] [[package]] name = "sphinx-argparse" version = "0.4.0" requires_python = ">=3.7" summary = "A sphinx extension that automatically documents argparse commands and options" groups = ["doc"] marker = "python_version >= \"3.9\"" dependencies = [ "sphinx>=1.2.0", ] files = [ {file = "sphinx_argparse-0.4.0-py3-none-any.whl", hash = "sha256:73bee01f7276fae2bf621ccfe4d167af7306e7288e3482005405d9f826f9b037"}, {file = "sphinx_argparse-0.4.0.tar.gz", hash = "sha256:e0f34184eb56f12face774fbc87b880abdb9017a0998d1ec559b267e9697e449"}, ] [[package]] name = "sphinx-autobuild" version = "2024.2.4" requires_python = ">=3.9" summary = "Rebuild Sphinx documentation on changes, with live-reload in the browser." 
groups = ["doc"] marker = "python_version >= \"3.9\"" dependencies = [ "colorama", "livereload", "sphinx", ] files = [ {file = "sphinx_autobuild-2024.2.4-py3-none-any.whl", hash = "sha256:63fd87ab7505872a89aef468ce6503f65e794a195f4ae62269db3b85b72d4854"}, {file = "sphinx_autobuild-2024.2.4.tar.gz", hash = "sha256:cb9d2121a176d62d45471624872afc5fad7755ad662738abe400ecf4a7954303"}, ] [[package]] name = "sphinx-rtd-theme" version = "1.3.0" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" summary = "Read the Docs theme for Sphinx" groups = ["doc"] marker = "python_version >= \"3.9\"" dependencies = [ "docutils<0.19", "sphinx<8,>=1.6", "sphinxcontrib-jquery<5,>=4", ] files = [ {file = "sphinx_rtd_theme-1.3.0-py2.py3-none-any.whl", hash = "sha256:46ddef89cc2416a81ecfbeaceab1881948c014b1b6e4450b815311a89fb977b0"}, {file = "sphinx_rtd_theme-1.3.0.tar.gz", hash = "sha256:590b030c7abb9cf038ec053b95e5380b5c70d61591eb0b552063fbe7c41f0931"}, ] [[package]] name = "sphinxcontrib-applehelp" version = "1.0.8" requires_python = ">=3.9" summary = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" groups = ["doc"] marker = "python_version >= \"3.9\"" files = [ {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, ] [[package]] name = "sphinxcontrib-devhelp" version = "1.0.6" requires_python = ">=3.9" summary = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" groups = ["doc"] marker = "python_version >= \"3.9\"" files = [ {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, ] [[package]] name = "sphinxcontrib-htmlhelp" version = "2.0.5" requires_python = ">=3.9" summary = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" groups = ["doc"] marker = "python_version >= \"3.9\"" files = [ {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, ] [[package]] name = "sphinxcontrib-jquery" version = "4.1" requires_python = ">=2.7" summary = "Extension to include jQuery on newer Sphinx releases" groups = ["doc"] marker = "python_version >= \"3.9\"" dependencies = [ "Sphinx>=1.8", ] files = [ {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, ] [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" requires_python = ">=3.5" summary = "A sphinx extension which renders display math in HTML via JavaScript" groups = ["doc"] marker = "python_version >= \"3.9\"" files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, ] [[package]] name = 
"sphinxcontrib-plantuml" version = "0.29" summary = "Sphinx \"plantuml\" extension" groups = ["doc"] marker = "python_version >= \"3.9\"" dependencies = [ "Sphinx>=1.6", ] files = [ {file = "sphinxcontrib-plantuml-0.29.tar.gz", hash = "sha256:97a4f2a26af91db88770ccf8a3b2e03305bcda7ec41a7f969fc8cb27b84a3c44"}, ] [[package]] name = "sphinxcontrib-qthelp" version = "1.0.7" requires_python = ">=3.9" summary = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" groups = ["doc"] marker = "python_version >= \"3.9\"" files = [ {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, ] [[package]] name = "sphinxcontrib-serializinghtml" version = "1.1.10" requires_python = ">=3.9" summary = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" groups = ["doc"] marker = "python_version >= \"3.9\"" files = [ {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, ] [[package]] name = "sqlalchemy" version = "2.0.28" requires_python = ">=3.7" summary = "Database Abstraction Library" groups = ["doc"] marker = "python_version >= \"3.9\"" dependencies = [ "greenlet!=0.4.17; platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\"", "typing-extensions>=4.6.0", ] files = [ {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0b148ab0438f72ad21cb004ce3bdaafd28465c4276af66df3b9ecd2037bf252"}, {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbda76961eb8f27e6ad3c84d1dc56d5bc61ba8f02bd20fcf3450bd421c2fcc9c"}, {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feea693c452d85ea0015ebe3bb9cd15b6f49acc1a31c28b3c50f4db0f8fb1e71"}, {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5da98815f82dce0cb31fd1e873a0cb30934971d15b74e0d78cf21f9e1b05953f"}, {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a5adf383c73f2d49ad15ff363a8748319ff84c371eed59ffd0127355d6ea1da"}, {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56856b871146bfead25fbcaed098269d90b744eea5cb32a952df00d542cdd368"}, {file = "SQLAlchemy-2.0.28-cp310-cp310-win32.whl", hash = "sha256:943aa74a11f5806ab68278284a4ddd282d3fb348a0e96db9b42cb81bf731acdc"}, {file = "SQLAlchemy-2.0.28-cp310-cp310-win_amd64.whl", hash = "sha256:c6c4da4843e0dabde41b8f2e8147438330924114f541949e6318358a56d1875a"}, {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46a3d4e7a472bfff2d28db838669fc437964e8af8df8ee1e4548e92710929adc"}, {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3dd67b5d69794cfe82862c002512683b3db038b99002171f624712fa71aeaa"}, {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61e2e41656a673b777e2f0cbbe545323dbe0d32312f590b1bc09da1de6c2a02"}, {file = 
"SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0315d9125a38026227f559488fe7f7cee1bd2fbc19f9fd637739dc50bb6380b2"}, {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af8ce2d31679006e7b747d30a89cd3ac1ec304c3d4c20973f0f4ad58e2d1c4c9"}, {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:81ba314a08c7ab701e621b7ad079c0c933c58cdef88593c59b90b996e8b58fa5"}, {file = "SQLAlchemy-2.0.28-cp311-cp311-win32.whl", hash = "sha256:1ee8bd6d68578e517943f5ebff3afbd93fc65f7ef8f23becab9fa8fb315afb1d"}, {file = "SQLAlchemy-2.0.28-cp311-cp311-win_amd64.whl", hash = "sha256:ad7acbe95bac70e4e687a4dc9ae3f7a2f467aa6597049eeb6d4a662ecd990bb6"}, {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d3499008ddec83127ab286c6f6ec82a34f39c9817f020f75eca96155f9765097"}, {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b66fcd38659cab5d29e8de5409cdf91e9986817703e1078b2fdaad731ea66f5"}, {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea30da1e76cb1acc5b72e204a920a3a7678d9d52f688f087dc08e54e2754c67"}, {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:124202b4e0edea7f08a4db8c81cc7859012f90a0d14ba2bf07c099aff6e96462"}, {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e23b88c69497a6322b5796c0781400692eca1ae5532821b39ce81a48c395aae9"}, {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b6303bfd78fb3221847723104d152e5972c22367ff66edf09120fcde5ddc2e2"}, {file = "SQLAlchemy-2.0.28-cp312-cp312-win32.whl", hash = "sha256:a921002be69ac3ab2cf0c3017c4e6a3377f800f1fca7f254c13b5f1a2f10022c"}, {file = "SQLAlchemy-2.0.28-cp312-cp312-win_amd64.whl", hash = "sha256:b4a2cf92995635b64876dc141af0ef089c6eea7e05898d8d8865e71a326c0385"}, {file = "SQLAlchemy-2.0.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e91b5e341f8c7f1e5020db8e5602f3ed045a29f8e27f7f565e0bdee3338f2c7"}, {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c7b78dfc7278329f27be02c44abc0d69fe235495bb8e16ec7ef1b1a17952db"}, {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eba73ef2c30695cb7eabcdb33bb3d0b878595737479e152468f3ba97a9c22a4"}, {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5df5d1dafb8eee89384fb7a1f79128118bc0ba50ce0db27a40750f6f91aa99d5"}, {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2858bbab1681ee5406650202950dc8f00e83b06a198741b7c656e63818633526"}, {file = "SQLAlchemy-2.0.28-cp37-cp37m-win32.whl", hash = "sha256:9461802f2e965de5cff80c5a13bc945abea7edaa1d29360b485c3d2b56cdb075"}, {file = "SQLAlchemy-2.0.28-cp37-cp37m-win_amd64.whl", hash = "sha256:a6bec1c010a6d65b3ed88c863d56b9ea5eeefdf62b5e39cafd08c65f5ce5198b"}, {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:843a882cadebecc655a68bd9a5b8aa39b3c52f4a9a5572a3036fb1bb2ccdc197"}, {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dbb990612c36163c6072723523d2be7c3eb1517bbdd63fe50449f56afafd1133"}, {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7e4baf9161d076b9a7e432fce06217b9bd90cfb8f1d543d6e8c4595627edb9"}, {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e0a5354cb4de9b64bccb6ea33162cb83e03dbefa0d892db88a672f5aad638a75"}, {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fffcc8edc508801ed2e6a4e7b0d150a62196fd28b4e16ab9f65192e8186102b6"}, {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aca7b6d99a4541b2ebab4494f6c8c2f947e0df4ac859ced575238e1d6ca5716b"}, {file = "SQLAlchemy-2.0.28-cp38-cp38-win32.whl", hash = "sha256:8c7f10720fc34d14abad5b647bc8202202f4948498927d9f1b4df0fb1cf391b7"}, {file = "SQLAlchemy-2.0.28-cp38-cp38-win_amd64.whl", hash = "sha256:243feb6882b06a2af68ecf4bec8813d99452a1b62ba2be917ce6283852cf701b"}, {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc4974d3684f28b61b9a90fcb4c41fb340fd4b6a50c04365704a4da5a9603b05"}, {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87724e7ed2a936fdda2c05dbd99d395c91ea3c96f029a033a4a20e008dd876bf"}, {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68722e6a550f5de2e3cfe9da6afb9a7dd15ef7032afa5651b0f0c6b3adb8815d"}, {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:328529f7c7f90adcd65aed06a161851f83f475c2f664a898af574893f55d9e53"}, {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:df40c16a7e8be7413b885c9bf900d402918cc848be08a59b022478804ea076b8"}, {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:426f2fa71331a64f5132369ede5171c52fd1df1bd9727ce621f38b5b24f48750"}, {file = "SQLAlchemy-2.0.28-cp39-cp39-win32.whl", hash = "sha256:33157920b233bc542ce497a81a2e1452e685a11834c5763933b440fedd1d8e2d"}, {file = "SQLAlchemy-2.0.28-cp39-cp39-win_amd64.whl", hash = "sha256:2f60843068e432311c886c5f03c4664acaef507cf716f6c60d5fde7265be9d7b"}, {file = "SQLAlchemy-2.0.28-py3-none-any.whl", hash = "sha256:78bb7e8da0183a8301352d569900d9d3594c48ac21dc1c2ec6b3121ed8b6c986"}, {file = "SQLAlchemy-2.0.28.tar.gz", hash = "sha256:dd53b6c4e6d960600fd6532b79ee28e2da489322fcf6648738134587faf767b6"}, ] [[package]] name = "staticx" version = "0.13.9" requires_python = ">=3.5" summary = "Build static self-extracting app from dynamic executable" groups = ["bundle"] marker = "platform_system == \"Linux\"" dependencies = [ "pyelftools", ] files = [ {file = "staticx-0.13.9-py3-none-manylinux1_x86_64.whl", hash = "sha256:4fade92c50bc0208571ac2a0742235c0522f6c3a7ccf3f329eb0b986d830043a"}, {file = "staticx-0.13.9.tar.gz", hash = "sha256:002cfa1869125997564b938ddefff32bdfa455c9f04635e49f87b6a96219d2d5"}, ] [[package]] name = "tabulate" version = "0.9.0" requires_python = ">=3.7" summary = "Pretty-print tabular data" groups = ["default"] files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, ] [[package]] name = "toml" version = "0.10.2" requires_python = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" summary = "Python Library for Tom's Obvious, Minimal Language" groups = ["lint", "test"] files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] [[package]] name = "tomli" version = "2.0.1" requires_python = ">=3.7" summary = "A lil' TOML parser" groups = ["release", "test"] marker = 
"python_version < \"3.11\"" files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] [[package]] name = "tornado" version = "6.4" requires_python = ">= 3.8" summary = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." groups = ["doc"] marker = "python_version >= \"3.9\"" files = [ {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, ] [[package]] name = "towncrier" version = "23.11.0" requires_python = ">=3.8" summary = "Building newsfiles for your project." 
groups = ["release"] marker = "python_version >= \"3.8\"" dependencies = [ "click", "importlib-resources>=5; python_version < \"3.10\"", "incremental", "jinja2", "tomli; python_version < \"3.11\"", ] files = [ {file = "towncrier-23.11.0-py3-none-any.whl", hash = "sha256:2e519ca619426d189e3c98c99558fe8be50c9ced13ea1fc20a4a353a95d2ded7"}, {file = "towncrier-23.11.0.tar.gz", hash = "sha256:13937c247e3f8ae20ac44d895cf5f96a60ad46cfdcc1671759530d7837d9ee5d"}, ] [[package]] name = "tqdm" version = "4.66.2" requires_python = ">=3.7" summary = "Fast, Extensible Progress Meter" groups = ["default"] dependencies = [ "colorama; platform_system == \"Windows\"", ] files = [ {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, ] [[package]] name = "typing-extensions" version = "4.7.1" requires_python = ">=3.7" summary = "Backported and Experimental Type Hints for Python 3.7+" groups = ["bundle", "default", "doc", "full", "lint", "test"] files = [ {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] name = "urllib3" version = "2.0.7" requires_python = ">=3.7" summary = "HTTP library with thread-safe connection pooling, file post, and more." groups = ["default", "doc"] files = [ {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, ] [[package]] name = "wcwidth" version = "0.2.13" summary = "Measures the displayed width of unicode strings in a terminal" groups = ["license"] marker = "python_version >= \"3.9\"" files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] [[package]] name = "yapf" version = "0.27.0" summary = "A formatter for Python code." groups = ["format", "lint"] files = [ {file = "yapf-0.27.0-py2.py3-none-any.whl", hash = "sha256:613deba14233623ff3432d9d5032631b5f600be97b39f66932cbe67648bfa8ea"}, {file = "yapf-0.27.0.tar.gz", hash = "sha256:34f6f80c446dcb2c44bd644c4037a2024b6645e293a4c9c4521983dd0bb247a1"}, ] [[package]] name = "zipp" version = "3.15.0" requires_python = ">=3.7" summary = "Backport of pathlib-compatible object wrapper for zip files" groups = ["bundle", "default", "doc", "full", "lint", "release", "test"] marker = "python_version < \"3.10\"" files = [ {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, ] B2_Command_Line_Tool-3.19.1/pyinstaller-hooks/000077500000000000000000000000001461201031300211305ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/pyinstaller-hooks/hook-b2.py000066400000000000000000000023221461201031300227420ustar00rootroot00000000000000###################################################################### # # File: pyinstaller-hooks/hook-b2.py # # Copyright 2022 Backblaze Inc. 
All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################

from pathlib import Path

license_file = Path('b2/licenses_output.txt')
assert license_file.exists()

datas = [
    # When '.' was provided here, the license file was copied to the root of the executable.
    # Before ApiVer, it placed the file in the `b2/` directory.
    # I have no idea why it worked before or how it works now.
    # If you mean to debug it in the future, know that `pyinstaller` provides a special
    # attribute in the `sys` module whenever it runs.
    #
    # Example:
    #     import sys
    #     if hasattr(sys, '_MEIPASS'):
    #         self._print(f'{NAME}')
    #         self._print(f'{sys._MEIPASS}')
    #         elems = [elem for elem in pathlib.Path(sys._MEIPASS).glob('**/*')]
    #         self._print(f'{elems}')
    #
    # If used at the very start of the `_run` of the `Licenses` command, it will print
    # all the files that were unpacked from the executable.
    (str(license_file), 'b2/'),
]

B2_Command_Line_Tool-3.19.1/pyinstaller-hooks/hook-prettytable.py
######################################################################
#
# File: pyinstaller-hooks/hook-prettytable.py
#
# Copyright 2022 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################
from PyInstaller.utils.hooks import collect_all

# prettytable is excluded because the `prettytable` module is provided by the `PTable` package;
# pyinstaller fails to resolve this, thus we do it manually here
excludedimports = ['prettytable']

datas, binaries, hiddenimports = collect_all('prettytable')

B2_Command_Line_Tool-3.19.1/pyproject.toml
[project]
name = "b2"
description = "Command Line Tool for Backblaze B2"
authors = [
    { name = "Backblaze Inc", email = "support@backblaze.com" },
]
dynamic = ["version"]
requires-python = ">=3.7"
keywords = ["backblaze b2 cloud storage"]
license = {text = "MIT"}
readme = "README.md"
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Developers",
    "Topic :: Software Development :: Libraries",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
]
dependencies = [
    "argcomplete>=2,<4",
    "arrow>=1.0.2,<2.0.0",
    "b2sdk>=2.1.0,<3",
    "docutils>=0.18.1",
    "idna~=3.4; platform_system == 'Java'",
    "importlib-metadata>=3.3; python_version < '3.8'",
    "phx-class-registry>=4.0,<5",
    "rst2ansi==0.1.5",
    "tabulate==0.9.0",
    "tqdm>=4.65.0,<5",
    "platformdirs>=3.11.0,<5",
    "setuptools>=60; python_version < '3.10'",  # required by phx-class-registry<4.1
]

[project.optional-dependencies]
# doc and license are actually dev requirements, not optional
# requirements. They should be removed from this section when
# a breaking version is released.
doc = [
    "sadisplay>=0.4.9; python_version>='3.9'",
    "sphinx>=7.2,<8; python_version>='3.9'",
    "sphinx-argparse; python_version>='3.9'",
    "sphinx-autobuild; python_version>='3.9'",
    "sphinx-rtd-theme>=1.3,<2; python_version>='3.9'",
    "sphinxcontrib-plantuml; python_version>='3.9'"
]
license = [
    "pip>=23.1.0,<24",
    "pip-licenses==3.5.5; python_version < '3.9'",
    "pip-licenses~=4.3.3; python_version >= '3.9'",
    "pipdeptree>=2.9,<3; python_version>='3.9'",
    "prettytable~=3.7; python_version < '3.9'",
    "prettytable~=3.9; python_version >= '3.9'",
]
full = [
    "pydantic>=2.0.1,<3"
]

[project.urls]
Homepage = "https://github.com/Backblaze/B2_Command_Line_Tool"

[project.scripts]
b2 = "b2._internal.b2v3.__main__:main"
b2v3 = "b2._internal.b2v3.__main__:main"
_b2v4 = "b2._internal._b2v4.__main__:main"

[build-system]
requires = ["pdm-backend"]
build-backend = "pdm.backend"

[tool.liccheck]
authorized_licenses = [
    "bsd",
    "new bsd",
    "bsd license",
    "new bsd license",
    "simplified bsd",
    "apache",
    "apache 2.0",
    "apache software",
    "apache software license",
    "lgpl",
    "gnu lgpl",
    "gnu library or lesser general public license (lgpl)",
    "lgpl with exceptions or zpl",
    "isc license",
    "isc license (iscl)",
    "mit",
    "mit license",
    "mozilla public license 2.0 (mpl 2.0)",
    "mpl-2.0",
    "psf",
    "python software foundation",
    "python software foundation license",
    "zpl 2.1",
]
unauthorized_licenses = [
    "affero",
    "agpl",
    "gpl v3",
    "gpl v2",
    "gpl",
]
dependencies = true
optional_dependencies = ["doc", "full", "license"]

[tool.ruff]
target-version = "py37"  # to be replaced by project:requires-python once we have that section in here
# TODO add D
select = ["E", "F", "I", "UP"]
# TODO: remove E501 once docstrings are formatted
ignore = [
    "D100", "D105", "D107", "D200", "D202", "D203", "D205", "D212", "D400", "D401", "D415",
    "D101", "D102", "D103", "D104",  # TODO remove once we have docstrings for all public methods
    "E501",  # TODO: remove E501 once docstrings are formatted
]
line-length = 100

[tool.ruff.per-file-ignores]
"__init__.py" = ["F401"]
"test/**" = ["D", "F403", "F405"]
"b2/console_tool.py" = ["E402"]

[tool.towncrier]
directory = "changelog.d"
filename = "CHANGELOG.md"
start_string = "\n"
underlines = ["", "", ""]
title_format = "## [{version}](https://github.com/Backblaze/B2_Command_Line_Tool/releases/tag/v{version}) - {project_date}"
issue_format = "[#{issue}](https://github.com/Backblaze/B2_Command_Line_Tool/issues/{issue})"

[[tool.towncrier.type]]
directory = "removed"
name = "Removed"
showcontent = true

[[tool.towncrier.type]]
directory = "changed"
name = "Changed"
showcontent = true

[[tool.towncrier.type]]
directory = "fixed"
name = "Fixed"
showcontent = true

[[tool.towncrier.type]]
directory = "deprecated"
name = "Deprecated"
showcontent = true

[[tool.towncrier.type]]
directory = "added"
name = "Added"
showcontent = true

[[tool.towncrier.type]]
directory = "doc"
name = "Doc"
showcontent = true

[[tool.towncrier.type]]
directory = "infrastructure"
name = "Infrastructure"
showcontent = true

[tool.pdm]
distribution = "true"

[tool.pdm.build]
includes = ["b2"]

[tool.pdm.version]
source = "scm"

[tool.pdm.scripts]
assert_prod_python = "pdm run python -c 'import sys; assert sys.version_info >= (3, 11)'"
lock_prod_no_cross_platform = "pdm lock --lockfile pdm.prod.lock --group full --group test --strategy no_cross_platform"
lock_bundle = {composite=["assert_prod_python", "lock_prod_no_cross_platform"]}

[tool.pdm.dev-dependencies]
format = [
    "yapf==0.27",
    "ruff==0.0.272",
]
lint = [
    "yapf==0.27",
    "ruff==0.0.272",
    "pytest==6.2.5",
    "liccheck==0.9.2",
    "setuptools>=60",  # required by liccheck
]
release = [
    "towncrier==23.11.0; python_version>='3.8'",
]
test = [
    "coverage==7.2.7",
    "pexpect==4.8.0",
    "pytest==6.2.5",
    "pytest-cov==3.0.0",
    "pytest-forked==1.4.0",
    "pytest-xdist==2.5.0",
    "backoff==2.1.2",
    "more-itertools==8.13.0",
]
bundle = [
    "pyinstaller<6,>=5.13; python_version < \"3.13\"",
    "pyinstaller-hooks-contrib>=2023.6",
    "patchelf-wrapper==1.2.0; platform_system == \"Linux\"",
    "staticx~=0.13.9; platform_system == \"Linux\"",
]

B2_Command_Line_Tool-3.19.1/setup.cfg
[yapf]
based_on_style=facebook
COLUMN_LIMIT=100
SPACE_BETWEEN_ENDING_COMMA_AND_CLOSING_BRACKET=False
SPLIT_PENALTY_AFTER_OPENING_BRACKET=0

[tool:pytest]
markers =
    require_secrets: mark a test as requiring secrets such as API keys

[coverage:run]
branch=true

B2_Command_Line_Tool-3.19.1/test/__init__.py
######################################################################
#
# File: test/__init__.py
#
# Copyright 2019 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################

B2_Command_Line_Tool-3.19.1/test/conftest.py
######################################################################
#
# File: test/conftest.py
#
# Copyright 2023 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################
import sys

import pytest

from b2._internal._utils.python_compat import removeprefix


@pytest.hookimpl
def pytest_configure(config):
    config.addinivalue_line(
        "markers",
        "apiver(from_ver, to_ver): run tests only on certain apiver versions",
    )


@pytest.fixture(scope='session')
def apiver(request):
    """Get apiver as a v-prefixed string, e.g. "v2"."""
    return removeprefix(request.config.getoption('--cli', '').lstrip('_'), "b2") or None


@pytest.fixture(scope='session')
def apiver_int(apiver) -> int:
    return int(apiver[1:]) if apiver else -1


@pytest.fixture(autouse=True)
def run_on_apiver_handler(request, apiver_int):
    """
    Auto-fixture that allows skipping tests based on the CLI apiver versions.

    Usage:

        @pytest.mark.apiver(1, 3)
        def test_foo():
            # Test is run only for versions 1 and 3
            ...

        @pytest.mark.apiver(from_ver=2, to_ver=5)
        def test_bar():
            # Test is run only for versions 2, 3, 4 and 5 (both bounds are inclusive)
            ...

    Note that it requires the `apiver_int` fixture to be defined.
    Unit tests and integration tests handle it a little differently,
    thus two different fixtures are provided.
    """
    node = request.node.get_closest_marker('apiver')
    if not node:
        return

    if not node.args and not node.kwargs:
        return

    assert apiver_int >= 0, 'apiver_int fixture is not defined'

    if node.args:
        if apiver_int in node.args:
            # Run the test.
            return

    if node.kwargs:
        from_ver = node.kwargs.get('from_ver', 0)
        to_ver = node.kwargs.get('to_ver', sys.maxsize)
        if from_ver <= apiver_int <= to_ver:
            # Run the test.
            return

    pytest.skip('Not supported on this apiver version')

B2_Command_Line_Tool-3.19.1/test/helpers.py
######################################################################
#
# File: test/helpers.py
#
# Copyright 2023 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################
import platform

import pytest

_MISSING = object()


def skip_on_windows(*args, reason='Not supported on Windows', **kwargs):
    return pytest.mark.skipif(
        platform.system() == 'Windows',
        reason=reason,
    )(*args, **kwargs)


def b2_uri_args_v3(bucket_name, path=_MISSING):
    if path is _MISSING:
        return [bucket_name]
    else:
        return [bucket_name, path]


def b2_uri_args_v4(bucket_name, path=_MISSING):
    if path is _MISSING:
        path = ''
    return [f'b2://{bucket_name}/{path}']

B2_Command_Line_Tool-3.19.1/test/integration/__init__.py
######################################################################
#
# File: test/integration/__init__.py
#
# Copyright 2020 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################
"""
B2 CLI integration tests

This package contains tests that require interaction with a remote server.

Integration tests should be runnable against any arbitrary command implementing
the `b2` CLI, supplied via the `--sut` flag.
"""

B2_Command_Line_Tool-3.19.1/test/integration/cleanup_buckets.py
######################################################################
#
# File: test/integration/cleanup_buckets.py
#
# Copyright 2022 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################


def test_cleanup_buckets(b2_api):
    # This is not a real test; it is intended to be called via pytest
    # because it reuses fixtures which have everything set up.
    pass  # b2_api calls b2_api.clean_buckets() in its finalizer

B2_Command_Line_Tool-3.19.1/test/integration/conftest.py
######################################################################
#
# File: test/integration/conftest.py
#
# Copyright 2020 Backblaze Inc. All Rights Reserved.
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from __future__ import annotations import logging import os import pathlib import re import subprocess import sys import tempfile from os import environ, path from tempfile import TemporaryDirectory import pytest from b2sdk.v2 import B2_ACCOUNT_INFO_ENV_VAR, XDG_CONFIG_HOME_ENV_VAR, Bucket from b2._internal.version_listing import ( CLI_VERSIONS, LATEST_STABLE_VERSION, UNSTABLE_CLI_VERSION, get_int_version, ) from ..helpers import b2_uri_args_v3, b2_uri_args_v4 from .helpers import NODE_DESCRIPTION, RNG_SEED, Api, CommandLine, bucket_name_part, random_token logger = logging.getLogger(__name__) GENERAL_BUCKET_NAME_PREFIX = 'clitst' TEMPDIR = tempfile.gettempdir() ROOT_PATH = pathlib.Path(__file__).parent.parent.parent @pytest.fixture(scope='session', autouse=True) def summary_notes(request, worker_id): capmanager = request.config.pluginmanager.getplugin("capturemanager") with capmanager.global_and_fixture_disabled(): log_handler = logging.StreamHandler(sys.stderr) log_fmt = logging.Formatter(f'{worker_id} %(asctime)s %(levelname).1s %(message)s') log_handler.setFormatter(log_fmt) logger.addHandler(log_handler) class Notes: def append(self, note): logger.info(note) return Notes() @pytest.fixture(scope='session', autouse=True) def node_stats(summary_notes): summary_notes.append(f"NODE={NODE_DESCRIPTION} seed={RNG_SEED}") @pytest.hookimpl def pytest_addoption(parser): parser.addoption( '--sut', default=f'{sys.executable} -m b2._internal.{UNSTABLE_CLI_VERSION}', help='Path to the System Under Test', ) parser.addoption( '--env-file-cmd-placeholder', default=None, help=( 'If specified, all occurrences of this string in `--sut` will be substituted with a ' 'path to a tmp file containing env vars to be used when running commands in tests. Useful ' 'for docker.' ) ) parser.addoption( '--as_version', default=None, help='Force running tests as a particular version of the CLI, ' 'useful if version cannot be determined easily from the executable', ) parser.addoption('--cleanup', action='store_true', help='Perform full cleanup at exit') def get_raw_cli_int_version(config) -> int | None: forced_version = config.getoption('--as_version') if forced_version: return int(forced_version) executable = config.getoption('--sut') # If the executable contains anything that looks like a proper version, we can try to pick it up. versions_list = '|'.join(CLI_VERSIONS) versions_match = re.search(rf'({versions_list})', executable) if versions_match: return get_int_version(versions_match.group(1)) return None def get_cli_int_version(config) -> int: return get_raw_cli_int_version(config) or get_int_version(LATEST_STABLE_VERSION) @pytest.fixture(scope='session') def apiver_int(request): return get_cli_int_version(request.config) @pytest.fixture(scope='session') def apiver(apiver_int): return f"v{apiver_int}" @pytest.hookimpl def pytest_report_header(config): cli_version = get_cli_int_version(config) return f'b2 apiver: {cli_version}' @pytest.fixture(scope='session') def cli_version(request) -> str: """ Get CLI version name, i.e. b2v3, _b2v4, etc. """ # The default stable version could be provided directly as e.g.: b2v3, but also indirectly as b2. # In case there is no direct version, we return the default binary name instead. 
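    # For example (hypothetical values): a --sut of 'python -m b2._internal.b2v3' contains
    # a version hint and resolves to 'b2v3', while a bare 'b2' binary path contains none,
    # so the default binary name 'b2' is returned below.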
    raw_cli_version = get_raw_cli_int_version(request.config)
    if raw_cli_version is None:
        return 'b2'

    for version in CLI_VERSIONS:
        if get_int_version(version) == raw_cli_version:
            return version
    raise pytest.UsageError(f'Unknown CLI version: {raw_cli_version}')


@pytest.fixture(scope='session')
def application_key() -> str:
    key = environ.get('B2_TEST_APPLICATION_KEY')
    assert key, 'B2_TEST_APPLICATION_KEY is not set'
    yield key


@pytest.fixture(scope='session')
def application_key_id() -> str:
    key_id = environ.get('B2_TEST_APPLICATION_KEY_ID')
    assert key_id, 'B2_TEST_APPLICATION_KEY_ID is not set'
    yield key_id


@pytest.fixture(scope='session')
def realm() -> str:
    yield environ.get('B2_TEST_ENVIRONMENT', 'production')


@pytest.fixture
def bucket(bucket_factory) -> Bucket:
    return bucket_factory()


@pytest.fixture
def bucket_factory(b2_api, schedule_bucket_cleanup):
    def create_bucket(**kwargs):
        new_bucket = b2_api.create_bucket(**kwargs)
        schedule_bucket_cleanup(new_bucket.name, new_bucket.bucket_dict)
        return new_bucket

    yield create_bucket


@pytest.fixture(scope='function')
def bucket_name(bucket) -> str:
    yield bucket.name


@pytest.fixture(scope='function')
def file_name(bucket) -> str:
    file_ = bucket.upload_bytes(b'test_file', f'{random_token(8)}.txt')
    yield file_.file_name


@pytest.fixture(scope='function')  # , autouse=True)
def debug_print_buckets(b2_api):
    print('-' * 30)
    print('Buckets before test ' + environ['PYTEST_CURRENT_TEST'])
    num_buckets = b2_api.count_and_print_buckets()
    print('-' * 30)
    try:
        yield
    finally:
        print('-' * 30)
        print('Buckets after test ' + environ['PYTEST_CURRENT_TEST'])
        delta = b2_api.count_and_print_buckets() - num_buckets
        print(f'DELTA: {delta}')
        print('-' * 30)


@pytest.fixture(scope='session')
def this_run_bucket_name_prefix() -> str:
    yield GENERAL_BUCKET_NAME_PREFIX + bucket_name_part(8)


@pytest.fixture(scope='session')
def monkeysession():
    with pytest.MonkeyPatch.context() as mp:
        yield mp


@pytest.fixture(scope='session', autouse=True)
def auto_change_account_info_dir(monkeysession) -> str:
    """
    Automatically, for the whole test session:
    1) temporarily remove B2_APPLICATION_KEY and B2_APPLICATION_KEY_ID from the environment
    2) create a temporary directory for storing the account info database
    3) set B2_ACCOUNT_INFO_ENV_VAR to point to the temporary account info file
    """
    monkeysession.delenv('B2_APPLICATION_KEY_ID', raising=False)
    monkeysession.delenv('B2_APPLICATION_KEY', raising=False)

    # make b2sdk use temp dir for storing default & per-profile account information
    with TemporaryDirectory() as temp_dir:
        monkeysession.setenv(B2_ACCOUNT_INFO_ENV_VAR, path.join(temp_dir, '.b2_account_info'))
        monkeysession.setenv(XDG_CONFIG_HOME_ENV_VAR, temp_dir)
        yield temp_dir


@pytest.fixture(scope='session')
def b2_api(
    application_key_id,
    application_key,
    realm,
    this_run_bucket_name_prefix,
    auto_change_account_info_dir,
    summary_notes,
) -> Api:
    api = Api(
        application_key_id,
        application_key,
        realm,
        general_bucket_name_prefix=GENERAL_BUCKET_NAME_PREFIX,
        this_run_bucket_name_prefix=this_run_bucket_name_prefix,
    )
    yield api
    api.clean_buckets()
    # showing account_id in the logs is safe; so we explicitly prevent it from being redacted
    summary_notes.append(f"B2 Account ID: {api.account_id[:1]!r}{api.account_id[1:]!r}")
    summary_notes.append(f"Bucket names used during these tests: {api.bucket_name_log!r}")


@pytest.fixture(scope='module')
def global_b2_tool(
    request, application_key_id, application_key, realm, this_run_bucket_name_prefix, b2_api,
    auto_change_account_info_dir, b2_uri_args
) -> CommandLine:
    tool =
CommandLine( request.config.getoption('--sut'), application_key_id, application_key, realm, this_run_bucket_name_prefix, request.config.getoption('--env-file-cmd-placeholder'), api_wrapper=b2_api, b2_uri_args=b2_uri_args, ) tool.reauthorize(check_key_capabilities=True) # reauthorize for the first time (with check) yield tool @pytest.fixture(scope='function') def b2_tool(global_b2_tool): """Automatically reauthorized b2_tool for each test (without check)""" global_b2_tool.reauthorize(check_key_capabilities=False) return global_b2_tool @pytest.fixture def account_info_file() -> pathlib.Path: return pathlib.Path(os.environ[B2_ACCOUNT_INFO_ENV_VAR]).expanduser() @pytest.fixture def schedule_bucket_cleanup(global_b2_tool): """ Explicitly ask for buckets cleanup after the test This should be only used when testing `create-bucket` command; otherwise use `bucket_factory` fixture. """ buckets_to_clean = {} def add_bucket_to_cleanup(bucket_name, bucket_dict: dict | None = None): buckets_to_clean[bucket_name] = bucket_dict yield add_bucket_to_cleanup if buckets_to_clean: global_b2_tool.reauthorize( check_key_capabilities=False ) # test may have mangled authorization global_b2_tool.cleanup_buckets(buckets_to_clean) @pytest.fixture(autouse=True, scope='session') def sample_filepath(): """Copy the README.md file to /tmp so that docker tests can access it""" tmp_readme = pathlib.Path(TEMPDIR) / 'README.md' if not tmp_readme.exists(): tmp_readme.write_text((ROOT_PATH / 'README.md').read_text()) return tmp_readme @pytest.fixture(autouse=True, scope='session') def sample_file(sample_filepath): return str(sample_filepath) @pytest.fixture(scope='session') def is_running_on_docker(pytestconfig): return pytestconfig.getoption('--sut').startswith('docker') SECRET_FIXTURES = {'application_key', 'application_key_id'} @pytest.fixture(scope="session") def homedir(tmp_path_factory): yield tmp_path_factory.mktemp("test_homedir") @pytest.fixture(scope="session") def b2_in_path(tmp_path_factory): """ Create a dummy b2 executable in a temporary directory and add it to PATH. This allows us to test the b2 command from shell level even if tested `b2` package was not installed. """ tempdir = tmp_path_factory.mktemp("temp_bin") temp_executable = tempdir / "b2" with open(temp_executable, "w") as f: f.write( f"#!{sys.executable}\n" "import sys\n" f"sys.path.insert(0, {os.getcwd()!r})\n" # ensure relative imports work even if command is run in different directory "from b2.console_tool import main\n" "main()\n" ) temp_executable.chmod(0o700) original_path = os.environ["PATH"] new_path = f"{tempdir}:{original_path}" yield new_path @pytest.fixture(scope="module") def env(b2_in_path, homedir, monkeysession, is_running_on_docker): """Get ENV for running b2 command from shell level.""" if not is_running_on_docker: monkeysession.setenv('PATH', b2_in_path) monkeysession.setenv('HOME', str(homedir)) monkeysession.setenv('SHELL', "/bin/bash") # fix for running under github actions yield os.environ @pytest.fixture def bash_runner(env): """Run command in bash shell.""" def run_command(command: str): try: return subprocess.run( ["/bin/bash", "-c", command], capture_output=True, check=True, env=env, text=True, ) except subprocess.CalledProcessError as e: print(f"Command {command!r} failed with exit code {e.returncode}") print(e.stdout) print(e.stderr, file=sys.stderr) raise return run_command def pytest_collection_modifyitems(items): """ Add 'require_secrets' marker to all tests that use secrets. 
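
    Any test that uses one of the SECRET_FIXTURES (directly or through another
    fixture) gets the marker, so secret-dependent tests can be deselected in one
    go, e.g. with `pytest -m 'not require_secrets'`; the marker itself is
    registered in setup.cfg.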
""" for item in items: if SECRET_FIXTURES & set(getattr(item, 'fixturenames', ())): item.add_marker('require_secrets') @pytest.fixture(scope='module') def b2_uri_args(apiver_int): if apiver_int >= 4: return b2_uri_args_v4 else: return b2_uri_args_v3 B2_Command_Line_Tool-3.19.1/test/integration/helpers.py000077500000000000000000000516301461201031300227670ustar00rootroot00000000000000###################################################################### # # File: test/integration/helpers.py # # Copyright 2022 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from __future__ import annotations import dataclasses import json import logging import os import pathlib import platform import random import re import secrets import shutil import string import subprocess import sys import threading import time import warnings from dataclasses import dataclass from datetime import datetime, timedelta from os import environ, linesep from pathlib import Path from tempfile import mkdtemp, mktemp import backoff from b2sdk.v2 import ( ALL_CAPABILITIES, BUCKET_NAME_CHARS_UNIQ, BUCKET_NAME_LENGTH_RANGE, NO_RETENTION_FILE_SETTING, B2Api, Bucket, EncryptionAlgorithm, EncryptionKey, EncryptionMode, EncryptionSetting, InMemoryAccountInfo, InMemoryCache, LegalHold, RetentionMode, fix_windows_path_limit, ) from b2sdk.v2.exception import ( BadRequest, BucketIdNotFound, FileNotPresent, TooManyRequests, v3BucketIdNotFound, ) from b2._internal.console_tool import Command, current_time_millis logger = logging.getLogger(__name__) # A large period is set here to avoid issues related to clock skew or other time-related issues under CI BUCKET_CLEANUP_PERIOD_MILLIS = timedelta(days=1).total_seconds() * 1000 ONE_HOUR_MILLIS = 60 * 60 * 1000 ONE_DAY_MILLIS = ONE_HOUR_MILLIS * 24 BUCKET_NAME_LENGTH = BUCKET_NAME_LENGTH_RANGE[1] BUCKET_CREATED_AT_MILLIS = 'created_at_millis' NODE_DESCRIPTION = f"{platform.node()}: {platform.platform()}" def get_seed(): """ Get seed for random number generator. 
GH Actions machines seem to offer a very limited entropy pool """ return b''.join( ( secrets.token_bytes(32), str(time.time_ns()).encode(), NODE_DESCRIPTION.encode(), str(os.getpid()).encode(), # needed due to pytest-xdist str(environ).encode('utf8', errors='ignore' ), # especially helpful under GitHub (and similar) CI ) ) RNG = random.Random(get_seed()) RNG_SEED = RNG.randint(0, 2 << 31) RNG_COUNTER = 0 if sys.version_info < (3, 9): RNG.randbytes = lambda n: RNG.getrandbits(n * 8).to_bytes(n, 'little') SSE_NONE = EncryptionSetting(mode=EncryptionMode.NONE,) SSE_B2_AES = EncryptionSetting( mode=EncryptionMode.SSE_B2, algorithm=EncryptionAlgorithm.AES256, ) _SSE_KEY = RNG.randbytes(32) SSE_C_AES = EncryptionSetting( mode=EncryptionMode.SSE_C, algorithm=EncryptionAlgorithm.AES256, key=EncryptionKey(secret=_SSE_KEY, key_id='user-generated-key-id') ) SSE_C_AES_2 = EncryptionSetting( mode=EncryptionMode.SSE_C, algorithm=EncryptionAlgorithm.AES256, key=EncryptionKey(secret=_SSE_KEY, key_id='another-user-generated-key-id') ) def random_token(length: int, chars=string.ascii_letters) -> str: return ''.join(RNG.choice(chars) for _ in range(length)) def bucket_name_part(length: int) -> str: assert length >= 1 global RNG_COUNTER RNG_COUNTER += 1 name_part = random_token(length, BUCKET_NAME_CHARS_UNIQ) logger.info('RNG_SEED: %s', RNG_SEED) logger.info('RNG_COUNTER: %i, length: %i', RNG_COUNTER, length) logger.info('name_part: %s', name_part) return name_part @dataclass class Api: account_id: str application_key: str realm: str general_bucket_name_prefix: str this_run_bucket_name_prefix: str api: B2Api = None bucket_name_log: list[str] = dataclasses.field(default_factory=list) def __post_init__(self): info = InMemoryAccountInfo() cache = InMemoryCache() self.api = B2Api(info, cache=cache) self.api.authorize_account(self.realm, self.account_id, self.application_key) assert BUCKET_NAME_LENGTH - len( self.this_run_bucket_name_prefix ) > 5, self.this_run_bucket_name_prefix def new_bucket_name(self) -> str: bucket_name = self.this_run_bucket_name_prefix + bucket_name_part( BUCKET_NAME_LENGTH - len(self.this_run_bucket_name_prefix) ) self.bucket_name_log.append(bucket_name) return bucket_name def new_bucket_info(self) -> dict: return { BUCKET_CREATED_AT_MILLIS: str(current_time_millis()), "created_by": NODE_DESCRIPTION, } def create_bucket(self, bucket_type: str = 'allPublic', **kwargs) -> Bucket: bucket_name = self.new_bucket_name() return self.api.create_bucket( bucket_name, bucket_type=bucket_type, bucket_info=self.new_bucket_info(), **kwargs, ) def _should_remove_bucket(self, bucket: Bucket) -> tuple[bool, str]: if bucket.name.startswith(self.this_run_bucket_name_prefix): return True, 'it is a bucket for this very run' if bucket.name.startswith(self.general_bucket_name_prefix): if BUCKET_CREATED_AT_MILLIS in bucket.bucket_info: delete_older_than = current_time_millis() - BUCKET_CLEANUP_PERIOD_MILLIS this_bucket_creation_time = int(bucket.bucket_info[BUCKET_CREATED_AT_MILLIS]) if this_bucket_creation_time < delete_older_than: return True, f"this_bucket_creation_time={this_bucket_creation_time} < delete_older_than={delete_older_than}" return False, f"this_bucket_creation_time={this_bucket_creation_time} >= delete_older_than={delete_older_than}" else: return True, 'undefined ' + BUCKET_CREATED_AT_MILLIS return False, f'does not start with {self.general_bucket_name_prefix!r}' def clean_buckets(self, quick=False): # even with use_cache=True, if cache is empty API call will be made buckets = 
self.api.list_buckets(use_cache=quick) print('Total bucket count:', len(buckets)) remaining_buckets = [] for bucket in buckets: should_remove, why = self._should_remove_bucket(bucket) if not should_remove: print(f'Skipping bucket removal {bucket.name!r} because {why}') remaining_buckets.append(bucket) continue print('Trying to remove bucket:', bucket.name, 'because', why) try: self.clean_bucket(bucket) except BucketIdNotFound: print(f'It seems that bucket {bucket.name} has already been removed') print('Total bucket count after cleanup:', len(remaining_buckets)) for bucket in remaining_buckets: print(bucket) @backoff.on_exception( backoff.expo, TooManyRequests, max_tries=8, ) def clean_bucket(self, bucket: Bucket | str): if isinstance(bucket, str): bucket = self.api.get_bucket_by_name(bucket) # try optimistic bucket removal first, since it is completely free (as opposed to `ls` call) try: return self.api.delete_bucket(bucket) except (BucketIdNotFound, v3BucketIdNotFound): return # bucket was already removed except BadRequest as exc: assert exc.code == 'cannot_delete_non_empty_bucket' files_leftover = False file_versions = bucket.ls(latest_only=False, recursive=True) for file_version_info, _ in file_versions: if file_version_info.file_retention: if file_version_info.file_retention.mode == RetentionMode.GOVERNANCE: print('Removing retention from file version:', file_version_info.id_) self.api.update_file_retention( file_version_info.id_, file_version_info.file_name, NO_RETENTION_FILE_SETTING, True ) elif file_version_info.file_retention.mode == RetentionMode.COMPLIANCE: if file_version_info.file_retention.retain_until > current_time_millis(): # yapf: disable print( f'File version: {file_version_info.id_} cannot be removed due to compliance mode retention' ) files_leftover = True continue elif file_version_info.file_retention.mode == RetentionMode.NONE: pass else: raise ValueError( f'Unknown retention mode: {file_version_info.file_retention.mode}' ) if file_version_info.legal_hold.is_on(): print('Removing legal hold from file version:', file_version_info.id_) self.api.update_file_legal_hold( file_version_info.id_, file_version_info.file_name, LegalHold.OFF ) print('Removing file version:', file_version_info.id_) try: self.api.delete_file_version(file_version_info.id_, file_version_info.file_name) except FileNotPresent: print( f'It seems that file version {file_version_info.id_} has already been removed' ) if files_leftover: print('Unable to remove bucket because some retained files remain') else: print('Removing bucket:', bucket.name) try: self.api.delete_bucket(bucket) except BucketIdNotFound: print(f'It seems that bucket {bucket.name} has already been removed') print() def count_and_print_buckets(self) -> int: buckets = self.api.list_buckets() count = len(buckets) print(f'Total bucket count at {datetime.now()}: {count}') for i, bucket in enumerate(buckets, start=1): print(f'- {i}\t{bucket.name} [{bucket.id_}]') return count def print_text_indented(text): """ Prints text that may include weird characters, indented four spaces. 
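    Each line is passed through repr(), so control characters come out escaped;
    for example (illustrative), a line containing a tab would be printed as:

        foo\tbar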
""" for line in text.split(linesep): Command._print_helper(sys.stdout, sys.stdout.encoding, ' ', repr(line)[1:-1]) def print_output(status, stdout, stderr): print(' status:', status) if stdout != '': print(' stdout:') print_text_indented(stdout) if stderr != '': print(' stderr:') print_text_indented(stderr) print() def serialize_enc_settings(value): if not isinstance(value, EncryptionSetting): raise TypeError return value.as_dict() def print_json_indented(value): """ Converts the value to JSON, then prints it. """ print_text_indented( json.dumps( value, indent=4, sort_keys=True, ensure_ascii=True, default=serialize_enc_settings ) ) def remove_warnings(text): return linesep.join(line for line in text.split(linesep) if 'DeprecationWarning' not in line) class StringReader: def __init__(self): self.string = None def get_string(self): return self.string def read_from(self, f): try: self.string = f.read() except Exception as e: print(e) self.string = str(e) def should_equal(expected, actual): print(' expected:') print_json_indented(expected) print(' actual:') print_json_indented(actual) assert expected == actual print() class CommandLine: EXPECTED_STDERR_PATTERNS = [ re.compile(r'^Using https?://[\w.]+$'), # account auth re.compile(r'.*B/s]$', re.DOTALL), # progress bar re.compile(r'^\r?$'), # empty line re.compile( r'Encrypting file\(s\) with SSE-C without providing key id. ' r'Set B2_DESTINATION_SSE_C_KEY_ID to allow key identification' ), re.compile( r'WARNING: Unable to print unicode. Encoding for stdout is: ' r'\'[a-zA-Z0-9]+\'' ), # windows-bundle tests on CI use cp1252 re.compile(r'Trying to print: .*'), ] def __init__( self, command, account_id, application_key, realm, bucket_name_prefix, env_file_cmd_placeholder, api_wrapper: Api, b2_uri_args, ): self.command = command self.account_id = account_id self.application_key = application_key self.realm = realm self.bucket_name_prefix = bucket_name_prefix self.env_file_cmd_placeholder = env_file_cmd_placeholder self.api_wrapper = api_wrapper self.b2_uri_args = b2_uri_args def generate_bucket_name(self): return self.api_wrapper.new_bucket_name() def get_bucket_info_args(self) -> tuple[str, str]: return '--bucket-info', json.dumps(self.api_wrapper.new_bucket_info(), ensure_ascii=True) def run_command(self, args, additional_env: dict | None = None): """ Runs the command with the given arguments, returns a tuple in form of (succeeded, stdout) """ status, stdout, stderr = self.execute(args, additional_env) return status == 0 and stderr == '', stdout def should_succeed( self, args: list[str] | None, expected_pattern: str | None = None, additional_env: dict | None = None, expected_stderr_pattern: str | re.Pattern = None, ) -> str: """ Runs the command-line with the given arguments. Raises an exception if there was an error; otherwise, returns the stdout of the command as string. 
""" status, stdout, stderr = self.execute(args, additional_env) assert status == 0, f'FAILED with status {status}, stderr={stderr}' if expected_stderr_pattern: assert expected_stderr_pattern.search(stderr), \ f'stderr did not match pattern="{expected_stderr_pattern}", stderr="{stderr}"' elif stderr != '': for line in (s.strip() for s in stderr.split(os.linesep)): assert any(p.match(line) for p in self.EXPECTED_STDERR_PATTERNS), \ f'Unexpected stderr line: {repr(line)}' if expected_pattern is not None: assert re.search(expected_pattern, stdout), \ f'did not match pattern="{expected_pattern}", stdout="{stdout}"' return stdout.replace(os.linesep, '\n') @classmethod def prepare_env(self, additional_env: dict | None = None): environ['PYTHONIOENCODING'] = 'utf-8' env = environ.copy() env.update(additional_env or {}) return env def parse_command(self, env): """ Split `self.command` into a list of strings. If necessary, dump the env vars to a tmp file and substitute one the command's argument with that file's path. """ command = self.command.split(' ') if self.env_file_cmd_placeholder: if any('\n' in var_value for var_value in env.values()): raise ValueError( 'Env vars containing new line characters will break env file format' ) env_file_path = mktemp() pathlib.Path(env_file_path).write_text('\n'.join(f'{k}={v}' for k, v in env.items())) command = [ (c if c != self.env_file_cmd_placeholder else env_file_path) for c in command ] return command def execute( self, args: list[str | Path | int] | None = None, additional_env: dict | None = None, ): """ :param cmd: a command to run :param args: command's arguments :param additional_env: environment variables to pass to the command, overwriting parent process ones :return: (status, stdout, stderr) """ # We'll run the b2 command-line by running the b2 module from # the current directory or provided as parameter env = self.prepare_env(additional_env) command = self.parse_command(env) args: list[str] = [str(arg) for arg in args] if args else [] command.extend(args) print('Running:', ' '.join(command)) stdout = StringReader() stderr = StringReader() p = subprocess.Popen( command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=platform.system() != 'Windows', env=env, ) p.stdin.close() reader1 = threading.Thread(target=stdout.read_from, args=[p.stdout]) reader1.start() reader2 = threading.Thread(target=stderr.read_from, args=[p.stderr]) reader2.start() p.wait() reader1.join() reader2.join() stdout_decoded = remove_warnings(stdout.get_string().decode('utf-8', errors='replace')) stderr_decoded = remove_warnings(stderr.get_string().decode('utf-8', errors='replace')) print_output(p.returncode, stdout_decoded, stderr_decoded) return p.returncode, stdout_decoded, stderr_decoded def should_succeed_json(self, args, additional_env: dict | None = None, **kwargs): """ Runs the command-line with the given arguments. Raises an exception if there was an error; otherwise, treats the stdout as JSON and returns the data in it. """ result = self.should_succeed(args, additional_env=additional_env, **kwargs) try: loaded_result = json.loads(result) except json.JSONDecodeError: raise ValueError(f'{result} is not a valid json') return loaded_result def should_fail(self, args, expected_pattern, additional_env: dict | None = None): """ Runs the command-line with the given args, expecting the given pattern to appear in stderr. 
""" status, stdout, stderr = self.execute(args, additional_env) assert status != 0, 'ERROR: should have failed' assert re.search(expected_pattern, stdout + stderr), \ f'did not match pattern="{expected_pattern}", stdout="{stdout}", stderr="{stderr}"' def reauthorize(self, check_key_capabilities=False): """Clear and authorize again to the account.""" self.should_succeed(['clear-account']) self.should_succeed( [ 'authorize-account', '--environment', self.realm, self.account_id, self.application_key ] ) if check_key_capabilities: auth_dict = self.should_succeed_json(['get-account-info']) private_preview_caps = { 'readBucketNotifications', 'writeBucketNotifications', } missing_capabilities = set(ALL_CAPABILITIES) - { 'readBuckets', 'listAllBucketNames' } - private_preview_caps - set(auth_dict['allowed']['capabilities']) assert not missing_capabilities, f'it appears that the raw_api integration test is being run with a non-full key. Missing capabilities: {missing_capabilities}' def list_file_versions(self, bucket_name): return self.should_succeed_json( ['ls', '--json', '--recursive', '--versions', *self.b2_uri_args(bucket_name)] ) def cleanup_buckets(self, buckets: dict[str, dict | None]) -> None: for bucket_name, bucket_dict in buckets.items(): self.cleanup_bucket(bucket_name, bucket_dict) def cleanup_bucket(self, bucket_name: str, bucket_dict: dict | None = None) -> None: """ Cleanup bucket Since bucket was being handled by the tool, it is safe to assume it is cached in its cache and we don't need to call C class API list_buckets endpoint to get it. """ if not bucket_dict: try: bucket_dict = self.should_succeed_json(['get-bucket', bucket_name]) except (ValueError, AssertionError): # bucket doesn't exist return bucket = self.api_wrapper.api.BUCKET_CLASS( api=self.api_wrapper.api, id_=bucket_dict['bucketId'], name=bucket_name, ) self.api_wrapper.clean_bucket(bucket) class TempDir: def __init__(self): warnings.warn( 'TempDir is deprecated; use pytest tmp_path fixture instead', DeprecationWarning, stacklevel=2, ) self.dirpath = None def __enter__(self): self.dirpath = mkdtemp() return Path(self.dirpath) def __exit__(self, exc_type, exc_val, exc_tb): shutil.rmtree(fix_windows_path_limit(self.dirpath)) def read_file(path: str | Path): with open(path, 'rb') as f: return f.read() def write_file(path: str | Path, contents: bytes): with open(path, 'wb') as f: f.write(contents) def file_mod_time_millis(path: str | Path) -> int: return int(os.path.getmtime(path) * 1000) def set_file_mod_time_millis(path: str | Path, time): os.utime(path, (os.path.getatime(path), time / 1000)) def random_hex(length): return ''.join(RNG.choice('0123456789abcdef') for _ in range(length)) B2_Command_Line_Tool-3.19.1/test/integration/test_autocomplete.py000066400000000000000000000062151461201031300250610ustar00rootroot00000000000000###################################################################### # # File: test/integration/test_autocomplete.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. 
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################
import sys
from test.helpers import skip_on_windows

import pexpect
import pytest

TIMEOUT = 120  # CI can be slow at times when parallelization is extreme

BASHRC_CONTENT = """\
# ~/.bashrc dummy file
echo "Just testing if we don't replace existing script" > /dev/null
# >>> just a test section >>>
# regardless what is in there already
# <<< just a test section <<<
"""


@pytest.fixture(scope="session")
def bashrc(homedir):
    bashrc_path = homedir / '.bashrc'
    bashrc_path.write_text(BASHRC_CONTENT)
    yield bashrc_path


@pytest.fixture(scope="module")
def cli_command(request) -> str:
    return request.config.getoption('--sut')


@pytest.fixture(scope="module")
def autocomplete_installed(env, homedir, bashrc, cli_version, cli_command, is_running_on_docker):
    if is_running_on_docker:
        pytest.skip('Not supported on Docker')
    shell = pexpect.spawn(
        f'bash -i -c "{cli_command} install-autocomplete"', env=env, logfile=sys.stderr.buffer
    )
    try:
        shell.expect_exact('Autocomplete successfully installed for bash', timeout=TIMEOUT)
    finally:
        shell.close()
    shell.wait()
    assert (homedir / '.bash_completion.d' / cli_version).is_file()
    assert bashrc.read_text().startswith(BASHRC_CONTENT)


@pytest.fixture
def shell(env):
    shell = pexpect.spawn('bash -i', env=env, maxread=1000)
    shell.setwinsize(100, 100)  # required to see all suggestions in tests
    yield shell
    shell.close()


@skip_on_windows
def test_autocomplete_b2_commands(autocomplete_installed, is_running_on_docker, shell, cli_version):
    if is_running_on_docker:
        pytest.skip('Not supported on Docker')
    shell.send(f'{cli_version} \t\t')
    shell.expect_exact(["authorize-account", "download-file", "get-bucket"], timeout=TIMEOUT)


@skip_on_windows
def test_autocomplete_b2_only_matching_commands(
    autocomplete_installed, is_running_on_docker, shell, cli_version
):
    if is_running_on_docker:
        pytest.skip('Not supported on Docker')
    shell.send(f'{cli_version} delete-\t\t')
    shell.expect_exact("file", timeout=TIMEOUT)  # common part of remaining cmds is autocompleted
    with pytest.raises(pexpect.exceptions.TIMEOUT):  # no other commands are suggested
        shell.expect_exact("get-bucket", timeout=0.5)


@skip_on_windows
def test_autocomplete_b2__download_file__b2uri(
    autocomplete_installed,
    shell,
    b2_tool,
    bucket_name,
    file_name,
    is_running_on_docker,
    cli_version,
):
    """Test that autocomplete suggests bucket names and file names."""
    if is_running_on_docker:
        pytest.skip('Not supported on Docker')
    shell.send(f'{cli_version} download_file \t\t')
    shell.expect_exact("b2://", timeout=TIMEOUT)
    shell.send('b2://\t\t')
    shell.expect_exact(bucket_name, timeout=TIMEOUT)
    shell.send(f'{bucket_name}/\t\t')
    shell.expect_exact(file_name, timeout=TIMEOUT)

B2_Command_Line_Tool-3.19.1/test/integration/test_b2_command_line.py
#!/usr/bin/env python3
######################################################################
#
# File: test/integration/test_b2_command_line.py
#
# Copyright 2019 Backblaze Inc. All Rights Reserved.
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from __future__ import annotations import base64 import contextlib import hashlib import itertools import json import os import os.path import pathlib import re import sys import time from pathlib import Path from tempfile import mkdtemp import pytest from b2sdk.v2 import ( B2_ACCOUNT_INFO_ENV_VAR, SSE_C_KEY_ID_FILE_INFO_KEY_NAME, UNKNOWN_FILE_RETENTION_SETTING, EncryptionMode, EncryptionSetting, FileRetentionSetting, LegalHold, RetentionMode, SqliteAccountInfo, fix_windows_path_limit, ) from b2sdk.v2.exception import MissingAccountData from b2._internal._cli.const import ( B2_APPLICATION_KEY_ENV_VAR, B2_APPLICATION_KEY_ID_ENV_VAR, B2_ENVIRONMENT_ENV_VAR, ) from b2._internal.console_tool import current_time_millis from ..helpers import skip_on_windows from .helpers import ( ONE_DAY_MILLIS, ONE_HOUR_MILLIS, SSE_B2_AES, SSE_C_AES, SSE_C_AES_2, SSE_NONE, TempDir, file_mod_time_millis, random_hex, read_file, set_file_mod_time_millis, should_equal, write_file, ) def test_authorize_account_via_params_saving_credentials( b2_tool, realm, application_key, application_key_id, account_info_file, ): """ When calling `authorize-account` and passing credentials as params, we want the credentials to be saved. """ b2_tool.should_succeed(['clear-account']) assert B2_APPLICATION_KEY_ID_ENV_VAR not in os.environ assert B2_APPLICATION_KEY_ENV_VAR not in os.environ b2_tool.should_succeed( ['authorize-account', '--environment', realm, application_key_id, application_key] ) assert account_info_file.exists() account_info = SqliteAccountInfo() assert account_info.get_application_key() == application_key assert account_info.get_application_key_id() == application_key_id def test_authorize_account_via_env_vars_saving_credentials( b2_tool, realm, application_key, application_key_id, account_info_file, ): """ When calling `authorize-account` and passing credentials via env vars, we still want the credentials to be saved. """ b2_tool.should_succeed(['clear-account']) assert B2_APPLICATION_KEY_ID_ENV_VAR not in os.environ assert B2_APPLICATION_KEY_ENV_VAR not in os.environ b2_tool.should_succeed( ['authorize-account'], additional_env={ B2_ENVIRONMENT_ENV_VAR: realm, B2_APPLICATION_KEY_ID_ENV_VAR: application_key_id, B2_APPLICATION_KEY_ENV_VAR: application_key, } ) assert account_info_file.exists() account_info = SqliteAccountInfo() assert account_info.get_application_key() == application_key assert account_info.get_application_key_id() == application_key_id def test_clear_account_with_env_vars( b2_tool, realm, application_key, application_key_id, account_info_file, ): """ When calling `clear-account` and passing credentials via env vars, we want the credentials to be removed from the file. 
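    Note that the env-var credentials are only used to perform the call itself;
    the asserts below verify that nothing from them is written back to the file.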
""" assert account_info_file.exists() account_info = SqliteAccountInfo() assert account_info.get_application_key() == application_key assert account_info.get_application_key_id() == application_key_id b2_tool.should_succeed( ['clear-account'], additional_env={ B2_ENVIRONMENT_ENV_VAR: realm, B2_APPLICATION_KEY_ID_ENV_VAR: application_key_id, B2_APPLICATION_KEY_ENV_VAR: application_key, } ) assert account_info_file.exists() account_info = SqliteAccountInfo() with pytest.raises(MissingAccountData): account_info.get_application_key() with pytest.raises(MissingAccountData): account_info.get_application_key_id() @pytest.mark.apiver(to_ver=3) def test_command_with_env_vars_saving_credentials( b2_tool, realm, application_key, application_key_id, account_info_file, bucket_name, b2_uri_args, ): """ When calling any command other then `authorize-account` and passing credentials via env vars, we don't want them to be saved. """ b2_tool.should_succeed(['clear-account']) assert B2_APPLICATION_KEY_ID_ENV_VAR not in os.environ assert B2_APPLICATION_KEY_ENV_VAR not in os.environ b2_tool.should_succeed( ['ls', '--long', *b2_uri_args(bucket_name)], additional_env={ B2_ENVIRONMENT_ENV_VAR: realm, B2_APPLICATION_KEY_ID_ENV_VAR: application_key_id, B2_APPLICATION_KEY_ENV_VAR: application_key, } ) assert account_info_file.exists() account_info = SqliteAccountInfo() assert account_info.get_application_key() == application_key assert account_info.get_application_key_id() == application_key_id @pytest.mark.apiver(from_ver=4) def test_command_with_env_vars_not_saving_credentials( b2_tool, realm, application_key, application_key_id, account_info_file, bucket_name, b2_uri_args, ): """ When calling any command other then `authorize-account` and passing credentials via env vars, we don't want them to be saved. """ b2_tool.should_succeed(['clear-account']) assert B2_APPLICATION_KEY_ID_ENV_VAR not in os.environ assert B2_APPLICATION_KEY_ENV_VAR not in os.environ b2_tool.should_succeed( ['ls', '--long', *b2_uri_args(bucket_name)], additional_env={ B2_ENVIRONMENT_ENV_VAR: realm, B2_APPLICATION_KEY_ID_ENV_VAR: application_key_id, B2_APPLICATION_KEY_ENV_VAR: application_key, } ) assert account_info_file.exists() account_info = SqliteAccountInfo() with pytest.raises(MissingAccountData): account_info.get_application_key() with pytest.raises(MissingAccountData): account_info.get_application_key_id() @pytest.mark.apiver(from_ver=4) def test_command_with_env_vars_reusing_existing_account_info( b2_tool, realm, application_key, application_key_id, account_info_file, bucket_name, b2_uri_args, ): """ When calling any command with credentials passed via env vars, and the account info file already contains the same credentials, we want to use filesystem for storing cache, not the in-memory cache. 
""" assert B2_APPLICATION_KEY_ID_ENV_VAR not in os.environ assert B2_APPLICATION_KEY_ENV_VAR not in os.environ assert account_info_file.exists() account_info = SqliteAccountInfo() assert account_info.get_application_key() == application_key assert account_info.get_application_key_id() == application_key_id account_info.remove_bucket_name(bucket_name) assert account_info.get_bucket_id_or_none_from_bucket_name(bucket_name) is None b2_tool.should_succeed( ['ls', '--long', *b2_uri_args(bucket_name)], additional_env={ B2_ENVIRONMENT_ENV_VAR: realm, B2_APPLICATION_KEY_ID_ENV_VAR: application_key_id, B2_APPLICATION_KEY_ENV_VAR: application_key, } ) assert account_info_file.exists() account_info = SqliteAccountInfo() assert account_info.get_bucket_id_or_none_from_bucket_name(bucket_name) is not None @pytest.fixture def uploaded_sample_file(b2_tool, bucket_name, sample_filepath): return b2_tool.should_succeed_json( ['upload-file', '--quiet', bucket_name, str(sample_filepath), 'sample_file'] ) def test_download(b2_tool, bucket_name, sample_filepath, uploaded_sample_file, tmp_path): output_a = tmp_path / 'a' b2_tool.should_succeed( [ 'download-file', '--quiet', f"b2://{bucket_name}/{uploaded_sample_file['fileName']}", str(output_a) ] ) assert output_a.read_text() == sample_filepath.read_text() output_b = tmp_path / 'b' b2_tool.should_succeed( ['download-file', '--quiet', f"b2id://{uploaded_sample_file['fileId']}", str(output_b)] ) assert output_b.read_text() == sample_filepath.read_text() def test_basic(b2_tool, bucket_name, sample_file, tmp_path, b2_uri_args): file_mod_time_str = str(file_mod_time_millis(sample_file)) file_data = read_file(sample_file) hex_sha1 = hashlib.sha1(file_data).hexdigest() list_of_buckets = b2_tool.should_succeed_json(['list-buckets', '--json']) should_equal( [bucket_name], [b['bucketName'] for b in list_of_buckets if b['bucketName'] == bucket_name] ) b2_tool.should_succeed(['upload-file', '--quiet', bucket_name, sample_file, 'a']) b2_tool.should_succeed(['ls', '--long', '--replication', *b2_uri_args(bucket_name)]) b2_tool.should_succeed(['upload-file', '--no-progress', bucket_name, sample_file, 'a']) b2_tool.should_succeed(['upload-file', '--no-progress', bucket_name, sample_file, 'b/1']) b2_tool.should_succeed(['upload-file', '--no-progress', bucket_name, sample_file, 'b/2']) b2_tool.should_succeed( [ 'upload-file', '--no-progress', '--sha1', hex_sha1, '--info', 'foo=bar=baz', '--info', 'color=blue', bucket_name, sample_file, 'c' ] ) b2_tool.should_fail( [ 'upload-file', '--no-progress', '--sha1', hex_sha1, '--info', 'foo-bar', '--info', 'color=blue', bucket_name, sample_file, 'c' ], r'ERROR: Bad file info: foo-bar' ) b2_tool.should_succeed( [ 'upload-file', '--no-progress', '--content-type', 'text/plain', bucket_name, sample_file, 'd' ] ) b2_tool.should_succeed(['upload-file', '--no-progress', bucket_name, sample_file, 'rm']) b2_tool.should_succeed(['upload-file', '--no-progress', bucket_name, sample_file, 'rm1']) # with_wildcard allows us to target a single file. 
rm will be removed, rm1 will be left alone b2_tool.should_succeed( ['rm', '--recursive', '--with-wildcard', *b2_uri_args(bucket_name, 'rm')] ) list_of_files = b2_tool.should_succeed_json( ['ls', '--json', '--recursive', '--with-wildcard', *b2_uri_args(bucket_name, 'rm*')] ) should_equal(['rm1'], [f['fileName'] for f in list_of_files]) b2_tool.should_succeed( ['rm', '--recursive', '--with-wildcard', *b2_uri_args(bucket_name, 'rm1')] ) b2_tool.should_succeed(['download-file', '--quiet', f'b2://{bucket_name}/b/1', tmp_path / 'a']) b2_tool.should_succeed(['hide-file', bucket_name, 'c']) list_of_files = b2_tool.should_succeed_json( ['ls', '--json', '--recursive', *b2_uri_args(bucket_name)] ) should_equal(['a', 'b/1', 'b/2', 'd'], [f['fileName'] for f in list_of_files]) list_of_files = b2_tool.should_succeed_json( ['ls', '--json', '--recursive', '--versions', *b2_uri_args(bucket_name)] ) should_equal(['a', 'a', 'b/1', 'b/2', 'c', 'c', 'd'], [f['fileName'] for f in list_of_files]) should_equal( ['upload', 'upload', 'upload', 'upload', 'hide', 'upload', 'upload'], [f['action'] for f in list_of_files] ) first_a_version = list_of_files[0] first_c_version = list_of_files[4] second_c_version = list_of_files[5] list_of_files = b2_tool.should_succeed_json( ['ls', '--json', '--recursive', '--versions', *b2_uri_args(bucket_name, 'c')] ) should_equal([], [f['fileName'] for f in list_of_files]) b2_tool.should_succeed(['copy-file-by-id', first_a_version['fileId'], bucket_name, 'x']) b2_tool.should_succeed(['ls', *b2_uri_args(bucket_name)], '^a{0}b/{0}d{0}'.format(os.linesep)) # file_id, action, date, time, size(, replication), name b2_tool.should_succeed( ['ls', '--long', *b2_uri_args(bucket_name)], '^4_z.* upload .* {1} a{0}.* - .* b/{0}4_z.* upload .* {1} d{0}'.format( os.linesep, len(file_data) ) ) b2_tool.should_succeed( ['ls', '--long', '--replication', *b2_uri_args(bucket_name)], '^4_z.* upload .* {1} - a{0}.* - .* - b/{0}4_z.* upload .* {1} - d{0}'.format( os.linesep, len(file_data) ) ) b2_tool.should_succeed( ['ls', '--versions', *b2_uri_args(bucket_name)], f'^a{os.linesep}a{os.linesep}b/{os.linesep}c{os.linesep}c{os.linesep}d{os.linesep}' ) b2_tool.should_succeed( ['ls', *b2_uri_args(bucket_name, 'b')], f'^b/1{os.linesep}b/2{os.linesep}' ) b2_tool.should_succeed( ['ls', *b2_uri_args(bucket_name, 'b/')], f'^b/1{os.linesep}b/2{os.linesep}' ) file_info = b2_tool.should_succeed_json(['file-info', f"b2id://{second_c_version['fileId']}"]) expected_info = { 'color': 'blue', 'foo': 'bar=baz', 'src_last_modified_millis': file_mod_time_str } should_equal(expected_info, file_info['fileInfo']) b2_tool.should_succeed(['delete-file-version', 'c', first_c_version['fileId']]) b2_tool.should_succeed( ['ls', *b2_uri_args(bucket_name)], f'^a{os.linesep}b/{os.linesep}c{os.linesep}d{os.linesep}' ) b2_tool.should_succeed(['get-url', f"b2id://{second_c_version['fileId']}"]) b2_tool.should_succeed( ['get-url', f"b2://{bucket_name}/any-file-name"], '^https://.*/file/{}/{}\r?$'.format( bucket_name, 'any-file-name', ), ) # \r? 
is for Windows, as $ doesn't match \r\n @pytest.mark.apiver(from_ver=4) def test_ls_b2id(b2_tool, uploaded_sample_file): b2_tool.should_succeed( ['ls', f"b2id://{uploaded_sample_file['fileId']}"], expected_pattern=f"^{uploaded_sample_file['fileName']}", ) @pytest.mark.apiver(from_ver=4) def test_rm_b2id(b2_tool, bucket_name, uploaded_sample_file): # remove the file by id b2_tool.should_succeed(['rm', f"b2id://{uploaded_sample_file['fileId']}"]) # check that the file is gone b2_tool.should_succeed( ['ls', f'b2://{bucket_name}'], expected_pattern='^$', ) def test_debug_logs(b2_tool, is_running_on_docker, tmp_path): to_be_removed_bucket_name = b2_tool.generate_bucket_name() b2_tool.should_succeed( [ 'create-bucket', to_be_removed_bucket_name, 'allPublic', *b2_tool.get_bucket_info_args(), ], ) b2_tool.should_succeed(['delete-bucket', to_be_removed_bucket_name],) b2_tool.should_fail( ['delete-bucket', to_be_removed_bucket_name], re.compile(r'^ERROR: Bucket with id=\w* not found\s*$') ) # Check logging settings if not is_running_on_docker: # It's difficult to read the log in docker in CI b2_tool.should_fail( ['delete-bucket', to_be_removed_bucket_name, '--debug-logs'], re.compile(r'^ERROR: Bucket with id=\w* not found\s*$') ) stack_trace_in_log = r'Traceback \(most recent call last\):.*Bucket with id=\w* not found' # the two regexes below depend on log message from urllib3, which is not perfect, but this test needs to # check global logging settings stderr_regex = re.compile( r'DEBUG:urllib3.connectionpool:.* "POST /b2api/v2/b2_delete_bucket HTTP' r'.*' + stack_trace_in_log, re.DOTALL, ) log_file_regex = re.compile( r'urllib3.connectionpool\tDEBUG\t.* "POST /b2api/v2/b2_delete_bucket HTTP' r'.*' + stack_trace_in_log, re.DOTALL, ) with open('b2_cli.log') as logfile: log = logfile.read() assert re.search(log_file_regex, log), log os.remove('b2_cli.log') b2_tool.should_fail(['delete-bucket', to_be_removed_bucket_name, '--verbose'], stderr_regex) assert not os.path.exists('b2_cli.log') b2_tool.should_fail( ['delete-bucket', to_be_removed_bucket_name, '--verbose', '--debug-logs'], stderr_regex ) with open('b2_cli.log') as logfile: log = logfile.read() assert re.search(log_file_regex, log), log def test_bucket(b2_tool, bucket_name): rule = """{ "daysFromHidingToDeleting": 1, "daysFromUploadingToHiding": null, "fileNamePrefix": "" }""" output = b2_tool.should_succeed_json( [ 'update-bucket', '--lifecycle-rule', rule, bucket_name, 'allPublic', *b2_tool.get_bucket_info_args() ], ) ########## // doesn't happen on production, but messes up some tests \\ ########## for key in output['lifecycleRules'][0]: if key[8] == 'S' and len(key) == 47: del output['lifecycleRules'][0][key] break ########## \\ doesn't happen on production, but messes up some tests // ########## assert output["lifecycleRules"] == [ { "daysFromHidingToDeleting": 1, "daysFromUploadingToHiding": None, "fileNamePrefix": "" } ] def test_key_restrictions(b2_tool, bucket_name, sample_file, bucket_factory, b2_uri_args): # A single file for rm to fail on. 
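    # What follows exercises application-key restrictions end to end: a key scoped to
    # a single bucket can list and read within that bucket, but 'rm' must fail (no
    # deleteFiles capability was granted) and any operation against a second bucket
    # must be rejected outright.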
    b2_tool.should_succeed(['upload-file', '--no-progress', bucket_name, sample_file, 'test'])

    key_one_name = 'clt-testKey-01' + random_hex(6)
    created_key_stdout = b2_tool.should_succeed(
        [
            'create-key',
            key_one_name,
            'listFiles,listBuckets,readFiles,writeKeys',
        ]
    )
    key_one_id, key_one = created_key_stdout.split()

    b2_tool.should_succeed(
        ['authorize-account', '--environment', b2_tool.realm, key_one_id, key_one],
    )

    b2_tool.should_succeed(['get-bucket', bucket_name],)

    second_bucket_name = bucket_factory().name
    b2_tool.should_succeed(['get-bucket', second_bucket_name],)

    key_two_name = 'clt-testKey-02' + random_hex(6)
    created_key_two_stdout = b2_tool.should_succeed(
        [
            'create-key',
            '--bucket',
            bucket_name,
            key_two_name,
            'listFiles,listBuckets,readFiles',
        ]
    )
    key_two_id, key_two = created_key_two_stdout.split()

    b2_tool.should_succeed(
        ['authorize-account', '--environment', b2_tool.realm, key_two_id, key_two],
    )
    b2_tool.should_succeed(['get-bucket', bucket_name],)
    b2_tool.should_succeed(['ls', *b2_uri_args(bucket_name)],)

    # Capabilities can be listed in any order. While this regex doesn't confirm that all
    # three are present, it ensures that there are three in total.
    failed_bucket_err = r'Deletion of file "test" \([^\)]+\) failed: unauthorized for ' \
        r'application key with capabilities ' \
        r"'(.*listFiles.*|.*listBuckets.*|.*readFiles.*){3}', " \
        r"restricted to bucket '%s' \(unauthorized\)" % bucket_name
    b2_tool.should_fail(
        ['rm', '--recursive', '--no-progress', *b2_uri_args(bucket_name)], failed_bucket_err
    )

    failed_bucket_err = r'ERROR: Application key is restricted to bucket: ' + bucket_name
    b2_tool.should_fail(['get-bucket', second_bucket_name], failed_bucket_err)

    failed_list_files_err = r'ERROR: Application key is restricted to bucket: ' + bucket_name
    b2_tool.should_fail(['ls', *b2_uri_args(second_bucket_name)], failed_list_files_err)

    failed_list_files_err = r'ERROR: Application key is restricted to bucket: ' + bucket_name
    b2_tool.should_fail(['rm', *b2_uri_args(second_bucket_name)], failed_list_files_err)

    # reauthorize with more capabilities for clean up
    b2_tool.should_succeed(
        [
            'authorize-account', '--environment', b2_tool.realm, b2_tool.account_id,
            b2_tool.application_key
        ]
    )
    b2_tool.should_succeed(['delete-key', key_one_id])
    b2_tool.should_succeed(['delete-key', key_two_id])


def test_delete_bucket(b2_tool, bucket_name):
    b2_tool.should_succeed(['delete-bucket', bucket_name])
    b2_tool.should_fail(
        ['delete-bucket', bucket_name], re.compile(r'^ERROR: Bucket with id=\w* not found\s*$')
    )


def test_rapid_bucket_operations(b2_tool):
    new_bucket_name = b2_tool.generate_bucket_name()
    bucket_info_args = b2_tool.get_bucket_info_args()
    # apparently the server behaves erratically when we delete a bucket and recreate it right away
    b2_tool.should_succeed(['create-bucket', new_bucket_name, 'allPrivate', *bucket_info_args])
    b2_tool.should_succeed(['update-bucket', new_bucket_name, 'allPublic'])
    b2_tool.should_succeed(['delete-bucket', new_bucket_name])


def test_account(b2_tool, cli_version, apiver_int, monkeypatch):
    with monkeypatch.context() as mp:
        account_info_file_path = os.path.join(mkdtemp(), 'b2_account_info')
        mp.setenv(B2_ACCOUNT_INFO_ENV_VAR, account_info_file_path)

        b2_tool.should_succeed(['clear-account'])
        bad_application_key = random_hex(len(b2_tool.application_key))
        # this call doesn't use --environment on purpose, so that we check that it is non-mandatory
        b2_tool.should_fail(
            ['authorize-account', b2_tool.account_id, bad_application_key], r'unauthorized'
        )
        b2_tool.should_succeed(
            [
'authorize-account', '--environment', b2_tool.realm, b2_tool.account_id, b2_tool.application_key, ] ) # Testing (B2_APPLICATION_KEY, B2_APPLICATION_KEY_ID) for commands other than authorize-account with monkeypatch.context() as mp: account_info_file_path = os.path.join(mkdtemp(), 'b2_account_info') mp.setenv(B2_ACCOUNT_INFO_ENV_VAR, account_info_file_path) # first, let's make sure "create-bucket" doesn't work without auth data - i.e. that the sqlite file has been # successfully removed bucket_name = b2_tool.generate_bucket_name() b2_tool.should_fail( ['create-bucket', bucket_name, 'allPrivate'], r'ERROR: Missing account data: \'NoneType\' object is not subscriptable (\(key 0\) )? ' fr'Use: {cli_version}(\.(exe|EXE))? authorize-account or provide auth data with \'B2_APPLICATION_KEY_ID\' and ' r'\'B2_APPLICATION_KEY\' environment variables' ) with monkeypatch.context() as mp: account_info_file_path = os.path.join(mkdtemp(), 'b2_account_info') mp.setenv(B2_ACCOUNT_INFO_ENV_VAR, account_info_file_path) # then, let's see that auth data from env vars works os.environ['B2_APPLICATION_KEY'] = os.environ['B2_TEST_APPLICATION_KEY'] os.environ['B2_APPLICATION_KEY_ID'] = os.environ['B2_TEST_APPLICATION_KEY_ID'] os.environ['B2_ENVIRONMENT'] = b2_tool.realm bucket_name = b2_tool.generate_bucket_name() b2_tool.should_succeed( ['create-bucket', bucket_name, 'allPrivate', *b2_tool.get_bucket_info_args()] ) b2_tool.should_succeed(['delete-bucket', bucket_name]) if apiver_int >= 4: assert not os.path.exists( account_info_file_path ), 'sqlite file was created while it shouldn\'t' else: assert os.path.exists(account_info_file_path), 'sqlite file was not created' account_info = SqliteAccountInfo(account_info_file_path) assert account_info.get_application_key_id() == os.environ['B2_TEST_APPLICATION_KEY_ID'] assert account_info.get_application_key() == os.environ['B2_TEST_APPLICATION_KEY'] os.environ.pop('B2_APPLICATION_KEY') os.environ.pop('B2_APPLICATION_KEY_ID') # last, let's see that providing only one of the env vars results in a failure os.environ['B2_APPLICATION_KEY'] = os.environ['B2_TEST_APPLICATION_KEY'] b2_tool.should_fail( ['create-bucket', bucket_name, 'allPrivate'], r'Please provide both "B2_APPLICATION_KEY" and "B2_APPLICATION_KEY_ID" environment variables or none of them' ) os.environ.pop('B2_APPLICATION_KEY') os.environ['B2_APPLICATION_KEY_ID'] = os.environ['B2_TEST_APPLICATION_KEY_ID'] b2_tool.should_fail( ['create-bucket', bucket_name, 'allPrivate'], r'Please provide both "B2_APPLICATION_KEY" and "B2_APPLICATION_KEY_ID" environment variables or none of them' ) os.environ.pop('B2_APPLICATION_KEY_ID') def file_version_summary(list_of_files): """ Given the result of list-file-versions, returns a list of all file versions, with "+" for upload and "-" for hide, looking like this: ['+ photos/a.jpg', '- photos/b.jpg', '+ photos/c.jpg'] """ return [filename_summary(f) for f in list_of_files] def filename_summary(file_): return ('+ ' if (file_['action'] == 'upload') else '- ') + file_['fileName'] def file_version_summary_with_encryption(list_of_files): """ Given the result of list-file-versions, returns a list of all file versions, with "+" for upload and "-" for hide, with information about encryption, looking like this: [ ('+ photos/a.jpg', 'SSE-C:AES256?sse_c_key_id=user-generated-key-id'), ('+ photos/a.jpg', 'SSE-B2:AES256'), ('- photos/b.jpg', None), ('+ photos/c.jpg', 'none'), ] """ result = [] for f in list_of_files: entry = filename_summary(f) encryption = 
            encryption_summary(f['serverSideEncryption'], f['fileInfo'])
        result.append((entry, encryption))
    return result


def find_file_id(list_of_files, file_name):
    for file in list_of_files:
        if file['fileName'] == file_name:
            return file['fileId']
    assert False, f'file not found: {file_name}'


def encryption_summary(sse_dict, file_info):
    if isinstance(sse_dict, EncryptionSetting):
        sse_dict = sse_dict.as_dict()
    encryption = sse_dict['mode']
    assert encryption in (
        EncryptionMode.NONE.value, EncryptionMode.SSE_B2.value, EncryptionMode.SSE_C.value
    )
    algorithm = sse_dict.get('algorithm')
    if algorithm is not None:
        encryption += ':' + algorithm
    if sse_dict['mode'] == 'SSE-C':
        sse_c_key_id = file_info.get(SSE_C_KEY_ID_FILE_INFO_KEY_NAME)
        encryption += f'?{SSE_C_KEY_ID_FILE_INFO_KEY_NAME}={sse_c_key_id}'
    return encryption


def test_sync_up(b2_tool, bucket_name):
    sync_up_helper(b2_tool, bucket_name, 'sync')


def test_sync_up_sse_b2(b2_tool, bucket_name):
    sync_up_helper(b2_tool, bucket_name, 'sync', encryption=SSE_B2_AES)


def test_sync_up_sse_c(b2_tool, bucket_name):
    sync_up_helper(b2_tool, bucket_name, 'sync', encryption=SSE_C_AES)


def test_sync_up_no_prefix(b2_tool, bucket_name):
    sync_up_helper(b2_tool, bucket_name, '')


def sync_up_helper(b2_tool, bucket_name, dir_, encryption=None):
    sync_point_parts = [bucket_name]
    if dir_:
        sync_point_parts.append(dir_)
        prefix = dir_ + '/'
    else:
        prefix = ''
    b2_sync_point = 'b2:' + '/'.join(sync_point_parts)

    with TempDir() as dir_path:
        file_versions = b2_tool.list_file_versions(bucket_name)
        should_equal([], file_version_summary(file_versions))

        write_file(dir_path / 'a', b'hello')
        write_file(dir_path / 'b', b'hello')
        write_file(dir_path / 'c', b'hello')

        # simulate action (nothing should be uploaded)
        b2_tool.should_succeed(['sync', '--no-progress', '--dry-run', dir_path, b2_sync_point])
        file_versions = b2_tool.list_file_versions(bucket_name)
        should_equal([], file_version_summary(file_versions))

        #
        # A note about OSError: [WinError 1314]
        #
        # If you are seeing this, then probably you ran the integration test suite from
        # a non-admin account which on Windows doesn't by default get to create symlinks.
        # A special permission is needed. Now maybe there is a way to give that permission,
        # but it didn't work for me, so I just ran it as admin. A guide that I've found
        # recommended going to Control Panel, Administrative Tools, Local Security Policy,
        # Local Policies, User Rights Assignment, where you can find the permission to
        # create symbolic links. Add your user to it (or a group that the user is in).
        #
        # Finally, in order to apply the new policy, run `cmd` and execute
        # ``gpupdate /force``.
        #
        # Again, if it still doesn't work, consider just running the shell you are
        # launching ``nox`` from as admin.

        os.symlink('broken', dir_path / 'd')  # OSError: [WinError 1314]?
See the comment above additional_env = None # now upload if encryption is None: command = ['sync', '--no-progress', dir_path, b2_sync_point] expected_encryption = SSE_NONE expected_encryption_str = encryption_summary(expected_encryption.as_dict(), {}) elif encryption == SSE_B2_AES: command = [ 'sync', '--no-progress', '--destination-server-side-encryption', 'SSE-B2', dir_path, b2_sync_point ] expected_encryption = encryption expected_encryption_str = encryption_summary(expected_encryption.as_dict(), {}) elif encryption == SSE_C_AES: command = [ 'sync', '--no-progress', '--destination-server-side-encryption', 'SSE-C', dir_path, b2_sync_point ] expected_encryption = encryption additional_env = { 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(SSE_C_AES.key.secret).decode(), 'B2_DESTINATION_SSE_C_KEY_ID': SSE_C_AES.key.key_id, } expected_encryption_str = encryption_summary( expected_encryption.as_dict(), {SSE_C_KEY_ID_FILE_INFO_KEY_NAME: SSE_C_AES.key.key_id} ) else: raise NotImplementedError('unsupported encryption mode: %s' % encryption) b2_tool.should_succeed( command, expected_pattern="d could not be accessed", additional_env=additional_env ) file_versions = b2_tool.list_file_versions(bucket_name) should_equal( [ ('+ ' + prefix + 'a', expected_encryption_str), ('+ ' + prefix + 'b', expected_encryption_str), ('+ ' + prefix + 'c', expected_encryption_str), ], file_version_summary_with_encryption(file_versions), ) if encryption and encryption.mode == EncryptionMode.SSE_C: b2_tool.should_fail( command, expected_pattern="ValueError: Using SSE-C requires providing an encryption key via " "B2_DESTINATION_SSE_C_KEY_B64 env var" ) if encryption is not None: return # that's enough, we've checked that encryption works, no need to repeat the whole sync suite c_id = find_file_id(file_versions, prefix + 'c') file_info = b2_tool.should_succeed_json(['file-info', f"b2id://{c_id}"])['fileInfo'] should_equal( file_mod_time_millis(dir_path / 'c'), int(file_info['src_last_modified_millis']) ) os.unlink(dir_path / 'b') write_file(dir_path / 'c', b'hello world') b2_tool.should_succeed( ['sync', '--no-progress', '--keep-days', '10', dir_path, b2_sync_point] ) file_versions = b2_tool.list_file_versions(bucket_name) should_equal( [ '+ ' + prefix + 'a', '- ' + prefix + 'b', '+ ' + prefix + 'b', '+ ' + prefix + 'c', '+ ' + prefix + 'c', ], file_version_summary(file_versions) ) os.unlink(dir_path / 'a') b2_tool.should_succeed(['sync', '--no-progress', '--delete', dir_path, b2_sync_point]) file_versions = b2_tool.list_file_versions(bucket_name) should_equal([ '+ ' + prefix + 'c', ], file_version_summary(file_versions)) # test --compare-threshold with file size write_file(dir_path / 'c', b'hello world!') # should not upload new version of c b2_tool.should_succeed( [ 'sync', '--no-progress', '--keep-days', '10', '--compare-versions', 'size', '--compare-threshold', '1', dir_path, b2_sync_point ] ) file_versions = b2_tool.list_file_versions(bucket_name) should_equal([ '+ ' + prefix + 'c', ], file_version_summary(file_versions)) # should upload new version of c b2_tool.should_succeed( [ 'sync', '--no-progress', '--keep-days', '10', '--compare-versions', 'size', dir_path, b2_sync_point ] ) file_versions = b2_tool.list_file_versions(bucket_name) should_equal( [ '+ ' + prefix + 'c', '+ ' + prefix + 'c', ], file_version_summary(file_versions) ) set_file_mod_time_millis(dir_path / 'c', file_mod_time_millis(dir_path / 'c') + 2000) # test --compare-threshold with modTime # should not upload new version of c 
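        # (The mtime of 'c' was bumped by exactly 2000 ms above; with --compare-threshold
        # 2000 that difference is within tolerance, so sync considers the file unchanged.
        # The follow-up sync without the threshold treats the same 2000 ms as a change.)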
b2_tool.should_succeed( [ 'sync', '--no-progress', '--keep-days', '10', '--compare-versions', 'modTime', '--compare-threshold', '2000', dir_path, b2_sync_point ] ) file_versions = b2_tool.list_file_versions(bucket_name) should_equal( [ '+ ' + prefix + 'c', '+ ' + prefix + 'c', ], file_version_summary(file_versions) ) # should upload new version of c b2_tool.should_succeed( [ 'sync', '--no-progress', '--keep-days', '10', '--compare-versions', 'modTime', dir_path, b2_sync_point ] ) file_versions = b2_tool.list_file_versions(bucket_name) should_equal( [ '+ ' + prefix + 'c', '+ ' + prefix + 'c', '+ ' + prefix + 'c', ], file_version_summary(file_versions) ) # create one more file write_file(dir_path / 'linktarget', b'hello') mod_time = str((file_mod_time_millis(dir_path / 'linktarget') - 10) / 1000) # exclude last created file because of mtime b2_tool.should_succeed( [ 'sync', '--no-progress', '--exclude-if-modified-after', mod_time, dir_path, b2_sync_point ] ) file_versions = b2_tool.list_file_versions(bucket_name) should_equal( [ '+ ' + prefix + 'c', '+ ' + prefix + 'c', '+ ' + prefix + 'c', ], file_version_summary(file_versions), ) # confirm symlink is skipped os.symlink('linktarget', dir_path / 'alink') b2_tool.should_succeed( ['sync', '--no-progress', '--exclude-all-symlinks', dir_path, b2_sync_point], ) file_versions = b2_tool.list_file_versions(bucket_name) should_equal( [ '+ ' + prefix + 'c', '+ ' + prefix + 'c', '+ ' + prefix + 'c', '+ ' + prefix + 'linktarget', ], file_version_summary(file_versions), ) # confirm symlink target is uploaded (with symlink's name) b2_tool.should_succeed(['sync', '--no-progress', dir_path, b2_sync_point]) file_versions = b2_tool.list_file_versions(bucket_name) should_equal( [ '+ ' + prefix + 'alink', '+ ' + prefix + 'c', '+ ' + prefix + 'c', '+ ' + prefix + 'c', '+ ' + prefix + 'linktarget', ], file_version_summary(file_versions), ) def test_sync_down(b2_tool, bucket_name, sample_file): sync_down_helper(b2_tool, bucket_name, 'sync', sample_file) def test_sync_down_no_prefix(b2_tool, bucket_name, sample_file): sync_down_helper(b2_tool, bucket_name, '', sample_file) def test_sync_down_sse_c_no_prefix(b2_tool, bucket_name, sample_file): sync_down_helper(b2_tool, bucket_name, '', sample_file, SSE_C_AES) def sync_down_helper(b2_tool, bucket_name, folder_in_bucket, sample_file, encryption=None): b2_sync_point = 'b2:%s' % bucket_name if folder_in_bucket: b2_sync_point += '/' + folder_in_bucket b2_file_prefix = folder_in_bucket + '/' else: b2_file_prefix = '' if encryption is None or encryption.mode in (EncryptionMode.NONE, EncryptionMode.SSE_B2): upload_encryption_args = [] upload_additional_env = {} sync_encryption_args = [] sync_additional_env = {} elif encryption.mode == EncryptionMode.SSE_C: upload_encryption_args = ['--destination-server-side-encryption', 'SSE-C'] upload_additional_env = { 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(encryption.key.secret).decode(), 'B2_DESTINATION_SSE_C_KEY_ID': encryption.key.key_id, } sync_encryption_args = ['--source-server-side-encryption', 'SSE-C'] sync_additional_env = { 'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(encryption.key.secret).decode(), 'B2_SOURCE_SSE_C_KEY_ID': encryption.key.key_id, } else: raise NotImplementedError(encryption) with TempDir() as local_path: # Sync from an empty "folder" as a source. 
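        # Nothing exists under this prefix yet, so the sync below should succeed and
        # leave the local directory empty (verified right after).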
b2_tool.should_succeed(['sync', b2_sync_point, local_path]) should_equal([], sorted(local_path.iterdir())) # Put a couple files in B2 b2_tool.should_succeed( ['upload-file', '--no-progress', bucket_name, sample_file, b2_file_prefix + 'a'] + upload_encryption_args, additional_env=upload_additional_env, ) b2_tool.should_succeed( ['upload-file', '--no-progress', bucket_name, sample_file, b2_file_prefix + 'b'] + upload_encryption_args, additional_env=upload_additional_env, ) b2_tool.should_succeed( ['sync', b2_sync_point, local_path] + sync_encryption_args, additional_env=sync_additional_env, ) should_equal(['a', 'b'], sorted(os.listdir(local_path))) b2_tool.should_succeed( ['upload-file', '--no-progress', bucket_name, sample_file, b2_file_prefix + 'c'] + upload_encryption_args, additional_env=upload_additional_env, ) # Sync the files with one file being excluded because of mtime mod_time = str((file_mod_time_millis(sample_file) - 10) / 1000) b2_tool.should_succeed( [ 'sync', '--no-progress', '--exclude-if-modified-after', mod_time, b2_sync_point, local_path ] + sync_encryption_args, additional_env=sync_additional_env, ) should_equal(['a', 'b'], sorted(os.listdir(local_path))) # Sync all the files b2_tool.should_succeed( ['sync', '--no-progress', b2_sync_point, local_path] + sync_encryption_args, additional_env=sync_additional_env, ) should_equal(['a', 'b', 'c'], sorted(os.listdir(local_path))) with TempDir() as new_local_path: if encryption and encryption.mode == EncryptionMode.SSE_C: b2_tool.should_fail( ['sync', '--no-progress', b2_sync_point, new_local_path] + sync_encryption_args, expected_pattern='ValueError: Using SSE-C requires providing an encryption key via ' 'B2_SOURCE_SSE_C_KEY_B64 env var', ) b2_tool.should_fail( ['sync', '--no-progress', b2_sync_point, new_local_path], expected_pattern= 'b2sdk._internal.exception.BadRequest: The object was stored using a form of Server Side ' 'Encryption. The correct parameters must be provided to retrieve the object. 
' r'\(bad_request\)', ) def test_sync_copy(bucket_factory, b2_tool, bucket_name, sample_file): prepare_and_run_sync_copy_tests( bucket_factory, b2_tool, bucket_name, 'sync', sample_file=sample_file ) def test_sync_copy_no_prefix_default_encryption(bucket_factory, b2_tool, bucket_name, sample_file): prepare_and_run_sync_copy_tests( bucket_factory, b2_tool, bucket_name, '', sample_file=sample_file, destination_encryption=None, expected_encryption=SSE_NONE ) def test_sync_copy_no_prefix_no_encryption(bucket_factory, b2_tool, bucket_name, sample_file): prepare_and_run_sync_copy_tests( bucket_factory, b2_tool, bucket_name, '', sample_file=sample_file, destination_encryption=SSE_NONE, expected_encryption=SSE_NONE ) def test_sync_copy_no_prefix_sse_b2(bucket_factory, b2_tool, bucket_name, sample_file): prepare_and_run_sync_copy_tests( bucket_factory, b2_tool, bucket_name, '', sample_file=sample_file, destination_encryption=SSE_B2_AES, expected_encryption=SSE_B2_AES, ) def test_sync_copy_no_prefix_sse_c(bucket_factory, b2_tool, bucket_name, sample_file): prepare_and_run_sync_copy_tests( bucket_factory, b2_tool, bucket_name, '', sample_file=sample_file, destination_encryption=SSE_C_AES, expected_encryption=SSE_C_AES, source_encryption=SSE_C_AES_2, ) def test_sync_copy_sse_c_single_bucket(b2_tool, bucket_name, sample_file): run_sync_copy_with_basic_checks( b2_tool=b2_tool, b2_file_prefix='first_folder/', b2_sync_point=f'b2:{bucket_name}/first_folder', bucket_name=bucket_name, other_b2_sync_point=f'b2:{bucket_name}/second_folder', destination_encryption=SSE_C_AES_2, source_encryption=SSE_C_AES, sample_file=sample_file, ) expected_encryption_first = encryption_summary( SSE_C_AES.as_dict(), {SSE_C_KEY_ID_FILE_INFO_KEY_NAME: SSE_C_AES.key.key_id}, ) expected_encryption_second = encryption_summary( SSE_C_AES_2.as_dict(), {SSE_C_KEY_ID_FILE_INFO_KEY_NAME: SSE_C_AES_2.key.key_id}, ) file_versions = b2_tool.list_file_versions(bucket_name) should_equal( [ ('+ first_folder/a', expected_encryption_first), ('+ first_folder/b', expected_encryption_first), ('+ second_folder/a', expected_encryption_second), ('+ second_folder/b', expected_encryption_second), ], file_version_summary_with_encryption(file_versions), ) def prepare_and_run_sync_copy_tests( bucket_factory, b2_tool, bucket_name, folder_in_bucket, sample_file, destination_encryption=None, expected_encryption=SSE_NONE, source_encryption=None, ): b2_sync_point = 'b2:%s' % bucket_name if folder_in_bucket: b2_sync_point += '/' + folder_in_bucket b2_file_prefix = folder_in_bucket + '/' else: b2_file_prefix = '' other_bucket_name = bucket_factory().name other_b2_sync_point = 'b2:%s' % other_bucket_name if folder_in_bucket: other_b2_sync_point += '/' + folder_in_bucket run_sync_copy_with_basic_checks( b2_tool=b2_tool, b2_file_prefix=b2_file_prefix, b2_sync_point=b2_sync_point, bucket_name=bucket_name, other_b2_sync_point=other_b2_sync_point, destination_encryption=destination_encryption, source_encryption=source_encryption, sample_file=sample_file, ) if destination_encryption is None or destination_encryption in (SSE_NONE, SSE_B2_AES): encryption_file_info = {} elif destination_encryption.mode == EncryptionMode.SSE_C: encryption_file_info = {SSE_C_KEY_ID_FILE_INFO_KEY_NAME: destination_encryption.key.key_id} else: raise NotImplementedError(destination_encryption) file_versions = b2_tool.list_file_versions(other_bucket_name) expected_encryption_str = encryption_summary( expected_encryption.as_dict(), encryption_file_info ) should_equal( [ ('+ ' + b2_file_prefix + 
'a', expected_encryption_str), ('+ ' + b2_file_prefix + 'b', expected_encryption_str), ], file_version_summary_with_encryption(file_versions), ) def run_sync_copy_with_basic_checks( b2_tool, b2_file_prefix, b2_sync_point, bucket_name, other_b2_sync_point, destination_encryption, source_encryption, sample_file, ): # Put a couple files in B2 if source_encryption is None or source_encryption.mode in ( EncryptionMode.NONE, EncryptionMode.SSE_B2 ): b2_tool.should_succeed( [ 'upload-file', '--no-progress', '--destination-server-side-encryption', 'SSE-B2', bucket_name, sample_file, b2_file_prefix + 'a' ] ) b2_tool.should_succeed( ['upload-file', '--no-progress', bucket_name, sample_file, b2_file_prefix + 'b'] ) elif source_encryption.mode == EncryptionMode.SSE_C: for suffix in ['a', 'b']: b2_tool.should_succeed( [ 'upload-file', '--no-progress', '--destination-server-side-encryption', 'SSE-C', bucket_name, sample_file, b2_file_prefix + suffix ], additional_env={ 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(source_encryption.key.secret).decode(), 'B2_DESTINATION_SSE_C_KEY_ID': source_encryption.key.key_id, }, ) else: raise NotImplementedError(source_encryption) # Sync all the files if destination_encryption is None or destination_encryption == SSE_NONE: b2_tool.should_succeed(['sync', '--no-progress', b2_sync_point, other_b2_sync_point]) elif destination_encryption == SSE_B2_AES: b2_tool.should_succeed( [ 'sync', '--no-progress', '--destination-server-side-encryption', destination_encryption.mode.value, b2_sync_point, other_b2_sync_point ] ) elif destination_encryption.mode == EncryptionMode.SSE_C: b2_tool.should_fail( [ 'sync', '--no-progress', '--destination-server-side-encryption', destination_encryption.mode.value, b2_sync_point, other_b2_sync_point ], additional_env={ 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(destination_encryption.key.secret).decode(), 'B2_DESTINATION_SSE_C_KEY_ID': destination_encryption.key.key_id, }, expected_pattern= 'b2sdk._internal.exception.BadRequest: The object was stored using a form of Server Side ' 'Encryption. The correct parameters must be provided to retrieve the object. 
' r'\(bad_request\)' ) b2_tool.should_succeed( [ 'sync', '--no-progress', '--destination-server-side-encryption', destination_encryption.mode.value, '--source-server-side-encryption', source_encryption.mode.value, b2_sync_point, other_b2_sync_point ], additional_env={ 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(destination_encryption.key.secret).decode(), 'B2_DESTINATION_SSE_C_KEY_ID': destination_encryption.key.key_id, 'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(source_encryption.key.secret).decode(), 'B2_SOURCE_SSE_C_KEY_ID': source_encryption.key.key_id, } ) else: raise NotImplementedError(destination_encryption) def test_sync_long_path(b2_tool, bucket_name): """ test sync with very long path (overcome windows 260 character limit) """ b2_sync_point = 'b2://' + bucket_name long_path = '/'.join( ( 'extremely_long_path_which_exceeds_windows_unfortunate_260_character_path_limit', 'and_needs_special_prefixes_containing_backslashes_added_to_overcome_this_limitation', 'when_doing_so_beware_leaning_toothpick_syndrome_as_it_can_cause_frustration', 'see_also_xkcd_1638' ) ) with TempDir() as dir_path: local_long_path = (dir_path / long_path).resolve() fixed_local_long_path = Path(fix_windows_path_limit(str(local_long_path))) os.makedirs(fixed_local_long_path.parent) write_file(fixed_local_long_path, b'asdf') b2_tool.should_succeed(['sync', '--no-progress', '--delete', dir_path, b2_sync_point]) file_versions = b2_tool.list_file_versions(bucket_name) should_equal(['+ ' + long_path], file_version_summary(file_versions)) def test_default_sse_b2__update_bucket(b2_tool, bucket_name, schedule_bucket_cleanup): # Set default encryption via update-bucket bucket_info = b2_tool.should_succeed_json(['get-bucket', bucket_name]) bucket_default_sse = {'mode': 'none'} should_equal(bucket_default_sse, bucket_info['defaultServerSideEncryption']) bucket_info = b2_tool.should_succeed_json( ['update-bucket', '--default-server-side-encryption=SSE-B2', bucket_name] ) bucket_default_sse = { 'algorithm': 'AES256', 'mode': 'SSE-B2', } should_equal(bucket_default_sse, bucket_info['defaultServerSideEncryption']) bucket_info = b2_tool.should_succeed_json(['get-bucket', bucket_name]) bucket_default_sse = { 'algorithm': 'AES256', 'mode': 'SSE-B2', } should_equal(bucket_default_sse, bucket_info['defaultServerSideEncryption']) def test_default_sse_b2__create_bucket(b2_tool, schedule_bucket_cleanup): # Set default encryption via create-bucket second_bucket_name = b2_tool.generate_bucket_name() schedule_bucket_cleanup(second_bucket_name) b2_tool.should_succeed( [ 'create-bucket', '--default-server-side-encryption=SSE-B2', second_bucket_name, 'allPublic', *b2_tool.get_bucket_info_args(), ] ) second_bucket_info = b2_tool.should_succeed_json(['get-bucket', second_bucket_name]) second_bucket_default_sse = { 'algorithm': 'AES256', 'mode': 'SSE-B2', } should_equal(second_bucket_default_sse, second_bucket_info['defaultServerSideEncryption']) def test_sse_b2(b2_tool, bucket_name, sample_file, tmp_path, b2_uri_args): b2_tool.should_succeed( [ 'upload-file', '--destination-server-side-encryption=SSE-B2', '--quiet', bucket_name, sample_file, 'encrypted' ] ) b2_tool.should_succeed(['upload-file', '--quiet', bucket_name, sample_file, 'not_encrypted']) b2_tool.should_succeed( ['download-file', '--quiet', f'b2://{bucket_name}/encrypted', tmp_path / 'encrypted'] ) b2_tool.should_succeed( [ 'download-file', '--quiet', f'b2://{bucket_name}/not_encrypted', tmp_path / 'not_encrypted' ] ) list_of_files = b2_tool.should_succeed_json( ['ls', 
'--json', '--recursive', *b2_uri_args(bucket_name)] ) should_equal( [{ 'algorithm': 'AES256', 'mode': 'SSE-B2' }, { 'mode': 'none' }], [f['serverSideEncryption'] for f in list_of_files] ) encrypted_version = list_of_files[0] file_info = b2_tool.should_succeed_json(['file-info', f"b2id://{encrypted_version['fileId']}"]) should_equal({'algorithm': 'AES256', 'mode': 'SSE-B2'}, file_info['serverSideEncryption']) not_encrypted_version = list_of_files[1] file_info = b2_tool.should_succeed_json( ['file-info', f"b2id://{not_encrypted_version['fileId']}"] ) should_equal({'mode': 'none'}, file_info['serverSideEncryption']) b2_tool.should_succeed( [ 'copy-file-by-id', '--destination-server-side-encryption=SSE-B2', encrypted_version['fileId'], bucket_name, 'copied_encrypted' ] ) b2_tool.should_succeed( ['copy-file-by-id', not_encrypted_version['fileId'], bucket_name, 'copied_not_encrypted'] ) list_of_files = b2_tool.should_succeed_json( ['ls', '--json', '--recursive', *b2_uri_args(bucket_name)] ) should_equal( [{ 'algorithm': 'AES256', 'mode': 'SSE-B2' }, { 'mode': 'none' }] * 2, [f['serverSideEncryption'] for f in list_of_files] ) copied_encrypted_version = list_of_files[2] file_info = b2_tool.should_succeed_json( ['file-info', f"b2id://{copied_encrypted_version['fileId']}"] ) should_equal({'algorithm': 'AES256', 'mode': 'SSE-B2'}, file_info['serverSideEncryption']) copied_not_encrypted_version = list_of_files[3] file_info = b2_tool.should_succeed_json( ['file-info', f"b2id://{copied_not_encrypted_version['fileId']}"] ) should_equal({'mode': 'none'}, file_info['serverSideEncryption']) def test_sse_c(b2_tool, bucket_name, is_running_on_docker, sample_file, tmp_path, b2_uri_args): sse_c_key_id = 'user-generated-key-id \nąóźćż\nœøΩ≈ç\nßäöü' if is_running_on_docker: # TODO: fix this once we figure out how to pass env vars with \n in them to docker, docker-compose should work sse_c_key_id = sse_c_key_id.replace('\n', '') secret = os.urandom(32) b2_tool.should_fail( [ 'upload-file', '--no-progress', '--quiet', '--destination-server-side-encryption', 'SSE-C', bucket_name, sample_file, 'gonna-fail-anyway' ], 'Using SSE-C requires providing an encryption key via B2_DESTINATION_SSE_C_KEY_B64 env var' ) file_version_info = b2_tool.should_succeed_json( [ 'upload-file', '--no-progress', '--quiet', '--destination-server-side-encryption', 'SSE-C', bucket_name, sample_file, 'uploaded_encrypted' ], additional_env={ 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(secret).decode(), 'B2_DESTINATION_SSE_C_KEY_ID': sse_c_key_id, } ) should_equal( { "algorithm": "AES256", "customerKey": "******", "customerKeyMd5": "******", "mode": "SSE-C" }, file_version_info['serverSideEncryption'] ) should_equal(sse_c_key_id, file_version_info['fileInfo'][SSE_C_KEY_ID_FILE_INFO_KEY_NAME]) b2_tool.should_fail( ['download-file', '--quiet', f'b2://{bucket_name}/uploaded_encrypted', 'gonna_fail_anyway'], expected_pattern='ERROR: The object was stored using a form of Server Side Encryption. The ' r'correct parameters must be provided to retrieve the object. 
\(bad_request\)' ) b2_tool.should_fail( [ 'download-file', '--quiet', '--source-server-side-encryption', 'SSE-C', f'b2://{bucket_name}/uploaded_encrypted', 'gonna_fail_anyway' ], expected_pattern='ValueError: Using SSE-C requires providing an encryption key via ' 'B2_SOURCE_SSE_C_KEY_B64 env var' ) b2_tool.should_fail( [ 'download-file', '--quiet', '--source-server-side-encryption', 'SSE-C', f'b2://{bucket_name}/uploaded_encrypted', 'gonna_fail_anyway' ], expected_pattern='ERROR: Wrong or no SSE-C key provided when reading a file.', additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(os.urandom(32)).decode()} ) with contextlib.nullcontext(tmp_path) as dir_path: b2_tool.should_succeed( [ 'download-file', '--no-progress', '--quiet', '--source-server-side-encryption', 'SSE-C', f'b2://{bucket_name}/uploaded_encrypted', dir_path / 'a', ], additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()} ) assert read_file(dir_path / 'a') == read_file(sample_file) b2_tool.should_succeed( [ 'download-file', '--no-progress', '--quiet', '--source-server-side-encryption', 'SSE-C', f"b2id://{file_version_info['fileId']}", dir_path / 'b', ], additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()} ) assert read_file(dir_path / 'b') == read_file(sample_file) b2_tool.should_fail( ['copy-file-by-id', file_version_info['fileId'], bucket_name, 'gonna-fail-anyway'], expected_pattern= 'ERROR: The object was stored using a form of Server Side Encryption. The correct ' r'parameters must be provided to retrieve the object. \(bad_request\)' ) b2_tool.should_fail( [ 'copy-file-by-id', '--source-server-side-encryption=SSE-C', file_version_info['fileId'], bucket_name, 'gonna-fail-anyway' ], expected_pattern='ValueError: Using SSE-C requires providing an encryption key via ' 'B2_SOURCE_SSE_C_KEY_B64 env var' ) b2_tool.should_fail( [ 'copy-file-by-id', '--source-server-side-encryption=SSE-C', '--destination-server-side-encryption=SSE-C', file_version_info['fileId'], bucket_name, 'gonna-fail-anyway' ], expected_pattern='ValueError: Using SSE-C requires providing an encryption key via ' 'B2_DESTINATION_SSE_C_KEY_B64 env var', additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()} ) b2_tool.should_fail( [ 'copy-file-by-id', '--source-server-side-encryption=SSE-C', file_version_info['fileId'], bucket_name, 'gonna-fail-anyway' ], additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()}, expected_pattern= 'Attempting to copy file with metadata while either source or destination uses ' 'SSE-C. 
Use --fetch-metadata to fetch source file metadata before copying.', ) b2_tool.should_succeed( [ 'copy-file-by-id', '--source-server-side-encryption=SSE-C', file_version_info['fileId'], bucket_name, 'not_encrypted_copied_from_encrypted_metadata_replace', '--info', 'a=b', '--content-type', 'text/plain', ], additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()} ) b2_tool.should_succeed( [ 'copy-file-by-id', '--source-server-side-encryption=SSE-C', file_version_info['fileId'], bucket_name, 'not_encrypted_copied_from_encrypted_metadata_replace_empty', '--no-info', '--content-type', 'text/plain', ], additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()} ) b2_tool.should_succeed( [ 'copy-file-by-id', '--source-server-side-encryption=SSE-C', file_version_info['fileId'], bucket_name, 'not_encrypted_copied_from_encrypted_metadata_pseudo_copy', '--fetch-metadata', ], additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()} ) b2_tool.should_succeed( [ 'copy-file-by-id', '--source-server-side-encryption=SSE-C', '--destination-server-side-encryption=SSE-C', file_version_info['fileId'], bucket_name, 'encrypted_no_id_copied_from_encrypted', '--fetch-metadata', ], additional_env={ 'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode(), 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(os.urandom(32)).decode(), } ) b2_tool.should_succeed( [ 'copy-file-by-id', '--source-server-side-encryption=SSE-C', '--destination-server-side-encryption=SSE-C', file_version_info['fileId'], bucket_name, 'encrypted_with_id_copied_from_encrypted_metadata_replace', '--no-info', '--content-type', 'text/plain', ], additional_env={ 'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode(), 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(os.urandom(32)).decode(), 'B2_DESTINATION_SSE_C_KEY_ID': 'another-user-generated-key-id', } ) b2_tool.should_succeed( [ 'copy-file-by-id', '--source-server-side-encryption=SSE-C', '--destination-server-side-encryption=SSE-C', file_version_info['fileId'], bucket_name, 'encrypted_with_id_copied_from_encrypted_metadata_pseudo_copy', '--fetch-metadata', ], additional_env={ 'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode(), 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(os.urandom(32)).decode(), 'B2_DESTINATION_SSE_C_KEY_ID': 'another-user-generated-key-id', } ) list_of_files = b2_tool.should_succeed_json( ['ls', '--json', '--recursive', *b2_uri_args(bucket_name)] ) should_equal( [ { 'file_name': 'encrypted_no_id_copied_from_encrypted', 'sse_c_key_id': 'missing_key', 'serverSideEncryption': { "algorithm": "AES256", "customerKey": "******", "customerKeyMd5": "******", "mode": "SSE-C" }, }, { 'file_name': 'encrypted_with_id_copied_from_encrypted_metadata_pseudo_copy', 'sse_c_key_id': 'another-user-generated-key-id', 'serverSideEncryption': { 'algorithm': 'AES256', "customerKey": "******", "customerKeyMd5": "******", 'mode': 'SSE-C', }, }, { 'file_name': 'encrypted_with_id_copied_from_encrypted_metadata_replace', 'sse_c_key_id': 'another-user-generated-key-id', 'serverSideEncryption': { 'algorithm': 'AES256', "customerKey": "******", "customerKeyMd5": "******", 'mode': 'SSE-C', }, }, { 'file_name': 'not_encrypted_copied_from_encrypted_metadata_pseudo_copy', 'sse_c_key_id': 'missing_key', 'serverSideEncryption': { 'mode': 'none', }, }, { 'file_name': 'not_encrypted_copied_from_encrypted_metadata_replace', 'sse_c_key_id': 'missing_key', 'serverSideEncryption': { 'mode': 'none', }, }, { 'file_name': 
'not_encrypted_copied_from_encrypted_metadata_replace_empty', 'sse_c_key_id': 'missing_key', 'serverSideEncryption': { 'mode': 'none', }, }, { 'file_name': 'uploaded_encrypted', 'sse_c_key_id': sse_c_key_id, 'serverSideEncryption': { "algorithm": "AES256", "customerKey": "******", "customerKeyMd5": "******", "mode": "SSE-C" }, }, ], sorted( [ { 'sse_c_key_id': f['fileInfo'].get(SSE_C_KEY_ID_FILE_INFO_KEY_NAME, 'missing_key'), 'serverSideEncryption': f['serverSideEncryption'], 'file_name': f['fileName'] } for f in list_of_files ], key=lambda r: r['file_name'] ) ) @pytest.mark.skipif( (sys.version_info.major, sys.version_info.minor) < (3, 8), reason="License extraction doesn't work on older versions, and we're only " "obliged to provide this " "data in bundled and built packages." ) @pytest.mark.parametrize('with_packages', [True, False]) def test_license(b2_tool, with_packages, cli_version): license_text = b2_tool.should_succeed( ['license'] + (['--with-packages'] if with_packages else []) ) if with_packages: # In the case of e.g. a docker image, the binary ships with a license built in under the name `b2`. # It is also unable to generate this license itself, because it lacks the required packages. # Thus, the license test is allowed to pass whenever # the binary is named `b2` or carries the proper cli version string (e.g. `_b2v4` or `b2v3`). full_license_re = re.compile( fr'Licenses of all modules used by ({cli_version}|b2)(\.EXE)?, shipped with it in binary form:\r?\n' r'\+-*\+-*\+\r?\n' r'\|\s*Module name\s*\|\s*License text\s*\|\r?\n' r'.*' r'\+-*\+-*\+\r?\n', re.MULTILINE + re.DOTALL ) full_license_text = next(full_license_re.finditer(license_text), None) assert full_license_text, license_text assert len( full_license_text.group(0) ) > 140_000 # we should know if the length of this block changes dramatically # Note that GitHub CI adds additional packages: # 'colorlog', 'virtualenv', 'nox', 'packaging', 'argcomplete', 'filelock' # that sum up to around 50k characters. Tests run from the docker image are unaffected. # See the explanation above for why both `b2` and `cli_version` are allowed here. license_summary_re = re.compile( fr'Summary of all modules used by ({cli_version}|b2)(\.EXE)?, shipped with it in binary form:\r?\n' r'\+-*\+-*\+-*\+-*\+-*\+\r?\n' r'\|\s*Module name\s*\|\s*Version\s*\|\s*License\s*\|\s*Author\s*\|\s*URL\s*\|\r?\n' r'.*' r'\+-*\+-*\+-*\+-*\+-*\+\r?\n', re.MULTILINE + re.DOTALL ) license_summary_text = next(license_summary_re.finditer(license_text), None) assert license_summary_text, license_text assert len( license_summary_text.group(0) ) > 6_300 # we should know if the length of this block changes dramatically assert """ license: Backblaze wants developers and organization to copy and re-use our code examples, so we make the samples available by several different licenses. One option is the MIT license (below). Other options are available here: https://www.backblaze.com/using_b2_code.html The MIT License (MIT) Copyright (c) 2015 Backblaze Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.""" in license_text.replace(os.linesep, '\n'), repr(license_text[-2000:]) def test_file_lock( b2_tool, application_key_id, application_key, sample_file, bucket_factory, schedule_bucket_cleanup ): lock_disabled_bucket_name = bucket_factory(bucket_type='allPrivate').name now_millis = current_time_millis() not_lockable_file = b2_tool.should_succeed_json( # file in a lock disabled bucket ['upload-file', '--quiet', lock_disabled_bucket_name, sample_file, 'a'] ) _assert_file_lock_configuration( b2_tool, not_lockable_file['fileId'], retention_mode=RetentionMode.NONE, legal_hold=LegalHold.UNSET ) b2_tool.should_fail( [ 'upload-file', '--quiet', lock_disabled_bucket_name, sample_file, 'a', '--file-retention-mode', 'governance', '--retain-until', str(now_millis + 1.5 * ONE_HOUR_MILLIS), '--legal-hold', 'on', ], r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)' ) b2_tool.should_fail( [ 'update-bucket', lock_disabled_bucket_name, 'allPrivate', '--default-retention-mode', 'compliance' ], 'ValueError: must specify period for retention mode RetentionMode.COMPLIANCE' ) b2_tool.should_fail( [ 'update-bucket', lock_disabled_bucket_name, 'allPrivate', '--default-retention-mode', 'compliance', '--default-retention-period', '7 days' ], r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)' ) lock_enabled_bucket_name = b2_tool.generate_bucket_name() schedule_bucket_cleanup(lock_enabled_bucket_name) b2_tool.should_succeed( [ 'create-bucket', lock_enabled_bucket_name, 'allPrivate', '--file-lock-enabled', *b2_tool.get_bucket_info_args(), ], ) updated_bucket = b2_tool.should_succeed_json( [ 'update-bucket', lock_enabled_bucket_name, 'allPrivate', '--default-retention-mode', 'governance', '--default-retention-period', '1 days', ], ) assert updated_bucket['defaultRetention'] == { 'mode': 'governance', 'period': { 'duration': 1, 'unit': 'days', }, } lockable_file = b2_tool.should_succeed_json( # file in a lock enabled bucket ['upload-file', '--no-progress', '--quiet', lock_enabled_bucket_name, sample_file, 'a'] ) b2_tool.should_fail( [ 'update-file-retention', not_lockable_file['fileName'], not_lockable_file['fileId'], 'governance', '--retain-until', str(now_millis + ONE_DAY_MILLIS + ONE_HOUR_MILLIS) ], r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)' ) b2_tool.should_succeed( # first let's try with a file name ['update-file-retention', lockable_file['fileName'], lockable_file['fileId'], 'governance', '--retain-until', str(now_millis + ONE_DAY_MILLIS + ONE_HOUR_MILLIS)] ) _assert_file_lock_configuration( b2_tool, lockable_file['fileId'], retention_mode=RetentionMode.GOVERNANCE, retain_until=now_millis + ONE_DAY_MILLIS + ONE_HOUR_MILLIS ) b2_tool.should_succeed( # and now without a file name ['update-file-retention', lockable_file['fileId'], 'governance', '--retain-until', str(now_millis + ONE_DAY_MILLIS + 2 * ONE_HOUR_MILLIS)] ) _assert_file_lock_configuration( b2_tool, lockable_file['fileId'], retention_mode=RetentionMode.GOVERNANCE, retain_until=now_millis + ONE_DAY_MILLIS + 2 * ONE_HOUR_MILLIS 
) b2_tool.should_fail( [ 'update-file-retention', lockable_file['fileName'], lockable_file['fileId'], 'governance', '--retain-until', str(now_millis + ONE_HOUR_MILLIS) ], "ERROR: Auth token not authorized to write retention or file already in 'compliance' mode or " "bypassGovernance=true parameter missing", ) b2_tool.should_succeed( [ 'update-file-retention', lockable_file['fileName'], lockable_file['fileId'], 'governance', '--retain-until', str(now_millis + ONE_HOUR_MILLIS), '--bypass-governance' ], ) _assert_file_lock_configuration( b2_tool, lockable_file['fileId'], retention_mode=RetentionMode.GOVERNANCE, retain_until=now_millis + ONE_HOUR_MILLIS ) b2_tool.should_fail( ['update-file-retention', lockable_file['fileName'], lockable_file['fileId'], 'none'], "ERROR: Auth token not authorized to write retention or file already in 'compliance' mode or " "bypassGovernance=true parameter missing", ) b2_tool.should_succeed( [ 'update-file-retention', lockable_file['fileName'], lockable_file['fileId'], 'none', '--bypass-governance' ], ) _assert_file_lock_configuration( b2_tool, lockable_file['fileId'], retention_mode=RetentionMode.NONE ) b2_tool.should_fail( ['update-file-legal-hold', not_lockable_file['fileId'], 'on'], r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)' ) b2_tool.should_succeed( # first let's try with a file name ['update-file-legal-hold', lockable_file['fileName'], lockable_file['fileId'], 'on'], ) _assert_file_lock_configuration(b2_tool, lockable_file['fileId'], legal_hold=LegalHold.ON) b2_tool.should_succeed( # and now without a file name ['update-file-legal-hold', lockable_file['fileId'], 'off'], ) _assert_file_lock_configuration(b2_tool, lockable_file['fileId'], legal_hold=LegalHold.OFF) updated_bucket = b2_tool.should_succeed_json( [ 'update-bucket', lock_enabled_bucket_name, 'allPrivate', '--default-retention-mode', 'none', ], ) assert updated_bucket['defaultRetention'] == {'mode': None} b2_tool.should_fail( [ 'upload-file', '--no-progress', '--quiet', lock_enabled_bucket_name, sample_file, 'a', '--file-retention-mode', 'governance', '--retain-until', str(now_millis - 1.5 * ONE_HOUR_MILLIS), ], r'ERROR: The retainUntilTimestamp must be in future \(retain_until_timestamp_must_be_in_future\)', ) uploaded_file = b2_tool.should_succeed_json( [ 'upload-file', '--no-progress', '--quiet', lock_enabled_bucket_name, sample_file, 'a', '--file-retention-mode', 'governance', '--retain-until', str(now_millis + 1.5 * ONE_HOUR_MILLIS), '--legal-hold', 'on', ] ) _assert_file_lock_configuration( b2_tool, uploaded_file['fileId'], retention_mode=RetentionMode.GOVERNANCE, retain_until=now_millis + 1.5 * ONE_HOUR_MILLIS, legal_hold=LegalHold.ON ) b2_tool.should_fail( [ 'copy-file-by-id', lockable_file['fileId'], lock_disabled_bucket_name, 'copied', '--file-retention-mode', 'governance', '--retain-until', str(now_millis + 1.25 * ONE_HOUR_MILLIS), '--legal-hold', 'off', ], r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)' ) copied_file = b2_tool.should_succeed_json( [ 'copy-file-by-id', lockable_file['fileId'], lock_enabled_bucket_name, 'copied', '--file-retention-mode', 'governance', '--retain-until', str(now_millis + 1.25 * ONE_HOUR_MILLIS), '--legal-hold', 'off', ] ) _assert_file_lock_configuration( b2_tool, copied_file['fileId'], retention_mode=RetentionMode.GOVERNANCE, retain_until=now_millis + 1.25 * ONE_HOUR_MILLIS, legal_hold=LegalHold.OFF ) lock_disabled_key_id, lock_disabled_key = make_lock_disabled_key(b2_tool) b2_tool.should_succeed( [ 
'authorize-account', '--environment', b2_tool.realm, lock_disabled_key_id, lock_disabled_key ], ) file_lock_without_perms_test( b2_tool, lock_enabled_bucket_name, lock_disabled_bucket_name, lockable_file['fileId'], not_lockable_file['fileId'], sample_file=sample_file ) b2_tool.should_succeed( ['authorize-account', '--environment', b2_tool.realm, application_key_id, application_key], ) deleting_locked_files( b2_tool, lock_enabled_bucket_name, lock_disabled_key_id, lock_disabled_key, sample_file ) def make_lock_disabled_key(b2_tool): key_name = 'no-perms-for-file-lock' + random_hex(6) created_key_stdout = b2_tool.should_succeed( [ 'create-key', key_name, 'listFiles,listBuckets,readFiles,writeKeys,deleteFiles', ] ) key_id, key = created_key_stdout.split() return key_id, key def file_lock_without_perms_test( b2_tool, lock_enabled_bucket_name, lock_disabled_bucket_name, lockable_file_id, not_lockable_file_id, sample_file ): b2_tool.should_fail( [ 'update-bucket', lock_enabled_bucket_name, 'allPrivate', '--default-retention-mode', 'governance', '--default-retention-period', '1 days' ], 'ERROR: unauthorized for application key with capabilities', ) _assert_file_lock_configuration( b2_tool, lockable_file_id, retention_mode=RetentionMode.UNKNOWN, legal_hold=LegalHold.UNKNOWN ) b2_tool.should_fail( [ 'update-file-retention', lockable_file_id, 'governance', '--retain-until', str(current_time_millis() + 7 * ONE_DAY_MILLIS) ], "ERROR: Auth token not authorized to write retention or file already in 'compliance' mode or " "bypassGovernance=true parameter missing", ) b2_tool.should_fail( [ 'update-file-retention', not_lockable_file_id, 'governance', '--retain-until', str(current_time_millis() + 7 * ONE_DAY_MILLIS) ], "ERROR: Auth token not authorized to write retention or file already in 'compliance' mode or " "bypassGovernance=true parameter missing", ) b2_tool.should_fail( ['update-file-legal-hold', lockable_file_id, 'on'], "ERROR: Auth token not authorized to write retention or file already in 'compliance' mode or " "bypassGovernance=true parameter missing", ) b2_tool.should_fail( ['update-file-legal-hold', not_lockable_file_id, 'on'], "ERROR: Auth token not authorized to write retention or file already in 'compliance' mode or " "bypassGovernance=true parameter missing", ) b2_tool.should_fail( [ 'upload-file', '--no-progress', '--quiet', lock_enabled_bucket_name, sample_file, 'bound_to_fail_anyway', '--file-retention-mode', 'governance', '--retain-until', str(current_time_millis() + ONE_HOUR_MILLIS), '--legal-hold', 'on', ], "unauthorized for application key with capabilities", ) b2_tool.should_fail( [ 'upload-file', '--no-progress', '--quiet', lock_disabled_bucket_name, sample_file, 'bound_to_fail_anyway', '--file-retention-mode', 'governance', '--retain-until', str(current_time_millis() + ONE_HOUR_MILLIS), '--legal-hold', 'on', ], "unauthorized for application key with capabilities", ) b2_tool.should_fail( [ 'copy-file-by-id', lockable_file_id, lock_enabled_bucket_name, 'copied', '--file-retention-mode', 'governance', '--retain-until', str(current_time_millis() + ONE_HOUR_MILLIS), '--legal-hold', 'off', ], 'ERROR: unauthorized for application key with capabilities', ) b2_tool.should_fail( [ 'copy-file-by-id', lockable_file_id, lock_disabled_bucket_name, 'copied', '--file-retention-mode', 'governance', '--retain-until', str(current_time_millis() + ONE_HOUR_MILLIS), '--legal-hold', 'off', ], 'ERROR: unauthorized for application key with capabilities', ) def upload_locked_file(b2_tool, bucket_name, 
sample_file): return b2_tool.should_succeed_json( [ 'upload-file', '--no-progress', '--quiet', '--file-retention-mode', 'governance', '--retain-until', str(int(time.time()) + 1000), bucket_name, sample_file, 'a-locked', ] ) def deleting_locked_files( b2_tool, lock_enabled_bucket_name, lock_disabled_key_id, lock_disabled_key, sample_file ): locked_file = upload_locked_file(b2_tool, lock_enabled_bucket_name, sample_file) b2_tool.should_fail( [ # master key 'delete-file-version', locked_file['fileName'], locked_file['fileId'], ], "ERROR: Access Denied for application key " ) b2_tool.should_succeed([ # master key 'delete-file-version', locked_file['fileName'], locked_file['fileId'], '--bypass-governance' ]) locked_file = upload_locked_file(b2_tool, lock_enabled_bucket_name, sample_file) b2_tool.should_succeed( [ 'authorize-account', '--environment', b2_tool.realm, lock_disabled_key_id, lock_disabled_key ], ) b2_tool.should_fail([ # lock disabled key 'delete-file-version', locked_file['fileName'], locked_file['fileId'], '--bypass-governance', ], "ERROR: unauthorized for application key with capabilities '") def test_profile_switch(b2_tool): # this test could be a unit test, but that would add a lot of complexity because of # the necessity to pass a mocked B2Api to ConsoleTool; it's much easier to # just have an integration test instead MISSING_ACCOUNT_PATTERN = 'Missing account data' b2_tool.should_succeed( [ 'authorize-account', '--environment', b2_tool.realm, b2_tool.account_id, b2_tool.application_key, ] ) b2_tool.should_succeed(['get-account-info']) b2_tool.should_succeed(['clear-account']) b2_tool.should_fail(['get-account-info'], expected_pattern=MISSING_ACCOUNT_PATTERN) # in order to use the --profile flag, we need to temporarily # delete B2_ACCOUNT_INFO_ENV_VAR B2_ACCOUNT_INFO = os.environ.pop(B2_ACCOUNT_INFO_ENV_VAR, None) # now authorize a different account profile = 'profile-for-test-' + random_hex(6) b2_tool.should_fail( ['get-account-info', '--profile', profile], expected_pattern=MISSING_ACCOUNT_PATTERN, ) b2_tool.should_succeed( [ 'authorize-account', '--environment', b2_tool.realm, '--profile', profile, b2_tool.account_id, b2_tool.application_key, ] ) account_info = b2_tool.should_succeed_json(['get-account-info', '--profile', profile]) account_file_path = account_info['accountFilePath'] assert profile in account_file_path, \ f'accountFilePath "{account_file_path}" should contain profile name "{profile}"' b2_tool.should_succeed(['clear-account', '--profile', profile]) b2_tool.should_fail( ['get-account-info', '--profile', profile], expected_pattern=MISSING_ACCOUNT_PATTERN, ) os.remove(account_file_path) # restore B2_ACCOUNT_INFO_ENV_VAR, if it existed if B2_ACCOUNT_INFO: os.environ[B2_ACCOUNT_INFO_ENV_VAR] = B2_ACCOUNT_INFO def test_replication_basic(b2_tool, bucket_name, schedule_bucket_cleanup): key_one_name = 'clt-testKey-01' + random_hex(6) created_key_stdout = b2_tool.should_succeed( [ 'create-key', key_one_name, 'listBuckets,readFiles', ] ) key_one_id, _ = created_key_stdout.split() key_two_name = 'clt-testKey-02' + random_hex(6) created_key_stdout = b2_tool.should_succeed( [ 'create-key', key_two_name, 'listBuckets,writeFiles', ] ) key_two_id, _ = created_key_stdout.split() destination_bucket_name = bucket_name destination_bucket = b2_tool.should_succeed_json(['get-bucket', destination_bucket_name]) # test that by default there's no `replicationConfiguration` key assert 'replicationConfiguration' not in destination_bucket # ---------------- set up replication destination ---------------- # update
destination bucket info destination_replication_configuration = { 'asReplicationSource': None, 'asReplicationDestination': { 'sourceToDestinationKeyMapping': { key_one_id: key_two_id, }, }, } destination_replication_configuration_json = json.dumps(destination_replication_configuration) destination_bucket = b2_tool.should_succeed_json( [ 'update-bucket', destination_bucket_name, 'allPublic', '--replication', destination_replication_configuration_json, ] ) # test that destination bucket is registered as replication destination assert destination_bucket['replication'].get('asReplicationSource') is None assert destination_bucket['replication' ]['asReplicationDestination' ] == destination_replication_configuration['asReplicationDestination'] # ---------------- set up replication source ---------------- source_replication_configuration = { "asReplicationSource": { "replicationRules": [ { "destinationBucketId": destination_bucket['bucketId'], "fileNamePrefix": "one/", "includeExistingFiles": False, "isEnabled": True, "priority": 1, "replicationRuleName": "replication-one" }, { "destinationBucketId": destination_bucket['bucketId'], "fileNamePrefix": "two/", "includeExistingFiles": False, "isEnabled": True, "priority": 2, "replicationRuleName": "replication-two" } ], "sourceApplicationKeyId": key_one_id, }, } source_replication_configuration_json = json.dumps(source_replication_configuration) # create a source bucket and set up replication to destination bucket source_bucket_name = b2_tool.generate_bucket_name() schedule_bucket_cleanup(source_bucket_name) b2_tool.should_succeed( [ 'create-bucket', source_bucket_name, 'allPublic', '--replication', source_replication_configuration_json, *b2_tool.get_bucket_info_args(), ] ) source_bucket = b2_tool.should_succeed_json(['get-bucket', source_bucket_name]) # test that all replication rules are present in source bucket assert source_bucket['replication']['asReplicationSource' ] == source_replication_configuration['asReplicationSource'] # test that source bucket is not mentioned as replication destination assert source_bucket['replication'].get('asReplicationDestination') is None # ---------------- attempt enabling object lock ---------------- b2_tool.should_fail( ['update-bucket', source_bucket_name, '--file-lock-enabled'], 'ERROR: Operation not supported for buckets with source replication' ) # ---------------- remove replication source ---------------- no_replication_configuration = { 'asReplicationSource': None, 'asReplicationDestination': None, } no_replication_configuration_json = json.dumps(no_replication_configuration) source_bucket = b2_tool.should_succeed_json( [ 'update-bucket', source_bucket_name, 'allPublic', '--replication', no_replication_configuration_json ] ) # test that source bucket replication is removed assert source_bucket['replication'] == { 'asReplicationDestination': None, 'asReplicationSource': None } # ---------------- remove replication destination ---------------- destination_bucket = b2_tool.should_succeed_json( [ 'update-bucket', destination_bucket_name, 'allPublic', '--replication', '{}', ] ) # test that destination bucket replication is removed assert destination_bucket['replication'] == { 'asReplicationDestination': None, 'asReplicationSource': None } b2_tool.should_succeed(['delete-key', key_one_id]) b2_tool.should_succeed(['delete-key', key_two_id]) def test_replication_setup(b2_tool, bucket_name, schedule_bucket_cleanup): source_bucket_name = b2_tool.generate_bucket_name() schedule_bucket_cleanup(source_bucket_name) 
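# `replication-setup` is invoked twice below: once with defaults, then with explicit
# --priority/--file-name-prefix/--name. The assertions further down document the
# defaults observed here: an empty file name prefix, priority 128, and a rule name
# falling back to the destination bucket name; they also check that the second
# invocation reuses the existing source/destination key mapping instead of creating
# fresh keys.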
b2_tool.should_succeed( [ 'create-bucket', source_bucket_name, 'allPublic', '--file-lock-enabled', *b2_tool.get_bucket_info_args(), ] ) destination_bucket_name = bucket_name b2_tool.should_succeed(['replication-setup', source_bucket_name, destination_bucket_name]) destination_bucket_old = b2_tool.should_succeed_json(['get-bucket', destination_bucket_name]) b2_tool.should_succeed( [ 'replication-setup', '--priority', '132', '--file-name-prefix', 'foo', '--name', 'my-replication-rule', source_bucket_name, destination_bucket_name, ] ) source_bucket = b2_tool.should_succeed_json(['get-bucket', source_bucket_name]) destination_bucket = b2_tool.should_succeed_json(['get-bucket', destination_bucket_name]) assert source_bucket['replication']['asReplicationSource']['replicationRules'] == [ { "destinationBucketId": destination_bucket['bucketId'], "fileNamePrefix": "", "includeExistingFiles": False, "isEnabled": True, "priority": 128, "replicationRuleName": destination_bucket['bucketName'], }, { "destinationBucketId": destination_bucket['bucketId'], "fileNamePrefix": "foo", "includeExistingFiles": False, "isEnabled": True, "priority": 132, "replicationRuleName": "my-replication-rule", }, ] for key_one_id, key_two_id in destination_bucket['replication']['asReplicationDestination'][ 'sourceToDestinationKeyMapping'].items(): b2_tool.should_succeed(['delete-key', key_one_id]) b2_tool.should_succeed(['delete-key', key_two_id]) assert destination_bucket_old['replication']['asReplicationDestination'][ 'sourceToDestinationKeyMapping'] == destination_bucket['replication'][ 'asReplicationDestination']['sourceToDestinationKeyMapping'] def test_replication_monitoring(b2_tool, bucket_name, sample_file, schedule_bucket_cleanup): # ---------------- set up keys ---------------- key_one_name = 'clt-testKey-01' + random_hex(6) created_key_stdout = b2_tool.should_succeed( [ 'create-key', key_one_name, 'listBuckets,readFiles', ] ) key_one_id, _ = created_key_stdout.split() key_two_name = 'clt-testKey-02' + random_hex(6) created_key_stdout = b2_tool.should_succeed( [ 'create-key', key_two_name, 'listBuckets,writeFiles', ] ) key_two_id, _ = created_key_stdout.split() # ---------------- add test data ---------------- destination_bucket_name = bucket_name uploaded_a = b2_tool.should_succeed_json( ['upload-file', '--quiet', destination_bucket_name, sample_file, 'one/a'] ) # ---------------- set up replication destination ---------------- # update destination bucket info destination_replication_configuration = { 'asReplicationSource': None, 'asReplicationDestination': { 'sourceToDestinationKeyMapping': { key_one_id: key_two_id, }, }, } destination_replication_configuration_json = json.dumps(destination_replication_configuration) destination_bucket = b2_tool.should_succeed_json( [ 'update-bucket', destination_bucket_name, 'allPublic', '--replication', destination_replication_configuration_json, ] ) # ---------------- set up replication source ---------------- source_replication_configuration = { "asReplicationSource": { "replicationRules": [ { "destinationBucketId": destination_bucket['bucketId'], "fileNamePrefix": "one/", "includeExistingFiles": False, "isEnabled": True, "priority": 1, "replicationRuleName": "replication-one" }, { "destinationBucketId": destination_bucket['bucketId'], "fileNamePrefix": "two/", "includeExistingFiles": False, "isEnabled": True, "priority": 2, "replicationRuleName": "replication-two" } ], "sourceApplicationKeyId": key_one_id, }, } source_replication_configuration_json = 
json.dumps(source_replication_configuration) # create a source bucket and set up replication to destination bucket source_bucket_name = b2_tool.generate_bucket_name() schedule_bucket_cleanup(source_bucket_name) b2_tool.should_succeed( [ 'create-bucket', source_bucket_name, 'allPublic', '--file-lock-enabled', '--replication', source_replication_configuration_json, *b2_tool.get_bucket_info_args(), ] ) # make test data uploaded_a = b2_tool.should_succeed_json( ['upload-file', '--quiet', source_bucket_name, sample_file, 'one/a'] ) b2_tool.should_succeed_json( [ 'upload-file', '--quiet', source_bucket_name, '--legal-hold', 'on', sample_file, 'two/b', ] ) # encryption # SSE-B2 upload_encryption_args = ['--destination-server-side-encryption', 'SSE-B2'] upload_additional_env = {} b2_tool.should_succeed_json( ['upload-file', '--quiet', source_bucket_name, sample_file, 'two/c'] + upload_encryption_args, additional_env=upload_additional_env, ) # SSE-C upload_encryption_args = ['--destination-server-side-encryption', 'SSE-C'] upload_additional_env = { 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(SSE_C_AES.key.secret).decode(), 'B2_DESTINATION_SSE_C_KEY_ID': SSE_C_AES.key.key_id, } b2_tool.should_succeed_json( ['upload-file', '--quiet', source_bucket_name, sample_file, 'two/d'] + upload_encryption_args, additional_env=upload_additional_env, ) # encryption + legal hold b2_tool.should_succeed_json( [ 'upload-file', '--quiet', source_bucket_name, sample_file, 'two/e', '--legal-hold', 'on', ] + upload_encryption_args, additional_env=upload_additional_env, ) # there is just one file, so clean after itself for faster execution b2_tool.should_succeed(['delete-file-version', uploaded_a['fileName'], uploaded_a['fileId']]) # run stats command replication_status_json = b2_tool.should_succeed_json( [ 'replication-status', # '--destination-profile', # profile, '--no-progress', # '--columns=count, hash differs', '--output-format', 'json', source_bucket_name, ] ) assert replication_status_json in [ { "replication-one": [ { "count": 1, "destination_replication_status": None, "hash_differs": None, "metadata_differs": None, "source_has_file_retention": None, "source_has_hide_marker": None, "source_has_large_metadata": None, "source_has_legal_hold": None, "source_encryption_mode": None, "source_replication_status": None, } ], "replication-two": [ { "count": 1, "destination_replication_status": None, "hash_differs": None, "metadata_differs": None, "source_has_file_retention": False, "source_has_hide_marker": False, "source_has_large_metadata": False, "source_has_legal_hold": True, "source_encryption_mode": 'none', "source_replication_status": first, }, { "count": 1, "destination_replication_status": None, "hash_differs": None, "metadata_differs": None, "source_has_file_retention": False, "source_has_hide_marker": False, "source_has_large_metadata": False, "source_has_legal_hold": False, "source_encryption_mode": 'SSE-B2', "source_replication_status": second, }, { "count": 1, "destination_replication_status": None, "hash_differs": None, "metadata_differs": None, "source_has_file_retention": False, "source_has_hide_marker": False, "source_has_large_metadata": False, "source_has_legal_hold": False, "source_encryption_mode": 'SSE-C', "source_replication_status": None, }, { "count": 1, "destination_replication_status": None, "hash_differs": None, "metadata_differs": None, "source_has_file_retention": False, "source_has_hide_marker": False, "source_has_large_metadata": False, "source_has_legal_hold": True, 
"source_encryption_mode": 'SSE-C', "source_replication_status": None, } ] } for first, second in itertools.product(['FAILED', 'PENDING'], ['FAILED', 'PENDING']) ] def test_enable_file_lock_first_retention_second(b2_tool, bucket_name): # enable file lock only b2_tool.should_succeed(['update-bucket', bucket_name, '--file-lock-enabled']) # set retention with file lock already enabled b2_tool.should_succeed( [ 'update-bucket', bucket_name, '--default-retention-mode', 'compliance', '--default-retention-period', '7 days' ] ) # attempt to re-enable should be a noop b2_tool.should_succeed(['update-bucket', bucket_name, '--file-lock-enabled']) def test_enable_file_lock_and_set_retention_at_once(b2_tool, bucket_name): # attempt setting retention without file lock enabled b2_tool.should_fail( [ 'update-bucket', bucket_name, '--default-retention-mode', 'compliance', '--default-retention-period', '7 days' ], r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)' ) # enable file lock and set retention at once b2_tool.should_succeed( [ 'update-bucket', bucket_name, '--default-retention-mode', 'compliance', '--default-retention-period', '7 days', '--file-lock-enabled' ] ) # attempt to re-enable should be a noop b2_tool.should_succeed(['update-bucket', bucket_name, '--file-lock-enabled']) def _assert_file_lock_configuration( b2_tool, file_id, retention_mode: RetentionMode | None = None, retain_until: int | None = None, legal_hold: LegalHold | None = None ): file_version = b2_tool.should_succeed_json(['file-info', f"b2id://{file_id}"]) if retention_mode is not None: if file_version['fileRetention']['mode'] == 'unknown': actual_file_retention = UNKNOWN_FILE_RETENTION_SETTING else: actual_file_retention = FileRetentionSetting.from_file_retention_value_dict( file_version['fileRetention'] ) expected_file_retention = FileRetentionSetting(retention_mode, retain_until) assert expected_file_retention == actual_file_retention if legal_hold is not None: if file_version['legalHold'] == 'unknown': actual_legal_hold = LegalHold.UNKNOWN else: actual_legal_hold = LegalHold.from_string_or_none(file_version['legalHold']) assert legal_hold == actual_legal_hold def test_upload_file__custom_upload_time(b2_tool, bucket_name, sample_file, b2_uri_args): file_data = read_file(sample_file) cut = 12345 cut_printable = '1970-01-01 00:00:12' args = [ 'upload-file', '--no-progress', '--custom-upload-time', str(cut), '--quiet', bucket_name, sample_file, 'a', ] succeeded, stdout = b2_tool.run_command(args) if not succeeded: b2_tool.should_fail(args, 'custom_timestamp_not_allowed') else: # file_id, action, date, time, size(, replication), name b2_tool.should_succeed( ['ls', '--long', *b2_uri_args(bucket_name)], '^4_z.* upload {} +{} a'.format( cut_printable, len(file_data), ) ) # file_id, action, date, time, size(, replication), name b2_tool.should_succeed( ['ls', '--long', '--replication', *b2_uri_args(bucket_name)], f'^4_z.* upload {cut_printable} +{len(file_data)} - a' ) @skip_on_windows def test_upload_file__stdin_pipe_operator(request, bash_runner, b2_tool, bucket_name): """Test upload-file from stdin using pipe operator.""" content = request.node.name run = bash_runner( f'echo -n {content!r} ' f'| ' f'{" ".join(b2_tool.parse_command(b2_tool.prepare_env()))} upload-file {bucket_name} - {request.node.name}.txt' ) assert hashlib.sha1(content.encode()).hexdigest() in run.stdout @skip_on_windows def test_upload_unbound_stream__redirect_operator( request, bash_runner, b2_tool, bucket_name, is_running_on_docker ): 
"""Test upload-unbound-stream from stdin using redirect operator.""" if is_running_on_docker: pytest.skip('Not supported on Docker') content = request.node.name command = request.config.getoption('--sut') run = bash_runner( f'{command} upload-unbound-stream {bucket_name} <(echo -n {content}) {request.node.name}.txt' ) assert hashlib.sha1(content.encode()).hexdigest() in run.stdout def test_download_file_stdout( b2_tool, bucket_name, sample_filepath, tmp_path, uploaded_sample_file ): assert b2_tool.should_succeed( ['download-file', '--quiet', f"b2://{bucket_name}/{uploaded_sample_file['fileName']}", '-'], ) == sample_filepath.read_text() assert b2_tool.should_succeed( ['download-file', '--quiet', f"b2id://{uploaded_sample_file['fileId']}", '-'], ) == sample_filepath.read_text() def test_download_file_to_directory( b2_tool, bucket_name, sample_filepath, tmp_path, uploaded_sample_file ): downloads_directory = 'downloads' target_directory = tmp_path / downloads_directory target_directory.mkdir() filename_as_path = pathlib.Path(uploaded_sample_file['fileName']) sample_file_content = sample_filepath.read_text() b2_tool.should_succeed( [ 'download-file', '--quiet', f"b2://{bucket_name}/{uploaded_sample_file['fileName']}", str(target_directory), ], ) downloaded_file = target_directory / filename_as_path assert downloaded_file.read_text() == sample_file_content, \ f'{downloaded_file}, {downloaded_file.read_text()}, {sample_file_content}' b2_tool.should_succeed( [ 'download-file', '--quiet', f"b2id://{uploaded_sample_file['fileId']}", str(target_directory), ], ) # A second file should be created. new_files = [ filepath for filepath in target_directory.glob(f'{filename_as_path.stem}*{filename_as_path.suffix}') if filepath.name != filename_as_path.name ] assert len(new_files) == 1, f'{new_files}' assert new_files[0].read_text() == sample_file_content, \ f'{new_files}, {new_files[0].read_text()}, {sample_file_content}' def test_cat(b2_tool, bucket_name, sample_filepath, tmp_path, uploaded_sample_file): assert b2_tool.should_succeed( ['cat', f"b2://{bucket_name}/{uploaded_sample_file['fileName']}"], ) == sample_filepath.read_text() assert b2_tool.should_succeed(['cat', f"b2id://{uploaded_sample_file['fileId']}"] ) == sample_filepath.read_text() def test_header_arguments(b2_tool, bucket_name, sample_filepath, tmp_path): # yapf: disable args = [ '--cache-control', 'max-age=3600', '--content-disposition', 'attachment', '--content-encoding', 'gzip', '--content-language', 'en', '--expires', 'Thu, 01 Dec 2050 16:00:00 GMT', ] # yapf: enable expected_file_info = { 'b2-cache-control': 'max-age=3600', 'b2-content-disposition': 'attachment', 'b2-content-encoding': 'gzip', 'b2-content-language': 'en', 'b2-expires': 'Thu, 01 Dec 2050 16:00:00 GMT', } def assert_expected(file_info, expected=expected_file_info): for key, val in expected.items(): assert file_info[key] == val status, stdout, stderr = b2_tool.execute( [ 'upload-file', '--quiet', '--no-progress', bucket_name, str(sample_filepath), 'sample_file', *args, '--info', 'b2-content-disposition=will-be-overwritten', ] ) assert status == 0 file_version = json.loads(stdout) assert_expected(file_version['fileInfo']) # Since we used both --info and --content-disposition to set b2-content-disposition, # a warning should be emitted assert 'will be overwritten' in stderr and 'b2-content-disposition = attachment' in stderr copied_version = b2_tool.should_succeed_json( [ 'copy-file-by-id', '--quiet', *args, '--content-type', 'text/plain', file_version['fileId'], 
bucket_name, 'copied_file' ] ) assert_expected(copied_version['fileInfo']) download_output = b2_tool.should_succeed( ['download-file', f"b2id://{file_version['fileId']}", tmp_path / 'downloaded_file'] ) assert re.search(r'CacheControl: *max-age=3600', download_output) assert re.search(r'ContentDisposition: *attachment', download_output) assert re.search(r'ContentEncoding: *gzip', download_output) assert re.search(r'ContentLanguage: *en', download_output) assert re.search(r'Expires: *Thu, 01 Dec 2050 16:00:00 GMT', download_output) def test_notification_rules(b2_tool, bucket_name): auth_dict = b2_tool.should_succeed_json(['get-account-info']) if 'writeBucketNotifications' not in auth_dict['allowed']['capabilities']: pytest.skip('Test account does not have writeBucketNotifications capability') private_preview_pattern = re.compile(r'FeaturePreviewWarning') assert b2_tool.should_succeed_json( ["notification-rules", "list", f"b2://{bucket_name}", "--json"], expected_stderr_pattern=private_preview_pattern ) == [] notification_rule = { "eventTypes": ["b2:ObjectCreated:*"], "isEnabled": True, "name": "test-rule", "objectNamePrefix": "", "targetConfiguration": { "customHeaders": None, "hmacSha256SigningSecret": None, "targetType": "webhook", "url": "https://example.com/webhook", } } # add rule created_rule = b2_tool.should_succeed_json( [ "notification-rules", "create", "--json", f"b2://{bucket_name}", "test-rule", "--webhook-url", "https://example.com/webhook", "--event-type", "b2:ObjectCreated:*", ], expected_stderr_pattern=private_preview_pattern ) expected_rules = [{**notification_rule, "isSuspended": False, "suspensionReason": ""}] assert created_rule == expected_rules[0] # modify rule secret = "0testSecret000000000000000000032" modified_rule = b2_tool.should_succeed_json( [ "notification-rules", "update", "--json", f"b2://{bucket_name}/prefix", "test-rule", "--disable", "--sign-secret", secret, ], expected_stderr_pattern=private_preview_pattern ) expected_rules[0].update({"objectNamePrefix": "prefix", "isEnabled": False}) expected_rules[0]["targetConfiguration"]["hmacSha256SigningSecret"] = secret assert modified_rule == expected_rules[0] # read updated rules assert b2_tool.should_succeed_json( ["notification-rules", "list", f"b2://{bucket_name}", "--json"], expected_stderr_pattern=private_preview_pattern ) == expected_rules # delete rule by name assert b2_tool.should_succeed( ["notification-rules", "delete", f"b2://{bucket_name}", "test-rule"], expected_stderr_pattern=private_preview_pattern ) == f"Rule 'test-rule' has been deleted from b2://{bucket_name}/\n" assert b2_tool.should_succeed_json( ["notification-rules", "list", f"b2://{bucket_name}", "--json"], expected_stderr_pattern=private_preview_pattern ) == [] B2_Command_Line_Tool-3.19.1/test/integration/test_help.py000066400000000000000000000014311461201031300233030ustar00rootroot00000000000000###################################################################### # # File: test/integration/test_help.py # # Copyright 2024 Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import platform import re import subprocess def test_help(cli_version): p = subprocess.run( [cli_version, "--help"], check=True, capture_output=True, text=True, ) # verify help contains apiver binary name expected_name = cli_version if platform.system() == 'Windows': expected_name += '.exe' assert re.match(r"^_?b2(v\d+)?(\.exe)?$", expected_name) # test sanity check assert f" {expected_name} --help" in p.stdout B2_Command_Line_Tool-3.19.1/test/integration/test_tqdm_closer.py000066400000000000000000000026211461201031300246710ustar00rootroot00000000000000###################################################################### # # File: test/integration/test_tqdm_closer.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import re import sys import pytest @pytest.mark.skipif( (sys.platform != 'darwin') or ((sys.version_info.major, sys.version_info.minor) < (3, 11)), reason='Tqdm closing error only occurs on OSX and python 3.11 or newer', ) def test_tqdm_closer(b2_tool, bucket, file_name): # test that stderr doesn't contain any warning, in particular warnings about multiprocessing resource tracker # leaking semaphores b2_tool.should_succeed([ 'cat', f'b2://{bucket.name}/{file_name}', ]) # test that disabling _TqdmCloser does produce a resource tracker warning. Should the following check ever fail, # that would mean that either Tqdm or python fixed the issue and _TqdmCloser can be disabled for fixed versions b2_tool.should_succeed( [ 'cat', f'b2://{bucket.name}/{file_name}', ], additional_env={'B2_TEST_DISABLE_TQDM_CLOSER': '1'}, expected_stderr_pattern=re.compile( r'UserWarning: resource_tracker: There appear to be \d+ leaked semaphore' r' objects to clean up at shutdown' ), ) B2_Command_Line_Tool-3.19.1/test/static/000077500000000000000000000000001461201031300177075ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/test/static/__init__.py000066400000000000000000000004421461201031300220200ustar00rootroot00000000000000###################################################################### # # File: test/static/__init__.py # # Copyright 2020 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### B2_Command_Line_Tool-3.19.1/test/static/test_licenses.py000066400000000000000000000043101461201031300231230ustar00rootroot00000000000000###################################################################### # # File: test/static/test_licenses.py # # Copyright 2020 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from datetime import datetime from glob import glob from itertools import islice import pytest FIXER_CMD = "python test/static/test_licenses.py" LICENSE_HEADER_TMPL = """\ ###################################################################### # # File: {path} # # Copyright {year} Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### """ def get_file_header_errors(file_path_glob: str) -> dict[str, str]: failed_files = {} for file in glob(file_path_glob, recursive=True): if file.startswith('build/'): # built files naturally have a different file path than source files continue with open(file) as fd: file = file.replace( '\\', '/' ) # glob('**/*.py') on Windows returns "b2\console_tool.py" (wrong slash) head = ''.join(islice(fd, 9)) if 'All Rights Reserved' not in head: failed_files[file] = 'Missing "All Rights Reserved" in the header' elif file not in head: failed_files[file] = 'Wrong file name in the header' return failed_files def test_files_headers(): failed_files = get_file_header_errors('**/*.py') if failed_files: error_msg = '; '.join(f'{path}:{error}' for path, error in failed_files.items()) pytest.fail(f'Bad file headers in files (you may want to run {FIXER_CMD!r}): {error_msg}') def insert_header(file_path: str): with open(file_path, 'r+') as fd: content = fd.read() fd.seek(0) fd.write(LICENSE_HEADER_TMPL.format( path=file_path, year=datetime.now().year, )) fd.write(content) def _main(): failed_files = get_file_header_errors('**/*.py') for filepath in failed_files: insert_header(filepath) if __name__ == '__main__': _main() B2_Command_Line_Tool-3.19.1/test/unit/000077500000000000000000000000001461201031300173775ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/test/unit/__init__.py000066400000000000000000000006061461201031300215120ustar00rootroot00000000000000###################################################################### # # File: test/unit/__init__.py # # Copyright 2019 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### """ B2 CLI tests This package contains all test that do not need to interact with remote server. """ B2_Command_Line_Tool-3.19.1/test/unit/_cli/000077500000000000000000000000001461201031300203055ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/test/unit/_cli/__init__.py000066400000000000000000000004451461201031300224210ustar00rootroot00000000000000###################################################################### # # File: test/unit/_cli/__init__.py # # Copyright 2019 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### B2_Command_Line_Tool-3.19.1/test/unit/_cli/fixtures/000077500000000000000000000000001461201031300221565ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/test/unit/_cli/fixtures/__init__.py000066400000000000000000000004561461201031300242740ustar00rootroot00000000000000###################################################################### # # File: test/unit/_cli/fixtures/__init__.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### B2_Command_Line_Tool-3.19.1/test/unit/_cli/fixtures/dummy_command.py000077500000000000000000000011741461201031300253670ustar00rootroot00000000000000#!/usr/bin/env python ###################################################################### # # File: test/unit/_cli/fixtures/dummy_command.py # # Copyright 2024 Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import argparse def main(): parser = argparse.ArgumentParser(description="Dummy command") parser.add_argument("--foo", help="foo help") parser.add_argument("--bar", help="bar help") args = parser.parse_args() print(args.foo) print(args.bar) if __name__ == "__main__": main() B2_Command_Line_Tool-3.19.1/test/unit/_cli/fixtures/module_loading_b2sdk.py000066400000000000000000000006631461201031300266040ustar00rootroot00000000000000###################################################################### # # File: test/unit/_cli/fixtures/module_loading_b2sdk.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### # This is a helper module for test_autocomplete_cache.py from b2sdk.v2 import B2Api # noqa def function(): pass B2_Command_Line_Tool-3.19.1/test/unit/_cli/test_autocomplete_cache.py000066400000000000000000000240631461201031300255470ustar00rootroot00000000000000###################################################################### # # File: test/unit/_cli/test_autocomplete_cache.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### # Most of the tests in this module are running in a forked process # because argcomplete and autocomplete_cache mess with global state, # making the argument parser unusable for other tests. from __future__ import annotations import contextlib import importlib import io import os import pickle import sys from typing import Any import argcomplete import pytest import b2._internal._cli.argcompleters import b2._internal.arg_parser import b2._internal.console_tool from b2._internal._cli import autocomplete_cache # We can't use pytest.mark.skipif to skip forked tests because with pytest-forked, # there is an attempt to fork even if the test is marked as skipped. # See https://github.com/pytest-dev/pytest-forked/issues/44 if sys.platform == "win32": forked = pytest.mark.skip(reason="Tests can't be run forked on windows") else: forked = pytest.mark.forked class Exit: """A mocked exit method callable. Instead of actually exiting, it just stores the exit code and returns.""" code: int | None @property def success(self): return self.code == 0 @property def empty(self): return self.code is None def __init__(self): self.code = None def __call__(self, n: int): self.code = n @pytest.fixture def autocomplete_runner(monkeypatch, b2_cli): def fdopen(fd, *args, **kwargs): # argcomplete package tries to open fd 9 for debugging which causes # pytest to later raise errors about bad file descriptors. 
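# (By argcomplete's convention, completion results are written to fd 8 while fd 9
# is a debug stream; the tests below pass an explicit output_stream, so redirecting
# the debug fd to stderr here should be harmless.)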
if fd == 9: return sys.stderr return os.fdopen(fd, *args, **kwargs) @contextlib.contextmanager def runner(command: str): with monkeypatch.context() as m: m.setenv('COMP_LINE', command) m.setenv('COMP_POINT', str(len(command))) m.setenv('_ARGCOMPLETE_IFS', ' ') m.setenv('_ARGCOMPLETE', '1') m.setattr('os.fdopen', fdopen) def _get_b2api_for_profile(profile: str): return b2_cli.b2_api m.setattr('b2._internal._cli.b2api._get_b2api_for_profile', _get_b2api_for_profile) yield return runner def argcomplete_result(): parser = b2._internal.console_tool.B2.create_parser() exit, output = Exit(), io.StringIO() argcomplete.autocomplete(parser, exit_method=exit, output_stream=output) return exit.code, output.getvalue() def cached_complete_result(cache: autocomplete_cache.AutocompleteCache, raise_exc: bool = True): exit, output = Exit(), io.StringIO() cache.autocomplete_from_cache( uncached_args={ 'exit_method': exit, 'output_stream': output }, raise_exc=raise_exc ) return exit.code, output.getvalue() def uncached_complete_result(cache: autocomplete_cache.AutocompleteCache): exit, output = Exit(), io.StringIO() parser = b2._internal.console_tool.B2.create_parser() cache.cache_and_autocomplete( parser, uncached_args={ 'exit_method': exit, 'output_stream': output } ) return exit.code, output.getvalue() @forked def test_complete_main_command(autocomplete_runner, tmp_path): cache = autocomplete_cache.AutocompleteCache( tracker=autocomplete_cache.VersionTracker(), store=autocomplete_cache.HomeCachePickleStore(tmp_path), ) with autocomplete_runner('b2 '): exit, argcomplete_output = argcomplete_result() assert exit == 0 assert 'get-bucket' in argcomplete_output with autocomplete_runner('b2 '): exit, output = cached_complete_result(cache) # Nothing has been cached yet, we expect simple return, not an exit assert exit is None assert not output with autocomplete_runner('b2 '): exit, output = uncached_complete_result(cache) assert exit == 0 assert output == argcomplete_output with autocomplete_runner('b2 '): exit, output = cached_complete_result(cache) assert exit == 0 assert output == argcomplete_output @forked def test_complete_with_bucket_suggestions(autocomplete_runner, tmp_path, bucket, authorized_b2_cli): cache = autocomplete_cache.AutocompleteCache( tracker=autocomplete_cache.VersionTracker(), store=autocomplete_cache.HomeCachePickleStore(tmp_path), ) with autocomplete_runner('b2 get-bucket '): exit, argcomplete_output = argcomplete_result() assert exit == 0 assert bucket in argcomplete_output exit, output = uncached_complete_result(cache) assert exit == 0 assert output == argcomplete_output exit, output = cached_complete_result(cache) assert exit == 0 assert output == argcomplete_output @forked def test_complete_with_escaped_control_characters( autocomplete_runner, tmp_path, bucket, uploaded_file_with_control_chars, authorized_b2_cli ): cc_file_name = uploaded_file_with_control_chars['fileName'] escaped_cc_file_name = uploaded_file_with_control_chars['escapedFileName'] cache = autocomplete_cache.AutocompleteCache( tracker=autocomplete_cache.VersionTracker(), store=autocomplete_cache.HomeCachePickleStore(tmp_path), ) with autocomplete_runner(f'b2 hide-file {bucket} '): exit, argcomplete_output = argcomplete_result() assert exit == 0 assert escaped_cc_file_name in argcomplete_output assert cc_file_name not in argcomplete_output exit, output = uncached_complete_result(cache) assert exit == 0 assert output == argcomplete_output exit, output = cached_complete_result(cache) assert exit == 0 assert output == 
argcomplete_output @forked def test_complete_with_file_suggestions( autocomplete_runner, tmp_path, bucket, uploaded_file, authorized_b2_cli ): file_name = uploaded_file['fileName'] cache = autocomplete_cache.AutocompleteCache( tracker=autocomplete_cache.VersionTracker(), store=autocomplete_cache.HomeCachePickleStore(tmp_path), ) with autocomplete_runner(f'b2 hide-file {bucket} '): exit, argcomplete_output = argcomplete_result() assert exit == 0 assert file_name in argcomplete_output exit, output = cached_complete_result(cache) assert exit is None assert output == '' exit, output = uncached_complete_result(cache) assert exit == 0 assert output == argcomplete_output exit, output = cached_complete_result(cache) assert exit == 0 assert output == argcomplete_output @forked def test_complete_with_file_uri_suggestions( autocomplete_runner, tmp_path, bucket, uploaded_file, authorized_b2_cli ): file_name = uploaded_file['fileName'] cache = autocomplete_cache.AutocompleteCache( tracker=autocomplete_cache.VersionTracker(), store=autocomplete_cache.HomeCachePickleStore(tmp_path), ) with autocomplete_runner(f'b2 download-file b2://{bucket}/'): exit, argcomplete_output = argcomplete_result() assert exit == 0 assert file_name in argcomplete_output exit, output = uncached_complete_result(cache) assert exit == 0 assert output == argcomplete_output exit, output = cached_complete_result(cache) assert exit == 0 assert output == argcomplete_output def test_pickle_store(tmp_path): dir = tmp_path store = autocomplete_cache.HomeCachePickleStore(dir) store.set_pickle('test_1', b'test_data_1') assert store.get_pickle('test_1') == b'test_data_1' assert store.get_pickle('test_2') is None assert len(list(dir.glob('**'))) == 1 store.set_pickle('test_2', b'test_data_2') assert store.get_pickle('test_2') == b'test_data_2' assert store.get_pickle('test_1') is None assert len(list(dir.glob('**'))) == 1 class Unpickler(pickle.Unpickler): """This Unpickler will raise an exception if loading the pickled object imports any b2sdk module.""" _modules_to_load: set[str] def load(self): self._modules_to_load = set() b2_modules = [module for module in sys.modules if 'b2sdk' in module] for key in b2_modules: del sys.modules[key] result = super().load() for module in self._modules_to_load: importlib.import_module(module) importlib.reload(sys.modules[module]) if any('b2sdk' in module for module in sys.modules): raise RuntimeError("Loading the pickled object imported b2sdk module") return result def find_class(self, module: str, name: str) -> Any: self._modules_to_load.add(module) return super().find_class(module, name) def unpickle(data: bytes) -> Any: """Unpickling function that raises RuntimeError if unpickled object depends on b2sdk.""" return Unpickler(io.BytesIO(data)).load() def test_unpickle(): """This tests ensures that Unpickler works as expected: prevents successful unpickling of objects that depend on loading modules from b2sdk.""" from .fixtures.module_loading_b2sdk import function pickled = pickle.dumps(function) with pytest.raises(RuntimeError): unpickle(pickled) @forked def test_that_autocomplete_cache_loading_does_not_load_b2sdk(autocomplete_runner, tmp_path): cache = autocomplete_cache.AutocompleteCache( tracker=autocomplete_cache.VersionTracker(), store=autocomplete_cache.HomeCachePickleStore(tmp_path), unpickle=unpickle, # using our unpickling function that fails if b2sdk is loaded ) with autocomplete_runner('b2 '): exit, uncached_output = uncached_complete_result(cache) assert exit == 0 assert 'get-bucket' in 
uncached_output exit, output = cached_complete_result(cache, raise_exc=True) assert (exit, output) == (0, uncached_output) B2_Command_Line_Tool-3.19.1/test/unit/_cli/test_autocomplete_install.py000066400000000000000000000056571461201031300261620ustar00rootroot00000000000000###################################################################### # # File: test/unit/_cli/test_autocomplete_install.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import pathlib import shutil from test.helpers import skip_on_windows import pytest from b2._internal._cli.autocomplete_install import ( SHELL_REGISTRY, add_or_update_shell_section, ) section = "test_section" managed_by = "pytest" content = "test content" @pytest.fixture def test_file(tmp_path): yield tmp_path / "test_file.sh" def test_add_or_update_shell_section_new_section(test_file): test_file.write_text("# preexisting content\n\n") add_or_update_shell_section(test_file, section, managed_by, content) assert test_file.read_text() == f"""# preexisting content # >>> {section} >>> # This section is managed by {managed_by} . Manual edit may break automated updates. {content} # <<< {section} <<< """ def test_add_or_update_shell_section_existing_section(test_file): old_content = "old content" new_content = "new content" # Write the initial file with an existing section test_file.write_text( f"""# preexisting content # >>> {section} >>> # This section is managed by {managed_by} . Manual edit may break automated updates. {old_content} # <<< {section} <<< """ ) # Add the new content to the section add_or_update_shell_section(test_file, section, managed_by, new_content) assert test_file.read_text() == f"""# preexisting content # >>> {section} >>> # This section is managed by {managed_by} . Manual edit may break automated updates. {new_content} # <<< {section} <<< """ def test_add_or_update_shell_section_no_file(test_file): # Add the new content to the section, which should create the file add_or_update_shell_section(test_file, section, managed_by, content) assert test_file.read_text() == f""" # >>> {section} >>> # This section is managed by {managed_by} . Manual edit may break automated updates. {content} # <<< {section} <<< """ @pytest.fixture def dummy_command(homedir, monkeypatch, env): name = "dummy_command" bin_path = homedir / "bin" / name bin_path.parent.mkdir(parents=True, exist_ok=True) bin_path.symlink_to(pathlib.Path(__file__).parent / "fixtures" / f"{name}.py") monkeypatch.setenv("PATH", f"{homedir}/bin:{env['PATH']}") yield name @pytest.mark.parametrize("shell", ["bash", "zsh", "fish"]) @skip_on_windows def test_autocomplete_installer(homedir, env, shell, caplog, dummy_command): caplog.set_level(10) shell_installer = SHELL_REGISTRY.get(shell, prog=dummy_command) shell_bin = shutil.which(shell) if shell_bin is None: pytest.skip(f"{shell} is not installed") assert shell_installer.is_enabled() is False shell_installer.install() assert shell_installer.is_enabled() is True B2_Command_Line_Tool-3.19.1/test/unit/_cli/test_obj_dumps.py000066400000000000000000000041121461201031300236760ustar00rootroot00000000000000###################################################################### # # File: test/unit/_cli/test_obj_dumps.py # # Copyright 2024 Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from io import StringIO import pytest from b2._internal._cli.obj_dumps import readable_yaml_dump # Test cases as tuples: (input_data, expected_output) test_cases = [ ({"key": "value"}, "key: value\n"), ([{"a": 1, "b": 2}], "- a: 1\n b: 2\n"), ([1, 2, "false"], "- 1\n- 2\n- 'false'\n"), ({"true": True, "null": None}, "'null': null\n'true': true\n"), ([1., 0.567], "- 1.0\n- 0.567\n"), ([''], "- ''\n"), ( # make sure id and name are first, rest should be sorted alphabetically [ {"b": 2, "a": 1, "name": 4, "id": 3}, ], "- id: 3\n name: 4\n a: 1\n b: 2\n", ), ( # nested data [ { "name": "John Doe", "age": 30, "addresses": [ { "street": "123 Elm St", "city": "Somewhere", }, { "street": "456 Oak St", }, ], "address": { "street": "789 Pine St", "city": "Anywhere", "zip": "67890", }, } ], ( "- name: John Doe\n" " address: \n" " city: Anywhere\n" " street: 789 Pine St\n" " zip: '67890'\n" " addresses: \n" " - city: Somewhere\n" " street: 123 Elm St\n" " - street: 456 Oak St\n" " age: 30\n" ), ), ] @pytest.mark.parametrize("input_data,expected", test_cases) def test_readable_yaml_dump(input_data, expected): output = StringIO() readable_yaml_dump(input_data, output) assert output.getvalue() == expected B2_Command_Line_Tool-3.19.1/test/unit/_cli/test_obj_loads.py000066400000000000000000000021521461201031300236520ustar00rootroot00000000000000###################################################################### # # File: test/unit/_cli/test_obj_loads.py # # Copyright 2024 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import argparse import pytest from b2._internal._cli.obj_loads import validated_loads @pytest.mark.parametrize( "input_, expected_val", [ # json ('{"a": 1}', { "a": 1 }), ('{"a": 1, "b": 2}', { "a": 1, "b": 2 }), ('{"a": 1, "b": 2, "c": 3}', { "a": 1, "b": 2, "c": 3 }), ], ) def test_validated_loads(input_, expected_val): assert validated_loads(input_) == expected_val @pytest.mark.parametrize( "input_, error_msg", [ # not valid json nor yaml ("{", "'{' is not a valid JSON value"), ], ) def test_validated_loads__invalid_syntax(input_, error_msg): with pytest.raises(argparse.ArgumentTypeError, match=error_msg): validated_loads(input_) B2_Command_Line_Tool-3.19.1/test/unit/_cli/test_shell.py000066400000000000000000000007531461201031300230320ustar00rootroot00000000000000###################################################################### # # File: test/unit/_cli/test_shell.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import os from unittest import mock from b2._internal._cli import shell @mock.patch.dict(os.environ, {"SHELL": "/bin/bash"}) def test_detect_shell(): assert shell.detect_shell() == 'bash' B2_Command_Line_Tool-3.19.1/test/unit/_utils/000077500000000000000000000000001461201031300206765ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/test/unit/_utils/test_uri.py000066400000000000000000000072451461201031300231160ustar00rootroot00000000000000###################################################################### # # File: test/unit/_utils/test_uri.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from pathlib import Path import pytest from b2._internal._utils.uri import B2URI, B2FileIdURI, parse_uri class TestB2URI: def test__str__(self): uri = B2URI(bucket_name="testbucket", path="path/to/file") assert str(uri) == "b2://testbucket/path/to/file" @pytest.mark.parametrize( "path, expected", [ ("", True), ("path/", True), ("path/subpath", None), ], ) def test_is_dir(self, path, expected): assert B2URI("bucket", path).is_dir() is expected def test__bucket_uris_are_normalized(self): alternatives = [ B2URI("bucket"), B2URI("bucket", ""), ] assert len(set(alternatives)) == 1 assert {str(uri) for uri in alternatives} == {"b2://bucket/"} # normalized @pytest.mark.parametrize( "path, expected_uri_str", [ ("", "b2://bucket/"), ("path/", "b2://bucket/path/"), ("path/subpath", "b2://bucket/path/subpath"), ], ) def test__normalization(self, path, expected_uri_str): assert str(B2URI("bucket", path)) == expected_uri_str assert str(B2URI("bucket", path)) == str(B2URI("bucket", path)) # normalized def test_b2fileuri_str(): uri = B2FileIdURI(file_id="file123") assert str(uri) == "b2id://file123" @pytest.mark.parametrize( "uri,expected", [ ("some/local/path", Path("some/local/path")), ("./some/local/path", Path("some/local/path")), ("b2://bucket", B2URI(bucket_name="bucket")), ("b2://bucket/", B2URI(bucket_name="bucket")), ("b2://bucket/path/to/dir/", B2URI(bucket_name="bucket", path="path/to/dir/")), ("b2id://file123", B2FileIdURI(file_id="file123")), ("b2://bucket/wild[card]", B2URI(bucket_name="bucket", path="wild[card]")), ("b2://bucket/wild?card", B2URI(bucket_name="bucket", path="wild?card")), ("b2://bucket/special#char", B2URI(bucket_name="bucket", path="special#char")), ], ) def test_parse_uri(uri, expected): assert parse_uri(uri) == expected def test_parse_uri__allow_all_buckets(): assert parse_uri("b2://", allow_all_buckets=True) == B2URI("") with pytest.raises(ValueError) as exc_info: parse_uri("b2:///", allow_all_buckets=True) assert "Invalid B2 URI: all buckets URI doesn't allow non-empty path, but '/' was provided" == str( exc_info.value ) @pytest.mark.parametrize( "uri, expected_exception_message", [ ("", "URI cannot be empty"), # Test cases for invalid B2 URIs (missing netloc part) ("b2://", "Invalid B2 URI: 'b2://'"), ("b2id://", "Invalid B2 URI: 'b2id://'"), # Test cases for B2 URIs with credentials ( "b2://user@password:bucket/path", "Invalid B2 URI: credentials passed using `user@password:` syntax is not supported in URI", ), ( "b2id://user@password:file123", "Invalid B2 URI: credentials passed using `user@password:` syntax is not supported in URI", ), # Test cases for unsupported URI schemes ("unknown://bucket/path", "Unsupported URI scheme: 'unknown'"), ], ) def test_parse_uri_exceptions(uri, expected_exception_message): with pytest.raises(ValueError) as exc_info: parse_uri(uri) assert expected_exception_message in str(exc_info.value) B2_Command_Line_Tool-3.19.1/test/unit/conftest.py000066400000000000000000000121511461201031300215760ustar00rootroot00000000000000###################################################################### # # File: test/unit/conftest.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import importlib import os from unittest import mock import pytest from b2sdk.v2 import REALM_URLS from b2._internal.console_tool import _TqdmCloser from b2._internal.version_listing import CLI_VERSIONS, UNSTABLE_CLI_VERSION, get_int_version from ..helpers import b2_uri_args_v3, b2_uri_args_v4 from .helpers import RunOrDieExecutor from .test_console_tool import BaseConsoleToolTest @pytest.hookimpl def pytest_addoption(parser): parser.addoption( '--cli', default=UNSTABLE_CLI_VERSION, choices=CLI_VERSIONS, help='version of the CLI', ) @pytest.hookimpl def pytest_report_header(config): int_version = get_int_version(config.getoption('--cli')) return f"b2 apiver: {int_version}" @pytest.fixture(scope='session') def cli_version(request) -> str: return request.config.getoption('--cli') @pytest.fixture def homedir(tmp_path_factory): yield tmp_path_factory.mktemp("test_homedir") @pytest.fixture def env(homedir, monkeypatch): """Get ENV for running b2 command from shell level.""" monkeypatch.setenv("HOME", str(homedir)) monkeypatch.delenv("XDG_CONFIG_HOME", raising=False) monkeypatch.setenv("SHELL", "/bin/bash") # fix for running under github actions if "TERM" not in os.environ: monkeypatch.setenv("TERM", "xterm") yield os.environ @pytest.fixture(scope='session') def console_tool_class(cli_version): # Ensures import of the correct library to handle all the tests. module = importlib.import_module(f'b2._internal.{cli_version}.registry') return module.ConsoleTool @pytest.fixture(scope='class') def unit_test_console_tool_class(request, console_tool_class): # Ensures that the unittest class uses the correct console tool version. request.cls.console_tool_class = console_tool_class @pytest.fixture(autouse=True, scope='session') def mock_realm_urls(): with mock.patch.dict(REALM_URLS, {'production': 'http://production.example.com'}): yield @pytest.fixture def bg_executor(): """Executor for running background tasks in tests""" with RunOrDieExecutor() as executor: yield executor @pytest.fixture(autouse=True) def disable_tqdm_closer_cleanup(): with mock.patch.object(_TqdmCloser, '__exit__'): yield class ConsoleToolTester(BaseConsoleToolTest): def authorize(self): self._authorize_account() def run(self, *args, **kwargs): return self._run_command(*args, **kwargs) @pytest.fixture(scope="session", autouse=True) def mock_signal(): with mock.patch('signal.signal'): yield @pytest.fixture def b2_cli(console_tool_class): cli_tester = ConsoleToolTester() # Because of the magic the pytest does on importing and collecting fixtures, # ConsoleToolTester is not injected with the `unit_test_console_tool_class` # despite having it as a parent. # Thus, we inject it manually here. 
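    # (Added note: `unit_test_console_tool_class` works by assigning to
    # `request.cls`, which only exists for tests pytest collected on a class;
    # this `ConsoleToolTester` instance is created directly, outside collection,
    # so the class-scoped fixture never runs for it and we assign by hand.)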
cli_tester.console_tool_class = console_tool_class cli_tester.setUp() yield cli_tester cli_tester.tearDown() @pytest.fixture def authorized_b2_cli(b2_cli): b2_cli.authorize() yield b2_cli @pytest.fixture def bucket_info(b2_cli, authorized_b2_cli): bucket_name = "my-bucket" bucket_id = "bucket_0" b2_cli.run(['create-bucket', bucket_name, 'allPublic'], expected_stdout=f'{bucket_id}\n') return { 'bucketName': bucket_name, 'bucketId': bucket_id, } @pytest.fixture def bucket(bucket_info): return bucket_info['bucketName'] @pytest.fixture def local_file(tmp_path): """Set up a test file and return its path.""" filename = 'file1.txt' content = 'hello world' local_file = tmp_path / filename local_file.write_text(content) mod_time = 1500111222 os.utime(local_file, (mod_time, mod_time)) return local_file @pytest.fixture def uploaded_file_with_control_chars(b2_cli, bucket_info, local_file): filename = '\u009bC\u009bC\u009bIfile.txt' b2_cli.run(['upload-file', bucket_info["bucketName"], str(local_file), filename]) return { 'bucket': bucket_info["bucketName"], 'bucketId': bucket_info["bucketId"], 'fileName': filename, 'escapedFileName': '\\\\x9bC\\\\x9bC\\\\x9bIfile.txt', 'fileId': '1111', 'content': local_file.read_text(), } @pytest.fixture def uploaded_file(b2_cli, bucket_info, local_file): filename = 'file1.txt' b2_cli.run(['upload-file', '--quiet', bucket_info["bucketName"], str(local_file), filename]) return { 'bucket': bucket_info["bucketName"], 'bucketId': bucket_info["bucketId"], 'fileName': filename, 'fileId': '9999', 'content': local_file.read_text(), } @pytest.fixture(scope='class') def b2_uri_args(apiver_int, request): if apiver_int >= 4: fn = b2_uri_args_v4 else: fn = b2_uri_args_v3 request.cls.b2_uri_args = staticmethod(fn) B2_Command_Line_Tool-3.19.1/test/unit/console_tool/000077500000000000000000000000001461201031300220765ustar00rootroot00000000000000B2_Command_Line_Tool-3.19.1/test/unit/console_tool/__init__.py000066400000000000000000000005301461201031300242050ustar00rootroot00000000000000###################################################################### # # File: test/unit/console_tool/__init__.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### """Tests for the console_tool commands.""" B2_Command_Line_Tool-3.19.1/test/unit/console_tool/conftest.py000066400000000000000000000016471461201031300243050ustar00rootroot00000000000000###################################################################### # # File: test/unit/console_tool/conftest.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import os import sys import pytest import b2._internal.console_tool @pytest.fixture def cwd_path(tmp_path): """Set up a test directory and return its path.""" prev_cwd = os.getcwd() os.chdir(tmp_path) yield tmp_path os.chdir(prev_cwd) @pytest.fixture def b2_cli_log_fix(caplog): caplog.set_level(0) # prevent pytest from blocking logs b2._internal.console_tool.logger.setLevel(0) # reset logger level to default @pytest.fixture def mock_stdin(monkeypatch): out_, in_ = os.pipe() monkeypatch.setattr(sys, 'stdin', os.fdopen(out_)) in_f = open(in_, 'w') yield in_f in_f.close() B2_Command_Line_Tool-3.19.1/test/unit/console_tool/test_authorize_account.py000066400000000000000000000106301461201031300272350ustar00rootroot00000000000000###################################################################### # # File: test/unit/console_tool/test_authorize_account.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### from unittest import mock import pytest from b2sdk.v2 import ALL_CAPABILITIES from b2._internal._cli.const import ( B2_APPLICATION_KEY_ENV_VAR, B2_APPLICATION_KEY_ID_ENV_VAR, B2_ENVIRONMENT_ENV_VAR, ) @pytest.fixture def b2_cli_is_authorized_afterwards(b2_cli): assert b2_cli.account_info.get_account_auth_token() is None yield b2_cli assert b2_cli.account_info.get_account_auth_token() is not None def test_authorize_with_bad_key(b2_cli): expected_stdout = "" expected_stderr = """ ERROR: unable to authorize account: Invalid authorization token. Server said: secret key is wrong (unauthorized) """ b2_cli._run_command( ["authorize-account", b2_cli.account_id, "bad-app-key"], expected_stdout, expected_stderr, 1, ) assert b2_cli.account_info.get_account_auth_token() is None @pytest.mark.parametrize( "command", [ "authorize-account", "authorize_account", ], ) def test_authorize_with_good_key(b2_cli, b2_cli_is_authorized_afterwards, command): assert b2_cli.account_info.get_account_auth_token() is None expected_stderr = """ """ b2_cli._run_command([command, b2_cli.account_id, b2_cli.master_key], None, expected_stderr, 0) assert b2_cli.account_info.get_account_auth_token() is not None def test_authorize_using_env_variables(b2_cli): assert b2_cli.account_info.get_account_auth_token() is None expected_stderr = """ """ with mock.patch.dict( "os.environ", { B2_APPLICATION_KEY_ID_ENV_VAR: b2_cli.account_id, B2_APPLICATION_KEY_ENV_VAR: b2_cli.master_key, }, ): b2_cli._run_command(["authorize-account"], None, expected_stderr, 0) assert b2_cli.account_info.get_account_auth_token() is not None @pytest.mark.parametrize( "flags,realm_url", [ ([], "http://production.example.com"), (["--debug-logs"], "http://production.example.com"), (["--environment", "http://custom.example.com"], "http://custom.example.com"), (["--environment", "production"], "http://production.example.com"), (["--dev"], "http://api.backblazeb2.xyz:8180"), (["--staging"], "https://api.backblaze.net"), ], ) def test_authorize_towards_realm( b2_cli, b2_cli_is_authorized_afterwards, flags, realm_url, cwd_path, b2_cli_log_fix ): expected_stderr = f"Using {realm_url}\n" if any(f != "--debug-logs" for f in flags) else "" b2_cli._run_command( ["authorize-account", *flags, b2_cli.account_id, b2_cli.master_key], None, expected_stderr, 0, ) log_path = cwd_path / "b2_cli.log" if "--debug-logs" in 
flags: assert f"Using {realm_url}\n" in log_path.read_text() else: assert not log_path.exists() def test_authorize_towards_custom_realm_using_env(b2_cli, b2_cli_is_authorized_afterwards): expected_stderr = """ Using http://custom2.example.com """ with mock.patch.dict( "os.environ", { B2_ENVIRONMENT_ENV_VAR: "http://custom2.example.com", }, ): b2_cli._run_command( ["authorize-account", b2_cli.account_id, b2_cli.master_key], None, expected_stderr, 0, ) def test_authorize_account_prints_account_info(b2_cli): expected_json = { 'accountAuthToken': 'auth_token_0', 'accountFilePath': None, 'accountId': 'account-0', 'allowed': { 'bucketId': None, 'bucketName': None, 'capabilities': sorted(ALL_CAPABILITIES), 'namePrefix': None, }, 'apiUrl': 'http://api.example.com', 'applicationKey': 'masterKey-0', 'applicationKeyId': 'account-0', 'downloadUrl': 'http://download.example.com', 'isMasterKey': True, 's3endpoint': 'http://s3.api.example.com' } b2_cli._run_command( ['authorize-account', b2_cli.account_id, b2_cli.master_key], expected_stderr='', expected_status=0, expected_json_in_stdout=expected_json, ) B2_Command_Line_Tool-3.19.1/test/unit/console_tool/test_download_file.py000066400000000000000000000150421461201031300263170ustar00rootroot00000000000000###################################################################### # # File: test/unit/console_tool/test_download_file.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import os import pathlib from test.helpers import skip_on_windows import pytest EXPECTED_STDOUT_DOWNLOAD = ''' File name: file1.txt File id: 9999 Output file path: {output_path} File size: 11 Content type: b2/x-auto Content sha1: 2aae6c35c94fcfb415dbe95f408b9ce91ee846ed Encryption: none Retention: none Legal hold: INFO src_last_modified_millis: 1500111222000 Checksum matches Download finished ''' @pytest.mark.parametrize( 'flag,expected_stdout', [ ('--no-progress', EXPECTED_STDOUT_DOWNLOAD), ('-q', ''), ('--quiet', ''), ] ) def test_download_file_by_uri__flag_support(b2_cli, uploaded_file, tmp_path, flag, expected_stdout): output_path = tmp_path / 'output.txt' b2_cli.run( ['download-file', flag, 'b2id://9999', str(output_path)], expected_stdout=expected_stdout.format(output_path=pathlib.Path(output_path).resolve()) ) assert output_path.read_text() == uploaded_file['content'] @pytest.mark.parametrize('b2_uri', [ 'b2://my-bucket/file1.txt', 'b2id://9999', ]) def test_download_file_by_uri__b2_uri_support(b2_cli, uploaded_file, tmp_path, b2_uri): output_path = tmp_path / 'output.txt' b2_cli.run( ['download-file', b2_uri, str(output_path)], expected_stdout=EXPECTED_STDOUT_DOWNLOAD.format( output_path=pathlib.Path(output_path).resolve() ) ) assert output_path.read_text() == uploaded_file['content'] @pytest.mark.parametrize( 'flag,expected_stdout', [ ('--no-progress', EXPECTED_STDOUT_DOWNLOAD), ('-q', ''), ('--quiet', ''), ] ) def test_download_file_by_name(b2_cli, local_file, uploaded_file, tmp_path, flag, expected_stdout): output_path = tmp_path / 'output.txt' b2_cli.run( [ 'download-file-by-name', uploaded_file['bucket'], uploaded_file['fileName'], str(output_path) ], expected_stdout=EXPECTED_STDOUT_DOWNLOAD.format( output_path=pathlib.Path(output_path).resolve() ), expected_stderr= 'WARNING: download-file-by-name command is deprecated. 
Use download-file instead.\n', ) assert output_path.read_text() == uploaded_file['content'] @pytest.mark.parametrize( 'flag,expected_stdout', [ ('--no-progress', EXPECTED_STDOUT_DOWNLOAD), ('-q', ''), ('--quiet', ''), ] ) def test_download_file_by_id(b2_cli, uploaded_file, tmp_path, flag, expected_stdout): output_path = tmp_path / 'output.txt' b2_cli.run( ['download-file-by-id', flag, '9999', str(output_path)], expected_stdout=expected_stdout.format(output_path=pathlib.Path(output_path).resolve()), expected_stderr= 'WARNING: download-file-by-id command is deprecated. Use download-file instead.\n', ) assert output_path.read_text() == uploaded_file['content'] @skip_on_windows(reason='os.mkfifo is not supported on Windows') def test_download_file_by_name__named_pipe( b2_cli, local_file, uploaded_file, tmp_path, bg_executor ): output_path = tmp_path / 'output.txt' os.mkfifo(output_path) output_string = None def reader(): nonlocal output_string output_string = output_path.read_text() reader_future = bg_executor.submit(reader) b2_cli.run( [ 'download-file-by-name', '--no-progress', uploaded_file['bucket'], uploaded_file['fileName'], str(output_path) ], expected_stdout=EXPECTED_STDOUT_DOWNLOAD.format( output_path=pathlib.Path(output_path).resolve() ), expected_stderr= 'WARNING: download-file-by-name command is deprecated. Use download-file instead.\n', ) reader_future.result(timeout=1) assert output_string == uploaded_file['content'] @pytest.fixture def uploaded_stdout_txt(b2_cli, bucket, local_file, tmp_path): local_file.write_text('non-mocked /dev/stdout test ignore me') b2_cli.run(['upload-file', bucket, str(local_file), 'stdout.txt']) return { 'bucket': bucket, 'fileName': 'stdout.txt', 'content': local_file.read_text(), } def test_download_file_by_name__to_stdout_by_alias( b2_cli, bucket, uploaded_stdout_txt, tmp_path, capfd ): """Test download_file_by_name stdout alias support""" b2_cli.run( ['download-file-by-name', '--no-progress', bucket, uploaded_stdout_txt['fileName'], '-'], expected_stderr= 'WARNING: download-file-by-name command is deprecated. 
Use download-file instead.\n', ) assert capfd.readouterr().out == uploaded_stdout_txt['content'] assert not pathlib.Path('-').exists() def test_cat__b2_uri(b2_cli, bucket, uploaded_stdout_txt, tmp_path, capfd): """Test download_file_by_name stdout alias support""" b2_cli.run(['cat', '--no-progress', f"b2://{bucket}/{uploaded_stdout_txt['fileName']}"],) assert capfd.readouterr().out == uploaded_stdout_txt['content'] def test_cat__b2_uri__invalid(b2_cli, capfd): b2_cli.run( ['cat', "nothing/meaningful"], expected_stderr=None, expected_status=2, ) assert "argument B2_URI: Unsupported URI scheme: ''" in capfd.readouterr().err def test_cat__b2_uri__not_a_file(b2_cli, bucket, capfd): b2_cli.run( ['cat', "b2://bucket/dir/subdir/"], expected_stderr=None, expected_status=2, ) assert "argument B2_URI: B2 URI pointing to a file-like object is required" in capfd.readouterr( ).err def test_cat__b2id_uri(b2_cli, bucket, uploaded_stdout_txt, tmp_path, capfd): """Test download_file_by_name stdout alias support""" b2_cli.run(['cat', '--no-progress', "b2id://9999"],) assert capfd.readouterr().out == uploaded_stdout_txt['content'] def test__download_file__threads(b2_cli, local_file, uploaded_file, tmp_path): num_threads = 13 output_path = tmp_path / 'output.txt' b2_cli.run( [ 'download-file', '--no-progress', '--threads', str(num_threads), 'b2://my-bucket/file1.txt', str(output_path) ] ) assert output_path.read_text() == uploaded_file['content'] assert b2_cli.console_tool.api.services.download_manager.get_thread_pool_size() == num_threads B2_Command_Line_Tool-3.19.1/test/unit/console_tool/test_file_info.py000066400000000000000000000035131461201031300254430ustar00rootroot00000000000000###################################################################### # # File: test/unit/console_tool/test_file_info.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import pytest @pytest.fixture def uploaded_download_version(b2_cli, bucket_info, uploaded_file): return { "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileId": uploaded_file["fileId"], "fileInfo": { "src_last_modified_millis": "1500111222000" }, "fileName": "file1.txt", "serverSideEncryption": { "mode": "none" }, "size": 11, "uploadTimestamp": 5000, } @pytest.fixture def uploaded_file_version(b2_cli, bucket_info, uploaded_file, uploaded_download_version): return { **uploaded_download_version, "accountId": b2_cli.account_id, "action": "upload", "bucketId": uploaded_file["bucketId"], } def test_get_file_info(b2_cli, uploaded_file_version): b2_cli.run( ["get-file-info", uploaded_file_version["fileId"]], expected_json_in_stdout=uploaded_file_version, expected_stderr='WARNING: get-file-info command is deprecated. 
Use file-info instead.\n', ) def test_file_info__b2_uri(b2_cli, bucket, uploaded_download_version): b2_cli.run( [ "file-info", f'b2://{bucket}/{uploaded_download_version["fileName"]}', ], expected_json_in_stdout=uploaded_download_version, ) def test_file_info__b2id_uri(b2_cli, uploaded_file_version): b2_cli.run( ["file-info", f'b2id://{uploaded_file_version["fileId"]}'], expected_json_in_stdout=uploaded_file_version, ) B2_Command_Line_Tool-3.19.1/test/unit/console_tool/test_get_url.py000066400000000000000000000033011461201031300251450ustar00rootroot00000000000000###################################################################### # # File: test/unit/console_tool/test_get_url.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import pytest @pytest.fixture def uploaded_file_url(bucket_info, uploaded_file): return f"http://download.example.com/file/{bucket_info['bucketName']}/{uploaded_file['fileName']}" @pytest.fixture def uploaded_file_url_by_id(uploaded_file): return f"http://download.example.com/b2api/v2/b2_download_file_by_id?fileId={uploaded_file['fileId']}" def test_make_url(b2_cli, uploaded_file, uploaded_file_url_by_id): b2_cli.run( ["make-url", uploaded_file["fileId"]], expected_stdout=f"{uploaded_file_url_by_id}\n", expected_stderr='WARNING: make-url command is deprecated. Use get-url instead.\n', ) def test_make_friendly_url(b2_cli, bucket, uploaded_file, uploaded_file_url): b2_cli.run( ["make-friendly-url", bucket, uploaded_file["fileName"]], expected_stdout=f"{uploaded_file_url}\n", expected_stderr='WARNING: make-friendly-url command is deprecated. Use get-url instead.\n', ) def test_get_url__b2_uri(b2_cli, bucket, uploaded_file, uploaded_file_url): b2_cli.run( [ "get-url", f'b2://{bucket}/{uploaded_file["fileName"]}', ], expected_stdout=f"{uploaded_file_url}\n", ) def test_get_url__b2id_uri(b2_cli, uploaded_file, uploaded_file_url_by_id): b2_cli.run( ["get-url", f'b2id://{uploaded_file["fileId"]}'], expected_stdout=f"{uploaded_file_url_by_id}\n", ) B2_Command_Line_Tool-3.19.1/test/unit/console_tool/test_help.py000066400000000000000000000023611461201031300244410ustar00rootroot00000000000000###################################################################### # # File: test/unit/console_tool/test_help.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. 
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################
import pytest


@pytest.mark.parametrize(
    "flag, included, excluded",
    [
        # --help shouldn't show deprecated commands
        (
            "--help",
            [" b2 download-file ", "-h", "--help-all"],
            [" download-file-by-name ", "(DEPRECATED)"],
        ),
        # --help-all should show deprecated commands, but marked as deprecated
        (
            "--help-all",
            ["(DEPRECATED) b2 download-file-by-name ", "-h", "--help-all"],
            [],
        ),
    ],
)
def test_help(b2_cli, flag, included, excluded, capsys):
    b2_cli.run([flag], expected_stdout=None)
    out = capsys.readouterr().out

    found = set()
    for i in included:
        if i in out:
            found.add(i)
    for e in excluded:
        if e in out:
            found.add(e)
    assert found.issuperset(included), f"expected {included!r} in {out!r}"
    assert found.isdisjoint(excluded), f"expected {excluded!r} not in {out!r}"
B2_Command_Line_Tool-3.19.1/test/unit/console_tool/test_install_autocomplete.py000066400000000000000000000023601461201031300277370ustar00rootroot00000000000000
######################################################################
#
# File: test/unit/console_tool/test_install_autocomplete.py
#
# Copyright 2024 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################
import contextlib
import shutil
from test.helpers import skip_on_windows

import pexpect
import pytest


@contextlib.contextmanager
def pexpect_shell(shell_bin, env):
    p = pexpect.spawn(f"{shell_bin} -i", env=env, maxread=1000)
    p.setwinsize(100, 100)  # required to see all suggestions in tests
    yield p
    p.close()


@pytest.mark.parametrize("shell", ["bash", "zsh", "fish"])
@skip_on_windows
def test_install_autocomplete(b2_cli, env, shell, monkeypatch):
    shell_bin = shutil.which(shell)
    if shell_bin is None:
        pytest.skip(f"{shell} is not installed")
    monkeypatch.setenv("SHELL", shell_bin)

    b2_cli.run(
        ["install-autocomplete"],
        expected_part_of_stdout=f"Autocomplete successfully installed for {shell}",
    )

    with pexpect_shell(shell_bin, env=env) as pshell:
        pshell.send("b2 \t\t")
        pshell.expect_exact(["authorize-account", "download-file", "get-bucket"], timeout=30)
B2_Command_Line_Tool-3.19.1/test/unit/console_tool/test_ls.py000066400000000000000000000040751461201031300241330ustar00rootroot00000000000000
######################################################################
#
# File: test/unit/console_tool/test_ls.py
#
# Copyright 2024 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################
import pytest


def test_ls__without_bucket_name(b2_cli, bucket_info):
    expected_output = "bucket_0 allPublic my-bucket\n"

    b2_cli.run(["ls"], expected_stdout=expected_output)
    b2_cli.run(["ls", "b2://"], expected_stdout=expected_output)


def test_ls__without_bucket_name__json(b2_cli, bucket_info):
    expected_output = [
        {
            "accountId": "account-0",
            "bucketId": "bucket_0",
            "bucketInfo": {},
            "bucketName": "my-bucket",
            "bucketType": "allPublic",
            "corsRules": [],
            "defaultRetention": {
                "mode": None
            },
            "defaultServerSideEncryption": {
                "mode": "none"
            },
            "isFileLockEnabled": False,
            "lifecycleRules": [],
            "options": [],
            "replication": {
                "asReplicationDestination": None,
                "asReplicationSource": None,
            },
            "revision": 1,
        }
    ]
    b2_cli.run(["ls", "--json"], expected_json_in_stdout=expected_output)
    b2_cli.run(["ls", "--json", "b2://"], expected_json_in_stdout=expected_output)


@pytest.mark.parametrize("flag", ["--long", "--recursive", "--replication"])
def test_ls__without_bucket_name__option_not_supported(b2_cli, bucket_info, flag):
    b2_cli.run(
        ["ls", flag],
        expected_stderr=f"ERROR: Cannot use {flag} option without specifying a bucket name\n",
        expected_status=1,
    )
B2_Command_Line_Tool-3.19.1/test/unit/console_tool/test_notification_rules.py000066400000000000000000000157661461201031300274220ustar00rootroot00000000000000
######################################################################
#
# File: test/unit/console_tool/test_notification_rules.py
#
# Copyright 2023 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################
import json

import pytest


@pytest.fixture()
def bucket_notification_rule(b2_cli, bucket):
    rule = {
        "eventTypes": ["b2:ObjectCreated:*"],
        "isEnabled": True,
        "isSuspended": False,
        "name": "test-rule",
        "objectNamePrefix": "",
        "suspensionReason": "",
        "targetConfiguration": {
            "targetType": "webhook",
            "url": "https://example.com/webhook",
        },
    }
    _, stdout, _ = b2_cli.run(
        [
            "notification-rules",
            "create",
            "--json",
            f"b2://{bucket}",
            "test-rule",
            "--webhook-url",
            "https://example.com/webhook",
            "--event-type",
            "b2:ObjectCreated:*",
        ],
    )
    actual_rule = json.loads(stdout)
    assert actual_rule == rule
    return actual_rule


def test_notification_rules__list_all(b2_cli, bucket, bucket_notification_rule):
    _, stdout, _ = b2_cli.run([
        "notification-rules",
        "list",
        f"b2://{bucket}",
    ])
    assert (
        stdout == f"""\
Notification rules for b2://{bucket}/ :
- name: test-rule
  eventTypes: 
  - b2:ObjectCreated:*
  isEnabled: true
  isSuspended: false
  objectNamePrefix: ''
  suspensionReason: ''
  targetConfiguration: 
    targetType: webhook
    url: https://example.com/webhook
"""
    )


def test_notification_rules__list_all_json(b2_cli, bucket, bucket_notification_rule):
    _, stdout, _ = b2_cli.run([
        "notification-rules",
        "list",
        "--json",
        f"b2://{bucket}",
    ])
    assert json.loads(stdout) == [bucket_notification_rule]


def test_notification_rules__update(b2_cli, bucket, bucket_notification_rule):
    bucket_notification_rule["isEnabled"] = False
    _, stdout, _ = b2_cli.run(
        [
            "notification-rules",
            "update",
            "--json",
            f"b2://{bucket}",
            bucket_notification_rule["name"],
            "--disable",
            "--custom-header",
            "X-Custom-Header=value=1",
        ],
    )
    bucket_notification_rule["targetConfiguration"]["customHeaders"] = {
        "X-Custom-Header": "value=1"
    }
    assert json.loads(stdout) == bucket_notification_rule


def
test_notification_rules__update__no_such_rule(b2_cli, bucket, bucket_notification_rule): b2_cli.run( [ "notification-rules", "update", f"b2://{bucket}", f'{bucket_notification_rule["name"]}-unexisting', "--disable", ], expected_stderr=( "ERROR: rule with name 'test-rule-unexisting' does not exist on bucket " "'my-bucket', available rules: ['test-rule']\n" ), expected_status=1, ) def test_notification_rules__update__custom_header_malformed( b2_cli, bucket, bucket_notification_rule ): bucket_notification_rule["isEnabled"] = False _, stdout, _ = b2_cli.run( [ "notification-rules", "update", "--json", f"b2://{bucket}", bucket_notification_rule["name"], "--disable", "--custom-header", "X-Custom-Header: value", ], ) bucket_notification_rule["targetConfiguration"]["customHeaders"] = { "X-Custom-Header: value": "" } assert json.loads(stdout) == bucket_notification_rule def test_notification_rules__delete(b2_cli, bucket, bucket_notification_rule): _, stdout, _ = b2_cli.run( [ "notification-rules", "delete", f"b2://{bucket}", bucket_notification_rule["name"], ], ) assert stdout == "Rule 'test-rule' has been deleted from b2://my-bucket/\n" def test_notification_rules__delete_no_such_rule(b2_cli, bucket, bucket_notification_rule): b2_cli.run( [ "notification-rules", "delete", f"b2://{bucket}", f'{bucket_notification_rule["name"]}-unexisting', ], expected_stderr=( "ERROR: no such rule to delete: 'test-rule-unexisting', available rules: ['test-rule'];" " No rules have been deleted.\n" ), expected_status=1, ) @pytest.mark.parametrize( "args,expected_stdout", [ (["-q"], ""), ([], "No notification rules for b2://my-bucket/\n"), (["--json"], "[]\n"), ], ) def test_notification_rules__no_rules(b2_cli, bucket, args, expected_stdout): b2_cli.run( ["notification-rules", "list", f"b2://{bucket}", *args], expected_stdout=expected_stdout, ) def test_notification_rules__disable_enable(b2_cli, bucket, bucket_notification_rule): _, stdout, _ = b2_cli.run( [ "notification-rules", "disable", "--json", f"b2://{bucket}", bucket_notification_rule["name"], ], ) assert json.loads(stdout) == {**bucket_notification_rule, "isEnabled": False} _, stdout, _ = b2_cli.run( [ "notification-rules", "enable", "--json", f"b2://{bucket}", bucket_notification_rule["name"], ], ) assert json.loads(stdout) == {**bucket_notification_rule, "isEnabled": True} @pytest.mark.parametrize( "command", ["disable", "enable"], ) def test_notification_rules__disable_enable__no_such_rule( b2_cli, bucket, bucket_notification_rule, command ): b2_cli.run( [ "notification-rules", command, f"b2://{bucket}", f'{bucket_notification_rule["name"]}-unexisting', ], expected_stderr=( "ERROR: rule with name 'test-rule-unexisting' does not exist on bucket " "'my-bucket', available rules: ['test-rule']\n" ), expected_status=1, ) def test_notification_rules__sign_secret(b2_cli, bucket, bucket_notification_rule): b2_cli.run( [ "notification-rules", "update", "--json", f"b2://{bucket}", bucket_notification_rule["name"], "--sign-secret", "new-secret", ], expected_status=2, ) _, stdout, _ = b2_cli.run( [ "notification-rules", "update", "--json", f"b2://{bucket}", bucket_notification_rule["name"], "--sign-secret", "7" * 32, ], ) bucket_notification_rule["targetConfiguration"]["hmacSha256SigningSecret"] = "7" * 32 assert json.loads(stdout) == bucket_notification_rule assert json.loads(b2_cli.run(["notification-rules", "list", "--json", f"b2://{bucket}"],)[1] ) == [bucket_notification_rule] 
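# Aside (not part of the original suite): the `--sign-secret` test above only
# accepts a 32-character secret, which is stored as `hmacSha256SigningSecret`.
# The sketch below shows how a webhook receiver could verify an HMAC-SHA256
# signature over the request body using only the standard library; the `v1=`
# prefix and the notion of a signature header are illustrative assumptions,
# not a documented B2 format guarantee.


def test_webhook_signature_verification_sketch():
    import hashlib
    import hmac

    secret = "7" * 32
    body = b'{"eventType": "b2:ObjectCreated:Upload"}'

    # the sender computes this and ships it in a request header
    signature = "v1=" + hmac.new(secret.encode(), body, hashlib.sha256).hexdigest()

    # the receiver recomputes it and compares in constant time
    expected = "v1=" + hmac.new(secret.encode(), body, hashlib.sha256).hexdigest()
    assert hmac.compare_digest(signature, expected)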
B2_Command_Line_Tool-3.19.1/test/unit/console_tool/test_upload_file.py000066400000000000000000000125301461201031300257730ustar00rootroot00000000000000###################################################################### # # File: test/unit/console_tool/test_upload_file.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import os from test.helpers import skip_on_windows import b2 def test_upload_file__file_info_src_last_modified_millis_and_headers(b2_cli, bucket, tmpdir): """Test upload_file supports manually specifying file info src_last_modified_millis""" filename = 'file1.txt' content = 'hello world' local_file1 = tmpdir.join('file1.txt') local_file1.write(content) expected_json = { "action": "upload", "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "fileInfo": { "b2-cache-control": "max-age=3600", "b2-expires": "Thu, 01 Dec 2050 16:00:00 GMT", "b2-content-language": "en", "b2-content-disposition": "attachment", "b2-content-encoding": "gzip", "src_last_modified_millis": "1" }, "fileName": filename, "size": len(content), } b2_cli.run( [ 'upload-file', '--no-progress', '--info=src_last_modified_millis=1', 'my-bucket', '--cache-control', 'max-age=3600', '--expires', 'Thu, 01 Dec 2050 16:00:00 GMT', '--content-language', 'en', '--content-disposition', 'attachment', '--content-encoding', 'gzip', str(local_file1), 'file1.txt' ], expected_json_in_stdout=expected_json, remove_version=True, ) @skip_on_windows def test_upload_file__named_pipe(b2_cli, bucket, tmpdir, bg_executor): """Test upload_file supports named pipes""" filename = 'named_pipe.txt' content = 'hello world' local_file1 = tmpdir.join('file1.txt') os.mkfifo(str(local_file1)) writer = bg_executor.submit( local_file1.write, content ) # writer will block until content is read expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { "action": "upload", "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileName": filename, "size": len(content), } b2_cli.run( ['upload-file', '--no-progress', 'my-bucket', str(local_file1), filename], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, ) writer.result(timeout=1) def test_upload_file__hyphen_file_instead_of_stdin(b2_cli, bucket, tmpdir, monkeypatch): """Test upload_file will upload file named `-` instead of stdin by default""" # TODO remove this in v4 assert b2.__version__ < '4', "`-` filename should not be supported in next major version of CLI" filename = 'stdin.txt' content = "I'm very rare creature, a file named '-'" monkeypatch.chdir(str(tmpdir)) source_file = tmpdir.join('-') source_file.write(content) expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { "action": "upload", "contentSha1": "ab467567b98216a255f77aef08aa2c418073d974", "fileName": filename, "size": len(content), } b2_cli.run( ['upload-file', '--no-progress', 'my-bucket', '-', filename], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, expected_stderr= "WARNING: Filename `-` won't be supported in the future and will always be treated as stdin alias.\n", ) def test_upload_file__stdin(b2_cli, bucket, tmpdir, mock_stdin): """Test upload_file stdin alias support""" content = "stdin input" filename = 'stdin.txt' expected_stdout = f'URL 
by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { "action": "upload", "contentSha1": "2ce72aa159d1f190fddf295cc883f20c4787a751", "fileName": filename, "size": len(content), } mock_stdin.write(content) mock_stdin.close() b2_cli.run( ['upload-file', '--no-progress', 'my-bucket', '-', filename], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, ) def test_upload_file__threads_setting(b2_cli, bucket, tmp_path): """Test upload_file supports setting number of threads""" num_threads = 66 filename = 'file1.txt' content = 'hello world' local_file1 = tmp_path / 'file1.txt' local_file1.write_text(content) expected_json = { "action": "upload", "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "fileInfo": { "src_last_modified_millis": f"{local_file1.stat().st_mtime_ns // 1000000}" }, "fileName": filename, "size": len(content), } b2_cli.run( [ 'upload-file', '--no-progress', 'my-bucket', '--threads', str(num_threads), str(local_file1), 'file1.txt' ], expected_json_in_stdout=expected_json, remove_version=True, ) assert b2_cli.console_tool.api.services.upload_manager.get_thread_pool_size() == num_threads B2_Command_Line_Tool-3.19.1/test/unit/console_tool/test_upload_unbound_stream.py000066400000000000000000000103371461201031300301040ustar00rootroot00000000000000###################################################################### # # File: test/unit/console_tool/test_upload_unbound_stream.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import os from test.helpers import skip_on_windows from b2sdk.v2 import DEFAULT_MIN_PART_SIZE @skip_on_windows def test_upload_unbound_stream__named_pipe(b2_cli, bucket, tmpdir, bg_executor): """Test upload_unbound_stream supports named pipes""" filename = 'named_pipe.txt' content = 'hello world' fifo_file = tmpdir.join('fifo_file.txt') os.mkfifo(str(fifo_file)) writer = bg_executor.submit(fifo_file.write, content) # writer will block until content is read expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { "action": "upload", "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "fileName": filename, "size": len(content), } b2_cli.run( ['upload-unbound-stream', '--no-progress', 'my-bucket', str(fifo_file), filename], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, ) writer.result(timeout=1) def test_upload_unbound_stream__stdin(b2_cli, bucket, tmpdir, mock_stdin): """Test upload_unbound_stream stdin alias support""" content = "stdin input" filename = 'stdin.txt' expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { "action": "upload", "contentSha1": "2ce72aa159d1f190fddf295cc883f20c4787a751", "fileName": filename, "size": len(content), } mock_stdin.write(content) mock_stdin.close() b2_cli.run( ['upload-unbound-stream', '--no-progress', 'my-bucket', '-', filename], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, ) @skip_on_windows def test_upload_unbound_stream__with_part_size_options( b2_cli, bucket, tmpdir, mock_stdin, bg_executor ): """Test upload_unbound_stream with part size options""" part_size = DEFAULT_MIN_PART_SIZE expected_size = part_size + 500 # has to be bigger to force multipart upload filename = 
'named_pipe.txt' fifo_file = tmpdir.join('fifo_file.txt') os.mkfifo(str(fifo_file)) writer = bg_executor.submit( lambda: fifo_file.write("x" * expected_size) ) # writer will block until content is read expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { "action": "upload", "fileName": filename, "size": expected_size, } b2_cli.run( [ 'upload-unbound-stream', '--min-part-size', str(DEFAULT_MIN_PART_SIZE), '--part-size', str(part_size), '--no-progress', 'my-bucket', str(fifo_file), filename, ], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, ) writer.result(timeout=1) def test_upload_unbound_stream__regular_file(b2_cli, bucket, tmpdir): """Test upload_unbound_stream regular file support""" content = "stdin input" filename = 'file.txt' filepath = tmpdir.join(filename) filepath.write(content) expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { "action": "upload", "contentSha1": "2ce72aa159d1f190fddf295cc883f20c4787a751", "fileName": filename, "size": len(content), } b2_cli.run( ['upload-unbound-stream', '--no-progress', 'my-bucket', str(filepath), filename], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, expected_stderr= "WARNING: You are using a stream upload command to upload a regular file. While it will work, it is inefficient. Use of upload-file command is recommended.\n", ) B2_Command_Line_Tool-3.19.1/test/unit/helpers.py000066400000000000000000000023761461201031300214230ustar00rootroot00000000000000###################################################################### # # File: test/unit/helpers.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import concurrent.futures import sys class RunOrDieExecutor(concurrent.futures.ThreadPoolExecutor): """ Deadly ThreadPoolExecutor, which ensures all task are quickly closed before exiting. Only really usable in tests. """ def __exit__(self, exc_type, exc_val, exc_tb): self.shutdown(wait=False, cancel_futures=True) return super().__exit__(exc_type, exc_val, exc_tb) if sys.version_info < (3, 9): # shutdown(cancel_futures=True) is Python 3.9+ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._futures = [] def shutdown(self, wait=True, cancel_futures=False): if cancel_futures: for future in self._futures: future.cancel() super().shutdown(wait=wait) def submit(self, *args, **kwargs): future = super().submit(*args, **kwargs) self._futures.append(future) return future B2_Command_Line_Tool-3.19.1/test/unit/test_apiver.py000066400000000000000000000030601461201031300222750ustar00rootroot00000000000000###################################################################### # # File: test/unit/test_apiver.py # # Copyright 2023 Backblaze Inc. All Rights Reserved. 
# # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import unittest import pytest @pytest.fixture def inject_apiver_int(request, apiver_int): request.cls.apiver_int = apiver_int @pytest.mark.usefixtures('inject_apiver_int') class UnitTestClass(unittest.TestCase): apiver_int: int @pytest.mark.apiver(to_ver=3) def test_passes_below_and_on_v3(self): assert self.apiver_int <= 3 @pytest.mark.apiver(from_ver=4) def test_passes_above_and_on_v4(self): assert self.apiver_int >= 4 @pytest.mark.apiver(3) def test_passes_only_on_v3(self): assert self.apiver_int == 3 @pytest.mark.apiver(4) def test_passes_only_on_v4(self): assert self.apiver_int == 4 @pytest.mark.apiver(3, 4) def test_passes_on_both_v3_and_v4(self): assert self.apiver_int in {3, 4} @pytest.mark.apiver(to_ver=3) def test_passes_below_and_on_v3(apiver_int): assert apiver_int <= 3 @pytest.mark.apiver(from_ver=4) def test_passes_above_and_on_v4(apiver_int): assert apiver_int >= 4 @pytest.mark.apiver(3) def test_passes_only_on_v3(apiver_int): assert apiver_int == 3 @pytest.mark.apiver(4) def test_passes_only_on_v4(apiver_int): assert apiver_int == 4 @pytest.mark.apiver(3, 4) def test_passes_on_both_v3_and_v4(apiver_int): assert apiver_int in {3, 4} B2_Command_Line_Tool-3.19.1/test/unit/test_arg_parser.py000066400000000000000000000062031461201031300231360ustar00rootroot00000000000000###################################################################### # # File: test/unit/test_arg_parser.py # # Copyright 2020 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import argparse import sys from b2._internal._cli.arg_parser_types import ( parse_comma_separated_list, parse_millis_from_float_timestamp, parse_range, ) from b2._internal.arg_parser import B2ArgumentParser from b2._internal.console_tool import B2 from .test_base import TestBase class TestCustomArgTypes(TestBase): def test_parse_comma_separated_list(self): self.assertEqual([''], parse_comma_separated_list('')) self.assertEqual(['1', '2', '3'], parse_comma_separated_list('1,2,3')) def test_parse_millis_from_float_timestamp(self): self.assertEqual(1367900664000, parse_millis_from_float_timestamp('1367900664')) self.assertEqual(1367900664152, parse_millis_from_float_timestamp('1367900664.152')) with self.assertRaises(ValueError): parse_millis_from_float_timestamp('!$@$%@!@$') def test_parse_range(self): self.assertEqual((1, 2), parse_range('1,2')) with self.assertRaises(argparse.ArgumentTypeError): parse_range('1') with self.assertRaises(argparse.ArgumentTypeError): parse_range('1,2,3') with self.assertRaises(ValueError): parse_range('!@#,%^&') class TestNonUTF8TerminalSupport(TestBase): class ASCIIEncodedStream: def __init__(self, original_stream): self.original_stream = original_stream self.encoding = 'ascii' def write(self, data): if isinstance(data, str): data = data.encode(self.encoding, 'strict') self.original_stream.buffer.write(data) def flush(self): self.original_stream.flush() def check_help_string(self, command_class, command_name): help_string = command_class.__doc__ # create a parser with a help message that is based on the command_class.__doc__ string parser = B2ArgumentParser(description=help_string) try: old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = TestNonUTF8TerminalSupport.ASCIIEncodedStream(sys.stdout) sys.stderr = 
TestNonUTF8TerminalSupport.ASCIIEncodedStream(sys.stderr) parser.print_help() except UnicodeEncodeError as e: self.fail( f'Failed to encode help message for command "{command_name}" on a non-UTF-8 terminal: {e}' ) finally: # Restore original stdout and stderr sys.stdout = old_stdout sys.stderr = old_stderr def test_help_in_non_utf8_terminal(self): command_classes = dict(B2.subcommands_registry.items()) command_classes['b2'] = B2 for command_name, command_class in command_classes.items(): with self.subTest(command_class=command_class, command_name=command_name): self.check_help_string(command_class, command_name) B2_Command_Line_Tool-3.19.1/test/unit/test_base.py000066400000000000000000000023521461201031300217240ustar00rootroot00000000000000###################################################################### # # File: test/unit/test_base.py # # Copyright 2019 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import re import unittest from contextlib import contextmanager from typing import Type import pytest @pytest.mark.usefixtures('unit_test_console_tool_class', 'b2_uri_args') class TestBase(unittest.TestCase): console_tool_class: Type @contextmanager def assertRaises(self, exc, msg=None): try: yield except exc as e: if msg is not None: if msg != str(e): assert False, f"expected message '{msg}', but got '{str(e)}'" else: assert False, f'should have thrown {exc}' @contextmanager def assertRaisesRegexp(self, expected_exception, expected_regexp): try: yield except expected_exception as e: if not re.search(expected_regexp, str(e)): assert False, f"expected message '{expected_regexp}', but got '{str(e)}'" else: assert False, f'should have thrown {expected_exception}' B2_Command_Line_Tool-3.19.1/test/unit/test_console_tool.py000066400000000000000000003416741461201031300235260ustar00rootroot00000000000000###################################################################### # # File: test/unit/test_console_tool.py # # Copyright 2019 Backblaze Inc. All Rights Reserved. # # License https://www.backblaze.com/using_b2_code.html # ###################################################################### import json import os import pathlib import re from functools import lru_cache from io import StringIO from itertools import chain, product from test.helpers import skip_on_windows from typing import List, Optional from unittest import mock import pytest from b2sdk import v1 from b2sdk.v2 import ( ALL_CAPABILITIES, B2Api, B2HttpApiConfig, ProgressReport, RawSimulator, StubAccountInfo, TempDir, UploadSourceBytes, fix_windows_path_limit, ) from b2sdk.v2.exception import Conflict # Any error for testing fast-fail of the rm command. 
from more_itertools import one from b2._internal._b2v4.registry import Rm as v4Rm from b2._internal._cli.const import ( B2_APPLICATION_KEY_ENV_VAR, B2_APPLICATION_KEY_ID_ENV_VAR, B2_ENVIRONMENT_ENV_VAR, ) from b2._internal.b2v3.rm import Rm as v3Rm from b2._internal.version import VERSION from .test_base import TestBase def file_mod_time_millis(path): return int(os.path.getmtime(path) * 1000) class BaseConsoleToolTest(TestBase): RE_API_VERSION = re.compile(r"\/v\d\/") json_pattern = re.compile(r'[^{,^\[]*(?P<dict_json>{.*})|(?P<list_json>\[.*]).*', re.DOTALL) def setUp(self): self.account_info = StubAccountInfo() # this is a hack - B2HttpApiConfig expects a class, but we want to use an instance # which will be reused during the test, thus instead of a class we pass a lambda which # returns the already predefined instance self.raw_simulator = RawSimulator() self.api_config = B2HttpApiConfig(_raw_api_class=lambda *args, **kwargs: self.raw_simulator) @lru_cache(maxsize=None) def _get_b2api(**kwargs) -> B2Api: kwargs.pop('profile', None) return B2Api( account_info=self.account_info, cache=None, api_config=self.api_config, **kwargs, ) self.console_tool_class._initialize_b2_api = lambda cls, args, kwargs: _get_b2api(**kwargs) self.b2_api = _get_b2api() self.raw_api = self.b2_api.session.raw_api self.account_id, self.master_key = self.raw_api.create_account() for env_var_name in [ B2_APPLICATION_KEY_ID_ENV_VAR, B2_APPLICATION_KEY_ENV_VAR, B2_ENVIRONMENT_ENV_VAR, ]: os.environ.pop(env_var_name, None) def _get_stdouterr(self): stdout = StringIO() stderr = StringIO() return stdout, stderr def _run_command_ignore_output(self, argv): """ Runs the given command in the console tool, checking that it succeeds, but ignoring the stdout. """ stdout, stderr = self._get_stdouterr() actual_status = self.console_tool_class(stdout, stderr).run_command(['b2'] + argv) actual_stderr = self._trim_trailing_spaces(stderr.getvalue()) if actual_stderr != '': print('ACTUAL STDERR: ', repr(actual_stderr)) print(actual_stderr) assert re.match(r'^(|Using https?://[\w.]+)$', actual_stderr), f"stderr: {actual_stderr!r}" self.assertEqual(0, actual_status, 'exit status code') def _trim_leading_spaces(self, s): """ Takes the contents of a triple-quoted string, and removes the leading newline and leading spaces that come from it being indented with code. """ # The first line starts on the line following the triple # quote, so the first line after splitting can be discarded.
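# For example, '\n    foo\n'.split('\n') == ['', '    foo', ''] - the leading '' is the artifact dropped below.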
lines = s.split('\n') if lines[0] == '': lines = lines[1:] if len(lines) == 0: return '' # Count the leading spaces space_count = min((self._leading_spaces(line) for line in lines if line != ''), default=0) # Remove the leading spaces from each line, based on the line # with the fewest leading spaces leading_spaces = ' ' * space_count assert all( line.startswith(leading_spaces) or line == '' for line in lines ), 'all lines have leading spaces' return '\n'.join('' if line == '' else line[space_count:] for line in lines) def _leading_spaces(self, s): space_count = 0 while space_count < len(s) and s[space_count] == ' ': space_count += 1 return space_count def _trim_trailing_spaces(self, s): return '\n'.join(line.rstrip() for line in s.split('\n')) def _make_local_file(self, temp_dir, file_name): local_path = os.path.join(temp_dir, file_name) with open(local_path, 'wb') as f: f.write(b'hello world') return local_path def _read_file(self, local_path): with open(local_path, 'rb') as f: return f.read() def _remove_api_version_number(self, s): return re.sub(self.RE_API_VERSION, '/vx/', s) def _normalize_expected_output(self, text, format_vars=None): if text is None: return None format_vars = format_vars or {} return self._trim_leading_spaces(text).format( account_id=self.account_id, master_key=self.master_key, **format_vars ) def assertDictIsContained(self, subset, superset): """Asserts that all keys in `subset` are present in `superset` and their corresponding values are the same""" truncated_superset = {k: v for k, v in superset.items() if k in subset} self.maxDiff = None self.assertEqual(subset, truncated_superset) def assertListOfDictsIsContained(self, list_of_subsets, list_of_supersets): """Performs the same assertion as assertDictIsContained, but for dicts in two lists iteratively""" self.assertEqual(len(list_of_subsets), len(list_of_supersets)) truncated_list_of_supersets = [] for subset, superset in zip(list_of_subsets, list_of_supersets): truncated_list_of_supersets.append({k: v for k, v in superset.items() if k in subset}) self.assertEqual(list_of_subsets, truncated_list_of_supersets) def _authorize_account(self): """ Prepare for a test by authorizing an account and getting an account auth token """ self._run_command_ignore_output(['authorize-account', self.account_id, self.master_key]) def _clear_account(self): """ Clear account auth data """ self._run_command_ignore_output(['clear-account']) def _create_my_bucket(self): self._run_command(['create-bucket', 'my-bucket', 'allPublic'], 'bucket_0\n', '', 0) def _run_command( self, argv, expected_stdout=None, expected_stderr='', expected_status=0, format_vars=None, remove_version=False, expected_json_in_stdout: Optional[dict] = None, expected_part_of_stdout=None, unexpected_part_of_stdout=None, ): """ Runs one command using the ConsoleTool, checking stdout, stderr, and the returned status code. The expected output strings are format strings (as used by str.format), so braces need to be escaped by doubling them. The variables 'account_id' and 'master_key' are set by default, plus any variables passed in the dict format_vars. The ConsoleTool is stateless, so we can make a new one for each call, with a fresh stdout and stderr. However, the last instance of ConsoleTool is stored in `self.console_tool`, which may be handy for testing internals of the tool after the last command invocation.
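Example (illustrative call, not taken from any specific test): self._run_command(['list-buckets'], expected_stdout='bucket_0 allPublic my-bucket\n', expected_stderr='', expected_status=0)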
""" expected_stderr = self._normalize_expected_output(expected_stderr, format_vars) stdout, stderr = self._get_stdouterr() self.console_tool = self.console_tool_class(stdout, stderr) try: actual_status = self.console_tool.run_command(['b2'] + argv) except SystemExit as e: actual_status = e.code actual_stdout = self._trim_trailing_spaces(stdout.getvalue()) actual_stderr = self._trim_trailing_spaces(stderr.getvalue()) # ignore any references to specific api version if remove_version: actual_stdout = self._remove_api_version_number(actual_stdout) actual_stderr = self._remove_api_version_number(actual_stderr) if expected_stdout is not None and expected_stdout != actual_stdout: expected_stdout = self._normalize_expected_output(expected_stdout, format_vars) print('EXPECTED STDOUT:', repr(expected_stdout)) print('ACTUAL STDOUT: ', repr(actual_stdout)) print(actual_stdout) if expected_part_of_stdout is not None and expected_part_of_stdout not in actual_stdout: expected_part_of_stdout = self._normalize_expected_output( expected_part_of_stdout, format_vars ) print('EXPECTED TO FIND IN STDOUT:', repr(expected_part_of_stdout)) print('ACTUAL STDOUT: ', repr(actual_stdout)) if expected_stderr is not None and expected_stderr != actual_stderr: print('EXPECTED STDERR:', repr(expected_stderr)) print('ACTUAL STDERR: ', repr(actual_stderr)) print(actual_stderr) if expected_json_in_stdout is not None: json_match = self.json_pattern.match(actual_stdout) if not json_match: self.fail('EXPECTED TO FIND A JSON IN: ' + repr(actual_stdout)) found_json = json.loads(json_match.group('dict_json') or json_match.group('list_json')) if json_match.group('dict_json'): self.assertDictIsContained(expected_json_in_stdout, found_json) else: self.assertListOfDictsIsContained(expected_json_in_stdout, found_json) if expected_stdout is not None: self.assertEqual(expected_stdout, actual_stdout, 'stdout') if expected_part_of_stdout is not None: self.assertIn(expected_part_of_stdout, actual_stdout) if unexpected_part_of_stdout is not None: self.assertNotIn(unexpected_part_of_stdout, actual_stdout) if expected_stderr is not None: self.assertEqual(expected_stderr, actual_stderr, 'stderr') assert expected_status == actual_status return actual_status, actual_stdout, actual_stderr @classmethod def _upload_multiple_files(cls, bucket): data = UploadSourceBytes(b'test-data') bucket.upload(data, 'a/test.csv') bucket.upload(data, 'a/test.tsv') bucket.upload(data, 'b/b/test.csv') bucket.upload(data, 'b/b1/test.csv') bucket.upload(data, 'b/b2/test.tsv') bucket.upload(data, 'b/test.txt') bucket.upload(data, 'c/test.csv') bucket.upload(data, 'c/test.tsv') class TestTTYConsoleTool(BaseConsoleToolTest): def _get_stdouterr(self): class FakeStringIO(StringIO): def isatty(self): return True stdout = FakeStringIO() stderr = FakeStringIO() return stdout, stderr def test_e_c1_char_ls_default_escape_control_chars_setting(self): self._authorize_account() self._run_command(['create-bucket', 'my-bucket-cc', 'allPrivate'], 'bucket_0\n', '', 0) with TempDir() as temp_dir: local_file = self._make_local_file(temp_dir, "x") bad_str = "\u009b2K\u009b7Gb\u009b24Gx\u009b4GH" escaped_bad_str = "\\x9b2K\\x9b7Gb\\x9b24Gx\\x9b4GH" self._run_command(['upload-file', '--no-progress', 'my-bucket-cc', local_file, bad_str]) self._run_command( ['upload-file', '--no-progress', 'my-bucket-cc', local_file, "some_normal_text"] ) self._run_command( ['ls', *self.b2_uri_args('my-bucket-cc')], expected_part_of_stdout=escaped_bad_str ) class TestConsoleTool(BaseConsoleToolTest): 
@pytest.mark.apiver(to_ver=3) def test_camel_case_supported_in_v3(self): self._authorize_account() self._run_command( ['create-bucket', 'my-bucket', '--bucketInfo', '{"xxx": "123"}', 'allPrivate'], 'bucket_0\n', '', 0 ) self._run_command( ['create-bucket', 'my-bucket-kebab', '--bucket-info', '{"xxx": "123"}', 'allPrivate'], 'bucket_1\n', '', 0 ) @pytest.mark.apiver(from_ver=4) def test_camel_case_not_supported_in_v4(self): self._authorize_account() self._run_command(['create-bucket', 'my-bucket', '--bucketInfo', 'allPrivate'], '', '', 2) self._run_command( ['create-bucket', 'my-bucket-kebab', '--bucket-info', '{"xxx": "123"}', 'allPrivate'], 'bucket_0\n', '', 0 ) def test_create_key_and_authorize_with_it(self): # Start with authorizing with the master key self._authorize_account() # Create a key self._run_command( ['create-key', 'key1', 'listBuckets,listKeys'], 'appKeyId0 appKey0\n', '', 0, ) # Authorize with the key self._run_command( ['authorize-account', 'appKeyId0', 'appKey0'], None, '', 0, ) def test_create_key_with_authorization_from_env_vars(self): # Initial condition assert self.account_info.get_account_auth_token() is None # Authorize an account with a good api key. # Setting up environment variables with mock.patch.dict( 'os.environ', { B2_APPLICATION_KEY_ID_ENV_VAR: self.account_id, B2_APPLICATION_KEY_ENV_VAR: self.master_key, } ): assert B2_APPLICATION_KEY_ID_ENV_VAR in os.environ assert B2_APPLICATION_KEY_ENV_VAR in os.environ # The first time we're running on this cache there will be output from the implicit "authorize-account" call self._run_command( ['create-key', 'key1', 'listBuckets,listKeys'], 'appKeyId0 appKey0\n', '', 0, ) # The second time "authorize-account" is not called self._run_command( ['create-key', 'key1', 'listBuckets,listKeys,writeKeys'], 'appKeyId1 appKey1\n', '', 0, ) with mock.patch.dict( 'os.environ', { B2_APPLICATION_KEY_ID_ENV_VAR: 'appKeyId1', B2_APPLICATION_KEY_ENV_VAR: 'appKey1', } ): # "authorize-account" is called when the key changes self._run_command( ['create-key', 'key1', 'listBuckets,listKeys'], 'appKeyId2 appKey2\n', '', 0, ) # "authorize-account" is also called when the realm changes with mock.patch.dict( 'os.environ', { B2_ENVIRONMENT_ENV_VAR: 'http://custom.example.com', } ): self._run_command( ['create-key', 'key1', 'listBuckets,listKeys'], 'appKeyId3 appKey3\n', 'Using http://custom.example.com\n', 0, ) def test_authorize_key_without_list_buckets(self): self._authorize_account() # Create a key without listBuckets self._run_command(['create-key', 'key1', 'listKeys'], 'appKeyId0 appKey0\n', '', 0) # Authorize with the key self._run_command( ['authorize-account', 'appKeyId0', 'appKey0'], '', 'ERROR: application key has no listBuckets capability, which is required for the b2 command-line tool\n', 1, ) def test_create_bucket__with_lifecycle_rule(self): self._authorize_account() rule = json.dumps( { "daysFromHidingToDeleting": 1, "daysFromUploadingToHiding": None, "fileNamePrefix": "" } ) self._run_command( ['create-bucket', 'my-bucket', 'allPrivate', '--lifecycle-rule', rule], 'bucket_0\n', '', 0 ) def test_create_bucket__with_lifecycle_rules(self): self._authorize_account() rules = json.dumps( [ { "daysFromHidingToDeleting": 1, "daysFromUploadingToHiding": None, "fileNamePrefix": "" } ] ) self._run_command( ['create-bucket', 'my-bucket', 'allPrivate', '--lifecycle-rules', rules], 'bucket_0\n', '', 0 ) def test_create_bucket__mutually_exclusive_lifecycle_rules_options(self): self._authorize_account() rule = json.dumps( { 
"daysFromHidingToDeleting": 1, "daysFromUploadingToHiding": None, "fileNamePrefix": "" } ) self._run_command( [ 'create-bucket', 'my-bucket', 'allPrivate', '--lifecycle-rule', rule, '--lifecycle-rules', f"[{rule}]" ], '', '', 2 ) def test_create_bucket_key_and_authorize_with_it(self): # Start with authorizing with the master key self._authorize_account() # Make a bucket self._run_command(['create-bucket', 'my-bucket', 'allPrivate'], 'bucket_0\n', '', 0) # Create a key restricted to that bucket self._run_command( ['create-key', '--bucket', 'my-bucket', 'key1', 'listKeys,listBuckets'], 'appKeyId0 appKey0\n', '', 0 ) # Authorize with the key self._run_command( ['authorize-account', 'appKeyId0', 'appKey0'], None, '', 0, ) def test_update_bucket_without_lifecycle(self): # Start with authorizing with the master key self._authorize_account() bucket_name = 'my-bucket-liferules' # Create a bucket with lifecycleRule self._run_command( [ 'create-bucket', '--lifecycle-rule', '{"daysFromHidingToDeleting": 2, "fileNamePrefix": "foo"}', bucket_name, 'allPrivate' ], 'bucket_0\n', '', 0 ) expected_stdout_dict = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": { "xxx": "123" }, "bucketName": "my-bucket-liferules", "bucketType": "allPrivate", "lifecycleRules": [{ "daysFromHidingToDeleting": 2, "fileNamePrefix": "foo" }], } # Update some other attribute than lifecycleRule, which should remain intact self._run_command( ['update-bucket', bucket_name, '--bucket-info', '{"xxx": "123"}'], expected_json_in_stdout=expected_stdout_dict, ) def test_clear_account(self): # Initial condition self._authorize_account() assert self.account_info.get_account_auth_token() is not None # Clearing the account should remove the auth token # from the account info. self._run_command(['clear-account'], '', '', 0) assert self.account_info.get_account_auth_token() is None def test_buckets(self): self._authorize_account() # Make a bucket with an illegal name expected_stdout = 'ERROR: Bad request: illegal bucket name: bad/bucket/name\n' self._run_command(['create-bucket', 'bad/bucket/name', 'allPublic'], '', expected_stdout, 1) # Make two buckets self._run_command(['create-bucket', 'my-bucket', 'allPrivate'], 'bucket_0\n', '', 0) self._run_command(['create-bucket', 'your-bucket', 'allPrivate'], 'bucket_1\n', '', 0) # Update one of them expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": {}, "bucketName": "my-bucket", "bucketType": "allPublic", "corsRules": [], "defaultServerSideEncryption": { "mode": "none" }, "lifecycleRules": [], "options": [], "revision": 2 } self._run_command( ['update-bucket', 'my-bucket', 'allPublic'], expected_json_in_stdout=expected_json ) # Make sure they are there expected_stdout = ''' bucket_0 allPublic my-bucket bucket_1 allPrivate your-bucket ''' self._run_command(['list-buckets'], expected_stdout, '', 0) # Delete one expected_stdout = '' self._run_command(['delete-bucket', 'your-bucket'], expected_stdout, '', 0) def test_encrypted_buckets(self): self._authorize_account() # Make two encrypted buckets self._run_command(['create-bucket', 'my-bucket', 'allPrivate'], 'bucket_0\n', '', 0) self._run_command( [ 'create-bucket', '--default-server-side-encryption=SSE-B2', 'your-bucket', 'allPrivate' ], 'bucket_1\n', '', 0 ) # Update the one without encryption expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": {}, "bucketName": "my-bucket", "bucketType": "allPublic", "corsRules": [], "defaultServerSideEncryption": { "algorithm": 
"AES256", "mode": "SSE-B2" }, "lifecycleRules": [], "options": [], "revision": 2 } self._run_command( ['update-bucket', '--default-server-side-encryption=SSE-B2', 'my-bucket', 'allPublic'], expected_json_in_stdout=expected_json, ) # Update the one with encryption expected_json = { "accountId": self.account_id, "bucketId": "bucket_1", "bucketInfo": {}, "bucketName": "your-bucket", "bucketType": "allPrivate", "corsRules": [], "defaultServerSideEncryption": { "algorithm": "AES256", "mode": "SSE-B2" }, "lifecycleRules": [], "options": [], "revision": 2 } self._run_command( ['update-bucket', 'your-bucket', 'allPrivate'], expected_json_in_stdout=expected_json ) # Make sure they are there expected_stdout = ''' bucket_0 allPublic my-bucket bucket_1 allPrivate your-bucket ''' self._run_command(['list-buckets'], expected_stdout, '', 0) def test_keys(self): self._authorize_account() self._run_command(['create-bucket', 'my-bucket-a', 'allPublic'], 'bucket_0\n', '', 0) self._run_command(['create-bucket', 'my-bucket-b', 'allPublic'], 'bucket_1\n', '', 0) self._run_command(['create-bucket', 'my-bucket-c', 'allPublic'], 'bucket_2\n', '', 0) capabilities = ['readFiles', 'listBuckets'] capabilities_with_commas = ','.join(capabilities) # Make a key with an illegal name expected_stderr = 'ERROR: Bad request: illegal key name: bad_key_name\n' self._run_command( ['create-key', 'bad_key_name', capabilities_with_commas], '', expected_stderr, 1 ) # Make a key with negative validDurationInSeconds expected_stderr = 'ERROR: Bad request: valid duration must be greater than 0, and less than 1000 days in seconds\n' self._run_command( ['create-key', '--duration', '-456', 'goodKeyName', capabilities_with_commas], '', expected_stderr, 1 ) # Make a key with validDurationInSeconds outside of range expected_stderr = 'ERROR: Bad request: valid duration must be greater than 0, ' \ 'and less than 1000 days in seconds\n' self._run_command( ['create-key', '--duration', '0', 'goodKeyName', capabilities_with_commas], '', expected_stderr, 1 ) self._run_command( ['create-key', '--duration', '86400001', 'goodKeyName', capabilities_with_commas], '', expected_stderr, 1 ) # Create three keys self._run_command( ['create-key', 'goodKeyName-One', capabilities_with_commas], 'appKeyId0 appKey0\n', '', 0, ) self._run_command( [ 'create-key', '--bucket', 'my-bucket-a', 'goodKeyName-Two', capabilities_with_commas + ',readBucketEncryption' ], 'appKeyId1 appKey1\n', '', 0, ) self._run_command( [ 'create-key', '--bucket', 'my-bucket-b', 'goodKeyName-Three', capabilities_with_commas ], 'appKeyId2 appKey2\n', '', 0, ) self._run_command( ['create-key', '--all-capabilities', 'goodKeyName-Four'], 'appKeyId3 appKey3\n', '', 0, ) self._run_command( ['create-key', '--bucket', 'my-bucket-b', 'goodKeyName-Five', capabilities_with_commas], 'appKeyId4 appKey4\n', '', 0, ) # Delete one key self._run_command(['delete-key', 'appKeyId2'], 'appKeyId2\n', '', 0) # Delete one bucket, to test listing when a bucket is gone. 
self._run_command_ignore_output(['delete-bucket', 'my-bucket-b']) # List keys expected_list_keys_out = """ appKeyId0 goodKeyName-One appKeyId1 goodKeyName-Two appKeyId3 goodKeyName-Four appKeyId4 goodKeyName-Five """ expected_list_keys_out_long = """ appKeyId0 goodKeyName-One - - - '' readFiles,listBuckets appKeyId1 goodKeyName-Two my-bucket-a - - '' readFiles,listBuckets,readBucketEncryption appKeyId3 goodKeyName-Four - - - '' {} appKeyId4 goodKeyName-Five id=bucket_1 - - '' readFiles,listBuckets """.format(','.join(sorted(ALL_CAPABILITIES))) self._run_command(['list-keys'], expected_list_keys_out, '', 0) self._run_command(['list-keys', '--long'], expected_list_keys_out_long, '', 0) # authorize and make calls using application key with no restrictions self._run_command(['authorize-account', 'appKeyId0', 'appKey0'], None, '', 0) self._run_command( ['list-buckets'], 'bucket_0 allPublic my-bucket-a\nbucket_2 allPublic my-bucket-c\n', '', 0 ) expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": {}, "bucketName": "my-bucket-a", "bucketType": "allPublic", "corsRules": [], "defaultServerSideEncryption": { "mode": None }, "lifecycleRules": [], "options": [], "revision": 1 } self._run_command(['get-bucket', 'my-bucket-a'], expected_json_in_stdout=expected_json) # authorize and make calls using an application key with bucket restrictions self._run_command(['authorize-account', 'appKeyId1', 'appKey1'], None, '', 0) self._run_command( ['list-buckets'], '', 'ERROR: Application key is restricted to bucket: my-bucket-a\n', 1 ) self._run_command( ['get-bucket', 'my-bucket-c'], '', 'ERROR: Application key is restricted to bucket: my-bucket-a\n', 1 ) expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": {}, "bucketName": "my-bucket-a", "bucketType": "allPublic", "corsRules": [], "defaultServerSideEncryption": { "mode": "none" }, "lifecycleRules": [], "options": [], "revision": 1 } self._run_command(['get-bucket', 'my-bucket-a'], expected_json_in_stdout=expected_json) self._run_command( ['ls', '--json', *self.b2_uri_args('my-bucket-c')], '', 'ERROR: Application key is restricted to bucket: my-bucket-a\n', 1 ) def test_bucket_info_from_json(self): self._authorize_account() self._run_command(['create-bucket', 'my-bucket', 'allPublic'], 'bucket_0\n', '', 0) bucket_info = {'color': 'blue'} expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": { "color": "blue" }, "bucketName": "my-bucket", "bucketType": "allPrivate", "corsRules": [], "defaultServerSideEncryption": { "mode": "none" }, "lifecycleRules": [], "options": [], "revision": 2 } self._run_command( ['update-bucket', '--bucket-info', json.dumps(bucket_info), 'my-bucket', 'allPrivate'], expected_json_in_stdout=expected_json, ) def test_files(self): self._authorize_account() self._run_command(['create-bucket', 'my-bucket', 'allPublic'], 'bucket_0\n', '', 0) with TempDir() as temp_dir: local_file1 = self._make_local_file(temp_dir, 'file1.txt') # For this test, use a mod time without millis. My mac truncates # millis and just leaves seconds. 
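# (Using whole seconds also lets the os.path.getmtime assertion below compare exactly.)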
mod_time = 1500111222 os.utime(local_file1, (mod_time, mod_time)) self.assertEqual(1500111222, os.path.getmtime(local_file1)) # Upload a file expected_stdout = ''' URL by file name: http://download.example.com/file/my-bucket/file1.txt URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999''' expected_json = { "action": "upload", "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileId": "9999", "fileInfo": { "src_last_modified_millis": "1500111222000", "b2-cache-control": "private, max-age=3600" }, "fileName": "file1.txt", "serverSideEncryption": { "mode": "none" }, "size": 11, "uploadTimestamp": 5000 } self._run_command( [ 'upload-file', '--no-progress', 'my-bucket', local_file1, 'file1.txt', '--cache-control=private, max-age=3600' ], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, ) # Get file info mod_time_str = str(file_mod_time_millis(local_file1)) expected_json = { "accountId": self.account_id, "action": "upload", "bucketId": "bucket_0", "size": 11, "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileId": "9999", "fileInfo": { "src_last_modified_millis": "1500111222000", "b2-cache-control": "private, max-age=3600" }, "fileName": "file1.txt", "serverSideEncryption": { "mode": "none" }, "uploadTimestamp": 5000 } self._run_command( ['file-info', 'b2id://9999'], expected_json_in_stdout=expected_json, ) # Hide the file expected_json = { "action": "hide", "contentSha1": "none", "fileId": "9998", "fileInfo": {}, "fileName": "file1.txt", "serverSideEncryption": { "mode": "none" }, "size": 0, "uploadTimestamp": 5001 } self._run_command( ['hide-file', 'my-bucket', 'file1.txt'], expected_json_in_stdout=expected_json, ) # List the file versions expected_json = [ { "action": "hide", "contentSha1": "none", "fileId": "9998", "fileInfo": {}, "fileName": "file1.txt", "serverSideEncryption": { "mode": "none" }, "size": 0, "uploadTimestamp": 5001 }, { "action": "upload", "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileId": "9999", "fileInfo": { "src_last_modified_millis": str(mod_time_str), "b2-cache-control": "private, max-age=3600" }, "fileName": "file1.txt", "serverSideEncryption": { "mode": "none" }, "size": 11, "uploadTimestamp": 5000 } ] self._run_command( ['ls', '--json', '--versions', *self.b2_uri_args('my-bucket')], expected_json_in_stdout=expected_json, ) # List the file names expected_stdout = ''' [] ''' self._run_command( ['ls', '--json', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0 ) # Delete one file version, passing the name in expected_json = {"action": "delete", "fileId": "9998", "fileName": "file1.txt"} self._run_command( ['delete-file-version', 'file1.txt', '9998'], expected_json_in_stdout=expected_json ) # Delete one file version, not passing the name in expected_json = {"action": "delete", "fileId": "9999", "fileName": "file1.txt"} self._run_command( ['delete-file-version', '9999'], expected_json_in_stdout=expected_json ) def test_files_encrypted(self): self._authorize_account() self._run_command(['create-bucket', 'my-bucket', 'allPublic'], 'bucket_0\n', '', 0) with TempDir() as temp_dir: local_file1 = self._make_local_file(temp_dir, 'file1.txt') # For this test, use a mod time without millis. My mac truncates # millis and just leaves seconds. 
mod_time = 1500111222 os.utime(local_file1, (mod_time, mod_time)) self.assertEqual(1500111222, os.path.getmtime(local_file1)) # Upload a file expected_stdout = ''' URL by file name: http://download.example.com/file/my-bucket/file1.txt URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999''' expected_json = { "action": "upload", "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileId": "9999", "fileInfo": { "src_last_modified_millis": "1500111222000" }, "fileName": "file1.txt", "serverSideEncryption": { "algorithm": "AES256", "mode": "SSE-B2" }, "size": 11, "uploadTimestamp": 5000 } self._run_command( [ 'upload-file', '--no-progress', '--destination-server-side-encryption=SSE-B2', 'my-bucket', local_file1, 'file1.txt' ], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, ) # Get file info mod_time_str = str(file_mod_time_millis(local_file1)) expected_json = { "accountId": self.account_id, "action": "upload", "bucketId": "bucket_0", "size": 11, "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileId": "9999", "fileInfo": { "src_last_modified_millis": "1500111222000" }, "fileName": "file1.txt", "serverSideEncryption": { "algorithm": "AES256", "mode": "SSE-B2" }, "uploadTimestamp": 5000 } self._run_command( ['file-info', 'b2id://9999'], expected_json_in_stdout=expected_json, ) # Download by name local_download1 = os.path.join(temp_dir, 'download1.txt') expected_stdout_template = ''' File name: file1.txt File id: 9999 Output file path: {output_path} File size: 11 Content type: b2/x-auto Content sha1: 2aae6c35c94fcfb415dbe95f408b9ce91ee846ed Encryption: mode=SSE-B2, algorithm=AES256 Retention: none Legal hold: INFO src_last_modified_millis: 1500111222000 Checksum matches Download finished ''' expected_stdout = expected_stdout_template.format( output_path=pathlib.Path(local_download1).resolve() ) self._run_command( ['download-file', '--no-progress', 'b2://my-bucket/file1.txt', local_download1], expected_stdout, '', 0 ) self.assertEqual(b'hello world', self._read_file(local_download1)) self.assertEqual(mod_time, int(round(os.path.getmtime(local_download1)))) # Download file by ID. 
(Same expected output as downloading by name) local_download2 = os.path.join(temp_dir, 'download2.txt') expected_stdout = expected_stdout_template.format( output_path=pathlib.Path(local_download2).resolve() ) self._run_command( ['download-file', '--no-progress', 'b2id://9999', local_download2], expected_stdout, '', 0 ) self.assertEqual(b'hello world', self._read_file(local_download2)) # Hide the file expected_json = { "action": "hide", "contentSha1": "none", "fileId": "9998", "fileInfo": {}, "fileName": "file1.txt", "serverSideEncryption": { "mode": "none" }, "size": 0, "uploadTimestamp": 5001 } self._run_command( ['hide-file', 'my-bucket', 'file1.txt'], expected_json_in_stdout=expected_json, ) # List the file versions expected_json = [ { "action": "hide", "contentSha1": "none", "fileId": "9998", "fileInfo": {}, "fileName": "file1.txt", "serverSideEncryption": { "mode": "none" }, "size": 0, "uploadTimestamp": 5001 }, { "action": "upload", "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileId": "9999", "fileInfo": { "src_last_modified_millis": str(mod_time_str) }, "fileName": "file1.txt", "serverSideEncryption": { "algorithm": "AES256", "mode": "SSE-B2" }, "size": 11, "uploadTimestamp": 5000 } ] self._run_command( ['ls', '--json', '--versions', *self.b2_uri_args('my-bucket')], expected_json_in_stdout=expected_json, ) # List the file names expected_stdout = ''' [] ''' self._run_command( ['ls', '--json', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0 ) # Delete one file version, passing the name in expected_json = {"action": "delete", "fileId": "9998", "fileName": "file1.txt"} self._run_command( ['delete-file-version', 'file1.txt', '9998'], expected_json_in_stdout=expected_json, ) # Delete one file version, not passing the name in expected_json = {"action": "delete", "fileId": "9999", "fileName": "file1.txt"} self._run_command( ['delete-file-version', '9999'], expected_json_in_stdout=expected_json, ) def _test_download_to_directory(self, download_by: str): self._authorize_account() self._create_my_bucket() base_filename = 'file' extension = '.txt' source_filename = f'{base_filename}{extension}' with TempDir() as temp_dir: local_file = self._make_local_file(temp_dir, source_filename) local_file_content = self._read_file(local_file) self._run_command( ['upload-file', '--no-progress', 'my-bucket', local_file, source_filename], remove_version=True, ) b2uri = f'b2://my-bucket/{source_filename}' if download_by == 'name' else 'b2id://9999' command = [ 'download-file', '--no-progress', b2uri, ] target_directory = os.path.join(temp_dir, 'target') os.mkdir(target_directory) command += [target_directory] self._run_command(command) self.assertEqual( local_file_content, self._read_file(os.path.join(target_directory, source_filename)) ) # Download the file second time, to check the override behavior. self._run_command(command) # We should get another file. 
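# (The second download must not overwrite the first: a uniquely named sibling is written instead, so exactly one extra file with the same stem and extension should appear.)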
target_directory_files = [ elem for elem in pathlib.Path(target_directory).glob(f'{base_filename}*{extension}') if elem.name != source_filename ] assert len(target_directory_files) == 1, f'{target_directory_files}' self.assertEqual(local_file_content, self._read_file(target_directory_files[0])) def test_download_by_id_to_directory(self): self._test_download_to_directory(download_by='id') def test_download_by_name_to_directory(self): self._test_download_to_directory(download_by='name') def test_copy_file_by_id(self): self._authorize_account() self._create_my_bucket() with TempDir() as temp_dir: local_file1 = self._make_local_file(temp_dir, 'file1.txt') # For this test, use a mod time without millis. My mac truncates # millis and just leaves seconds. mod_time = 1500111222 os.utime(local_file1, (mod_time, mod_time)) self.assertEqual(1500111222, os.path.getmtime(local_file1)) # Upload a file expected_stdout = ''' URL by file name: http://download.example.com/file/my-bucket/file1.txt URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999''' expected_json = { "action": "upload", "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileId": "9999", "fileInfo": { "src_last_modified_millis": "1500111222000" }, "fileName": "file1.txt", "serverSideEncryption": { "mode": "none" }, "size": 11, "uploadTimestamp": 5000 } self._run_command( ['upload-file', '--no-progress', 'my-bucket', local_file1, 'file1.txt'], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, ) # Copy File expected_json = { "accountId": self.account_id, "action": "copy", "bucketId": "bucket_0", "size": 11, "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileId": "9998", "fileInfo": { "src_last_modified_millis": "1500111222000" }, "fileName": "file1_copy.txt", "serverSideEncryption": { "mode": "none" }, "uploadTimestamp": 5001 } self._run_command( ['copy-file-by-id', '9999', 'my-bucket', 'file1_copy.txt'], expected_json_in_stdout=expected_json, ) # Copy File with range parameter expected_json = { "accountId": self.account_id, "action": "copy", "bucketId": "bucket_0", "size": 5, "contentSha1": "4f664540ff30b8d34e037298a84e4736be39d731", "contentType": "b2/x-auto", "fileId": "9997", "fileInfo": { "src_last_modified_millis": "1500111222000" }, "fileName": "file1_copy.txt", "serverSideEncryption": { "mode": "none" }, "uploadTimestamp": 5002 } self._run_command( ['copy-file-by-id', '--range', '3,7', '9999', 'my-bucket', 'file1_copy.txt'], expected_json_in_stdout=expected_json, ) local_download1 = os.path.join(temp_dir, 'file1_copy.txt') self._run_command( ['download-file', '-q', 'b2://my-bucket/file1_copy.txt', local_download1] ) self.assertEqual(b'lo wo', self._read_file(local_download1)) # Invalid metadata copy with file info expected_stderr = "ERROR: File info can be set only when content type is set\n" self._run_command( [ 'copy-file-by-id', '--info', 'a=b', '9999', 'my-bucket', 'file1_copy.txt', ], '', expected_stderr, 1, ) # Invalid metadata replace without file info expected_stderr = "ERROR: File info can be not set only when content type is not set\n" self._run_command( [ 'copy-file-by-id', '--content-type', 'text/plain', '9999', 'my-bucket', 'file1_copy.txt', ], '', expected_stderr, 1, ) # replace with content type and file info expected_json = { "accountId": self.account_id, "action": "copy", "bucketId": "bucket_0", "size": 11, "contentSha1": 
"2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "text/plain", "fileId": "9996", "fileInfo": { "a": "b" }, "fileName": "file1_copy.txt", "serverSideEncryption": { "mode": "none" }, "uploadTimestamp": 5003 } self._run_command( [ 'copy-file-by-id', '--content-type', 'text/plain', '--info', 'a=b', '9999', 'my-bucket', 'file1_copy.txt', ], expected_json_in_stdout=expected_json, ) # UnsatisfiableRange expected_stderr = "ERROR: The range in the request is outside the size of the file\n" self._run_command( ['copy-file-by-id', '--range', '12,20', '9999', 'my-bucket', 'file1_copy.txt'], '', expected_stderr, 1, ) # Copy in different bucket self._run_command(['create-bucket', 'my-bucket1', 'allPublic'], 'bucket_1\n', '', 0) expected_json = { "accountId": self.account_id, "action": "copy", "bucketId": "bucket_1", "size": 11, "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileId": "9994", "fileInfo": { "src_last_modified_millis": "1500111222000" }, "fileName": "file1_copy.txt", "serverSideEncryption": { "mode": "none" }, "uploadTimestamp": 5004 } self._run_command( ['copy-file-by-id', '9999', 'my-bucket1', 'file1_copy.txt'], expected_json_in_stdout=expected_json, ) def test_get_download_auth_defaults(self): self._authorize_account() self._create_my_bucket() self._run_command( ['get-download-auth', 'my-bucket'], 'fake_download_auth_token_bucket_0__86400\n', '', 0 ) def test_get_download_auth_explicit(self): self._authorize_account() self._create_my_bucket() self._run_command( ['get-download-auth', '--prefix', 'prefix', '--duration', '12345', 'my-bucket'], 'fake_download_auth_token_bucket_0_prefix_12345\n', '', 0 ) def test_get_download_auth_url(self): self._authorize_account() self._create_my_bucket() self._run_command( ['get-download-url-with-auth', '--duration', '12345', 'my-bucket', 'my-file'], 'http://download.example.com/file/my-bucket/my-file?Authorization=fake_download_auth_token_bucket_0_my-file_12345\n', '', 0 ) def test_get_download_auth_url_with_encoding(self): self._authorize_account() self._create_my_bucket() self._run_command( ['get-download-url-with-auth', '--duration', '12345', 'my-bucket', '\u81ea'], 'http://download.example.com/file/my-bucket/%E8%87%AA?Authorization=fake_download_auth_token_bucket_0_%E8%87%AA_12345\n', '', 0 ) def test_list_unfinished_large_files_with_none(self): self._authorize_account() self._create_my_bucket() self._run_command(['list-unfinished-large-files', 'my-bucket'], '', '', 0) def test_upload_large_file(self): self._authorize_account() self._create_my_bucket() min_part_size = self.account_info.get_recommended_part_size() file_size = min_part_size * 3 with TempDir() as temp_dir: file_path = os.path.join(temp_dir, 'test.txt') text = '*' * file_size with open(file_path, 'wb') as f: f.write(text.encode('utf-8')) mod_time_str = str(file_mod_time_millis(file_path)) expected_stdout = ''' URL by file name: http://download.example.com/file/my-bucket/test.txt URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999''' expected_json = { "action": "upload", "contentSha1": "none", "contentType": "b2/x-auto", "fileId": "9999", "fileInfo": { "large_file_sha1": "cc8954ec25e0c564b6a693fb22200e4f832c18e8", "src_last_modified_millis": str(mod_time_str) }, "fileName": "test.txt", "serverSideEncryption": { "mode": "none" }, "size": 600, "uploadTimestamp": 5000 } self._run_command( [ 'upload-file', '--no-progress', '--threads', '5', 'my-bucket', file_path, 'test.txt' ], 
expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, ) def test_upload_large_file_encrypted(self): self._authorize_account() self._run_command(['create-bucket', 'my-bucket', 'allPublic'], 'bucket_0\n', '', 0) min_part_size = self.account_info.get_recommended_part_size() file_size = min_part_size * 3 with TempDir() as temp_dir: file_path = os.path.join(temp_dir, 'test.txt') text = '*' * file_size with open(file_path, 'wb') as f: f.write(text.encode('utf-8')) mod_time_str = str(file_mod_time_millis(file_path)) expected_stdout = ''' URL by file name: http://download.example.com/file/my-bucket/test.txt URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999''' expected_json = { "action": "upload", "contentSha1": "none", "contentType": "b2/x-auto", "fileId": "9999", "fileInfo": { "large_file_sha1": "cc8954ec25e0c564b6a693fb22200e4f832c18e8", "src_last_modified_millis": str(mod_time_str) }, "fileName": "test.txt", "serverSideEncryption": { "algorithm": "AES256", "mode": "SSE-B2" }, "size": 600, "uploadTimestamp": 5000 } self._run_command( [ 'upload-file', '--no-progress', '--destination-server-side-encryption=SSE-B2', '--threads', '5', 'my-bucket', file_path, 'test.txt' ], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, ) def test_upload_incremental(self): self._authorize_account() self._run_command(['create-bucket', 'my-bucket', 'allPublic'], 'bucket_0\n', '', 0) min_part_size = self.account_info.get_recommended_part_size() file_size = min_part_size * 2 with TempDir() as temp_dir: file_path = pathlib.Path(temp_dir) / 'test.txt' incremental_upload_params = [ 'upload-file', '--no-progress', '--threads', '5', '--incremental-mode', 'my-bucket', str(file_path), 'test.txt', ] file_path.write_bytes(b'*' * file_size) self._run_command(incremental_upload_params) with open(file_path, 'ab') as f: f.write(b'*' * min_part_size) self._run_command(incremental_upload_params) downloaded_path = pathlib.Path(temp_dir) / 'out.txt' self._run_command( [ 'download-file', '-q', 'b2://my-bucket/test.txt', str(downloaded_path), ] ) assert downloaded_path.read_bytes() == file_path.read_bytes() def test_get_account_info(self): self._authorize_account() expected_json = { "accountAuthToken": "auth_token_0", "accountFilePath": getattr(self.account_info, 'filename', None), # missing in StubAccountInfo in tests "accountId": self.account_id, "allowed": { "bucketId": None, "bucketName": None, "capabilities": sorted(ALL_CAPABILITIES), "namePrefix": None }, "apiUrl": "http://api.example.com", "applicationKey": self.master_key, "downloadUrl": "http://download.example.com", "s3endpoint": "http://s3.api.example.com", } self._run_command( ['get-account-info'], expected_json_in_stdout=expected_json, ) def test_get_bucket(self): self._authorize_account() self._create_my_bucket() expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": {}, "bucketName": "my-bucket", "bucketType": "allPublic", "corsRules": [], "defaultServerSideEncryption": { "mode": "none" }, "lifecycleRules": [], "options": [], "revision": 1 } self._run_command( ['get-bucket', 'my-bucket'], expected_json_in_stdout=expected_json, ) def test_get_bucket_empty_show_size(self): self._authorize_account() self._create_my_bucket() expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": {}, "bucketName": "my-bucket", "bucketType": "allPublic", "corsRules": [], "defaultServerSideEncryption": 
{ "mode": "none" }, "fileCount": 0, "lifecycleRules": [], "options": [], "revision": 1, "totalSize": 0 } self._run_command( ['get-bucket', '--show-size', 'my-bucket'], expected_json_in_stdout=expected_json, ) def test_get_bucket_one_item_show_size(self): self._authorize_account() self._create_my_bucket() with TempDir() as temp_dir: # Upload a standard test file. local_file1 = self._make_local_file(temp_dir, 'file1.txt') mod_time_str = str(file_mod_time_millis(local_file1)) expected_stdout = ''' URL by file name: http://download.example.com/file/my-bucket/file1.txt URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999''' expected_json = { "action": "upload", "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileId": "9999", "fileInfo": { "src_last_modified_millis": str(mod_time_str) }, "fileName": "file1.txt", "serverSideEncryption": { "mode": "none" }, "size": 11, "uploadTimestamp": 5000 } self._run_command( ['upload-file', '--no-progress', 'my-bucket', local_file1, 'file1.txt'], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, ) # Now check the output of get-bucket against the canon. expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": {}, "bucketName": "my-bucket", "bucketType": "allPublic", "corsRules": [], "defaultServerSideEncryption": { "mode": "none" }, "fileCount": 1, "lifecycleRules": [], "options": [], "revision": 1, "totalSize": 11 } self._run_command( ['get-bucket', '--show-size', 'my-bucket'], expected_json_in_stdout=expected_json, ) def test_get_bucket_with_versions(self): self._authorize_account() self._create_my_bucket() # Put many versions of a file into the test bucket. Unroll the loop here for convenience. bucket = self.b2_api.get_bucket_by_name('my-bucket') bucket.upload(UploadSourceBytes(b'test'), 'test') bucket.upload(UploadSourceBytes(b'test'), 'test') bucket.upload(UploadSourceBytes(b'test'), 'test') bucket.upload(UploadSourceBytes(b'test'), 'test') bucket.upload(UploadSourceBytes(b'test'), 'test') bucket.upload(UploadSourceBytes(b'test'), 'test') bucket.upload(UploadSourceBytes(b'test'), 'test') bucket.upload(UploadSourceBytes(b'test'), 'test') bucket.upload(UploadSourceBytes(b'test'), 'test') bucket.upload(UploadSourceBytes(b'test'), 'test') # Now check the output of get-bucket against the canon. expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": {}, "bucketName": "my-bucket", "bucketType": "allPublic", "corsRules": [], "defaultServerSideEncryption": { "mode": "none" }, "fileCount": 10, "lifecycleRules": [], "options": [], "revision": 1, "totalSize": 40 } self._run_command( ['get-bucket', '--show-size', 'my-bucket'], expected_json_in_stdout=expected_json, ) def test_get_bucket_with_folders(self): self._authorize_account() self._create_my_bucket() # Create a hierarchical structure within the test bucket. Unroll the loop here for # convenience. 
bucket = self.b2_api.get_bucket_by_name('my-bucket') bucket.upload(UploadSourceBytes(b'test'), 'test') bucket.upload(UploadSourceBytes(b'test'), '1/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/3/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/3/4/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/3/4/5/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/3/4/5/6/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/3/4/5/6/7/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/3/4/5/6/7/8/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/3/4/5/6/7/8/9/test') bucket.upload(UploadSourceBytes(b'check'), 'check') bucket.upload(UploadSourceBytes(b'check'), '1/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/3/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/3/4/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/3/4/5/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/3/4/5/6/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/3/4/5/6/7/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/3/4/5/6/7/8/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/3/4/5/6/7/8/9/check') # Now check the output of get-bucket against the canon. expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": {}, "bucketName": "my-bucket", "bucketType": "allPublic", "corsRules": [], "defaultServerSideEncryption": { "mode": "none" }, "fileCount": 20, "lifecycleRules": [], "options": [], "revision": 1, "totalSize": 90 } self._run_command( ['get-bucket', '--show-size', 'my-bucket'], expected_json_in_stdout=expected_json, ) def test_get_bucket_with_hidden(self): self._authorize_account() self._create_my_bucket() # Put some files into the test bucket. Unroll the loop for convenience. bucket = self.b2_api.get_bucket_by_name('my-bucket') bucket.upload(UploadSourceBytes(b'test'), 'upload1') bucket.upload(UploadSourceBytes(b'test'), 'upload2') bucket.upload(UploadSourceBytes(b'test'), 'upload3') bucket.upload(UploadSourceBytes(b'test'), 'upload4') bucket.upload(UploadSourceBytes(b'test'), 'upload5') bucket.upload(UploadSourceBytes(b'test'), 'upload6') # Hide some new files. Don't check the results here; it will be clear enough that # something has failed if the output of 'get-bucket' does not match the canon. stdout, stderr = self._get_stdouterr() console_tool = self.console_tool_class(stdout, stderr) console_tool.run_command(['b2', 'hide-file', 'my-bucket', 'hidden1']) console_tool.run_command(['b2', 'hide-file', 'my-bucket', 'hidden2']) console_tool.run_command(['b2', 'hide-file', 'my-bucket', 'hidden3']) console_tool.run_command(['b2', 'hide-file', 'my-bucket', 'hidden4']) # Now check the output of get-bucket against the canon. expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": {}, "bucketName": "my-bucket", "bucketType": "allPublic", "corsRules": [], "defaultServerSideEncryption": { "mode": "none" }, "fileCount": 10, "lifecycleRules": [], "options": [], "revision": 1, "totalSize": 24 } self._run_command( ['get-bucket', '--show-size', 'my-bucket'], expected_json_in_stdout=expected_json, ) def test_get_bucket_complex(self): self._authorize_account() self._create_my_bucket() # Create a hierarchical structure within the test bucket. Unroll the loop here for # convenience. 
bucket = self.b2_api.get_bucket_by_name('my-bucket') bucket.upload(UploadSourceBytes(b'test'), 'test') bucket.upload(UploadSourceBytes(b'test'), 'test') bucket.upload(UploadSourceBytes(b'test'), '1/test') bucket.upload(UploadSourceBytes(b'test'), '1/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/3/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/3/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/3/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/3/test') bucket.upload(UploadSourceBytes(b'test'), '1/2/3/test') bucket.upload(UploadSourceBytes(b'check'), 'check') bucket.upload(UploadSourceBytes(b'check'), 'check') bucket.upload(UploadSourceBytes(b'check'), '1/check') bucket.upload(UploadSourceBytes(b'check'), '1/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/3/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/3/4/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/3/4/check') bucket.upload(UploadSourceBytes(b'check'), '1/2/3/4/check') # Hide some new files. Don't check the results here; it will be clear enough that # something has failed if the output of 'get-bucket' does not match the canon. stdout, stderr = self._get_stdouterr() console_tool = self.console_tool_class(stdout, stderr) console_tool.run_command(['b2', 'hide-file', 'my-bucket', '1/hidden1']) console_tool.run_command(['b2', 'hide-file', 'my-bucket', '1/hidden1']) console_tool.run_command(['b2', 'hide-file', 'my-bucket', '1/hidden2']) console_tool.run_command(['b2', 'hide-file', 'my-bucket', '1/2/hidden3']) console_tool.run_command(['b2', 'hide-file', 'my-bucket', '1/2/hidden3']) console_tool.run_command(['b2', 'hide-file', 'my-bucket', '1/2/hidden3']) console_tool.run_command(['b2', 'hide-file', 'my-bucket', '1/2/hidden3']) # Now check the output of get-bucket against the canon. 
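# (22 uploads - 11 x b'test' (4 B) and 11 x b'check' (5 B) - total 99 bytes, plus 7 zero-size hide markers: hence fileCount 29 and totalSize 99 below.)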
expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": {}, "bucketName": "my-bucket", "bucketType": "allPublic", "corsRules": [], "defaultServerSideEncryption": { "mode": "none" }, "fileCount": 29, "lifecycleRules": [], "options": [], "revision": 1, "totalSize": 99 } self._run_command( ['get-bucket', '--show-size', 'my-bucket'], expected_json_in_stdout=expected_json, ) def test_get_bucket_encrypted(self): self._authorize_account() self._run_command( [ 'create-bucket', '--default-server-side-encryption=SSE-B2', '--default-server-side-encryption-algorithm=AES256', 'my-bucket', 'allPublic' ], 'bucket_0\n', '', 0 ) expected_json = { "accountId": self.account_id, "bucketId": "bucket_0", "bucketInfo": {}, "bucketName": "my-bucket", "bucketType": "allPublic", "corsRules": [], "defaultServerSideEncryption": { "algorithm": "AES256", "mode": "SSE-B2" }, "fileCount": 0, "lifecycleRules": [], "options": [], "revision": 1, "totalSize": 0 } self._run_command( ['get-bucket', '--show-size', 'my-bucket'], expected_json_in_stdout=expected_json, ) def test_sync(self): self._authorize_account() self._create_my_bucket() with TempDir() as temp_dir: file_path = os.path.join(temp_dir, 'test.txt') with open(file_path, 'wb') as f: f.write(b'hello world') expected_stdout = ''' upload test.txt ''' command = ['sync', '--no-progress', temp_dir, 'b2://my-bucket'] self._run_command(command, expected_stdout, '', 0) def test_sync_empty_folder_when_not_enabled(self): self._authorize_account() self._create_my_bucket() with TempDir() as temp_dir: command = ['sync', '--no-progress', temp_dir, 'b2://my-bucket'] expected_stderr = 'ERROR: Directory %s is empty. Use --allow-empty-source to sync anyway.\n' % fix_windows_path_limit( temp_dir.replace('\\\\', '\\') ) self._run_command(command, '', expected_stderr, 1) def test_sync_empty_folder_when_enabled(self): self._authorize_account() self._create_my_bucket() with TempDir() as temp_dir: command = ['sync', '--no-progress', '--allow-empty-source', temp_dir, 'b2://my-bucket'] self._run_command(command, '', '', 0) def test_sync_dry_run(self): self._authorize_account() self._create_my_bucket() with TempDir() as temp_dir: temp_file = self._make_local_file(temp_dir, 'test-dry-run.txt') # dry-run expected_stdout = ''' upload test-dry-run.txt ''' command = ['sync', '--no-progress', '--dry-run', temp_dir, 'b2://my-bucket'] self._run_command(command, expected_stdout, '', 0) # file should not have been uploaded expected_stdout = ''' [] ''' self._run_command( ['ls', '--json', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0 ) # upload file expected_stdout = ''' upload test-dry-run.txt ''' command = ['sync', '--no-progress', temp_dir, 'b2://my-bucket'] self._run_command(command, expected_stdout, '', 0) # file should have been uploaded mtime = file_mod_time_millis(temp_file) expected_json = [ { "action": "upload", "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", "contentType": "b2/x-auto", "fileId": "9999", "fileInfo": { "src_last_modified_millis": str(mtime) }, "fileName": "test-dry-run.txt", "serverSideEncryption": { "mode": "none" }, "size": 11, "uploadTimestamp": 5000 } ] self._run_command( ['ls', '--json', *self.b2_uri_args('my-bucket')], expected_json_in_stdout=expected_json, ) def test_sync_exclude_all_symlinks(self): self._authorize_account() self._create_my_bucket() with TempDir() as temp_dir: self._make_local_file(temp_dir, 'test.txt') os.symlink('test.txt', os.path.join(temp_dir, 'alink')) expected_stdout = ''' upload test.txt ''' 
            command = [
                'sync', '--no-progress', '--exclude-all-symlinks', temp_dir, 'b2://my-bucket'
            ]
            self._run_command(command, expected_stdout, '', 0)

    def test_sync_dont_exclude_all_symlinks(self):
        self._authorize_account()
        self._create_my_bucket()

        with TempDir() as temp_dir:
            self._make_local_file(temp_dir, 'test.txt')
            os.symlink('test.txt', os.path.join(temp_dir, 'alink'))
            # Exact stdout cannot be asserted because line order is non-deterministic
            expected_part_of_stdout = '''
            upload alink
            '''
            command = ['sync', '--no-progress', temp_dir, 'b2://my-bucket']
            self._run_command(command, expected_part_of_stdout=expected_part_of_stdout)

    def test_sync_exclude_if_modified_after_in_range(self):
        self._authorize_account()
        self._create_my_bucket()

        with TempDir() as temp_dir:
            for file, mtime in (('test.txt', 1367900664.152), ('test2.txt', 1367600664.152)):
                self._make_local_file(temp_dir, file)
                path = os.path.join(temp_dir, file)
                os.utime(path, (mtime, mtime))

            expected_stdout = '''
            upload test2.txt
            '''
            command = [
                'sync', '--no-progress', '--exclude-if-modified-after', '1367700664.152',
                temp_dir, 'b2://my-bucket'
            ]
            self._run_command(command, expected_stdout, '', 0)

    def test_sync_exclude_if_modified_after_exact(self):
        self._authorize_account()
        self._create_my_bucket()

        with TempDir() as temp_dir:
            for file, mtime in (('test.txt', 1367900664.152), ('test2.txt', 1367600664.152)):
                self._make_local_file(temp_dir, file)
                path = os.path.join(temp_dir, file)
                os.utime(path, (mtime, mtime))

            expected_stdout = '''
            upload test2.txt
            '''
            command = [
                'sync', '--no-progress', '--exclude-if-modified-after', '1367600664.152',
                temp_dir, 'b2://my-bucket'
            ]
            self._run_command(command, expected_stdout, '', 0)

    def _test_sync_threads(
        self,
        threads=None,
        sync_threads=None,
        download_threads=None,
        upload_threads=None,
    ):
        self._authorize_account()
        self._create_my_bucket()

        with TempDir() as temp_dir:
            self._make_local_file(temp_dir, 'file.txt')
            command = ['sync', '--no-progress']
            if threads is not None:
                command += ['--threads', str(threads)]
            if sync_threads is not None:
                command += ['--sync-threads', str(sync_threads)]
            if download_threads is not None:
                command += ['--download-threads', str(download_threads)]
            if upload_threads is not None:
                command += ['--upload-threads', str(upload_threads)]
            command += [temp_dir, 'b2://my-bucket']
            expected_stdout = '''
            upload file.txt
            '''
            self._run_command(command, expected_stdout)

    def test_sync_threads(self):
        self._test_sync_threads(threads=1)

    def test_sync_sync_threads(self):
        self._test_sync_threads(sync_threads=1)

    def test_sync_download_threads(self):
        self._test_sync_threads(download_threads=1)

    def test_sync_upload_threads(self):
        self._test_sync_threads(upload_threads=1)

    def test_sync_many_thread_options(self):
        self._test_sync_threads(sync_threads=1, download_threads=1, upload_threads=1)

    def test_sync_threads_and_upload_threads(self):
        # Using --threads is exclusive with other options
        with self.assertRaises(ValueError):
            self._test_sync_threads(threads=1, upload_threads=1)

    def test_sync_threads_and_sync_threads(self):
        # Using --threads is exclusive with other options
        with self.assertRaises(ValueError):
            self._test_sync_threads(threads=1, sync_threads=1)

    def test_sync_threads_and_download_threads(self):
        # Using --threads is exclusive with other options
        with self.assertRaises(ValueError):
            self._test_sync_threads(threads=1, download_threads=1)

    def test_sync_all_thread_options(self):
        # Using --threads is exclusive with other options
        with self.assertRaises(ValueError):
            self._test_sync_threads(threads=1, sync_threads=1, download_threads=1, upload_threads=1)
    def test_ls(self):
        self._authorize_account()
        self._create_my_bucket()

        # Check with no files
        self._run_command(['ls', *self.b2_uri_args('my-bucket')], '', '', 0)

        # Create some files, including files in a folder.
        # (The two versions of 'c' are 5 and 6 bytes, which is what the size
        # columns in the long listings below assert.)
        bucket = self.b2_api.get_bucket_by_name('my-bucket')
        bucket.upload(UploadSourceBytes(b''), 'a')
        bucket.upload(UploadSourceBytes(b' '), 'b/b1')
        bucket.upload(UploadSourceBytes(b'   '), 'b/b2')
        bucket.upload(UploadSourceBytes(b'     '), 'c')
        bucket.upload(UploadSourceBytes(b'      '), 'c')

        # Condensed output
        expected_stdout = '''
        a
        b/
        c
        '''
        self._run_command(['ls', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0)

        # Recursive output
        expected_stdout = '''
        a
        b/b1
        b/b2
        c
        '''
        self._run_command(
            ['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0
        )
        self._run_command(['ls', '-r', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0)

        # Check long output. (The format expects full-length file ids, so it causes whitespace here)
        expected_stdout = '''
                                                                                       9999  upload  1970-01-01  00:00:05          0  a
                                                                                          -       -           -         -          0  b/
                                                                                       9995  upload  1970-01-01  00:00:05          6  c
        '''
        self._run_command(['ls', '--long', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0)

        # Check long versions output (The format expects full-length file ids, so it causes whitespace here)
        expected_stdout = '''
                                                                                       9999  upload  1970-01-01  00:00:05          0  a
                                                                                          -       -           -         -          0  b/
                                                                                       9995  upload  1970-01-01  00:00:05          6  c
                                                                                       9996  upload  1970-01-01  00:00:05          5  c
        '''
        self._run_command(
            ['ls', '--long', '--versions', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0
        )

    def test_ls_wildcard(self):
        self._authorize_account()
        self._create_my_bucket()

        # Check with no files
        self._run_command(
            ['ls', '--recursive', '--with-wildcard', *self.b2_uri_args('my-bucket', '*.txt')],
            '', '', 0
        )

        # Create some files, including files in a folder
        bucket = self.b2_api.get_bucket_by_name('my-bucket')
        self._upload_multiple_files(bucket)

        expected_stdout = '''
        a/test.csv
        a/test.tsv
        b/b/test.csv
        b/b1/test.csv
        b/b2/test.tsv
        c/test.csv
        c/test.tsv
        '''
        self._run_command(
            ['ls', '--recursive', '--with-wildcard', *self.b2_uri_args('my-bucket', '*.[tc]sv')],
            expected_stdout,
        )

        expected_stdout = '''
        a/test.tsv
        b/b2/test.tsv
        c/test.tsv
        '''
        self._run_command(
            ['ls', '--recursive', '--with-wildcard', *self.b2_uri_args('my-bucket', '*.tsv')],
            expected_stdout,
        )

        expected_stdout = '''
        b/b1/test.csv
        '''
        self._run_command(
            [
                'ls', '--recursive', '--with-wildcard',
                *self.b2_uri_args('my-bucket', 'b/b?/test.csv')
            ],
            expected_stdout,
        )

        expected_stdout = '''
        a/test.csv
        a/test.tsv
        c/test.csv
        c/test.tsv
        '''
        self._run_command(
            ['ls', '--recursive', '--with-wildcard', *self.b2_uri_args('my-bucket', '?/test.?sv')],
            expected_stdout,
        )

        expected_stdout = '''
        b/b/test.csv
        b/b1/test.csv
        '''
        self._run_command(
            [
                'ls', '--recursive', '--with-wildcard',
                *self.b2_uri_args('my-bucket', '?/*/*.[!t]sv')
            ],
            expected_stdout,
        )

    def test_ls_with_wildcard_no_recursive(self):
        self._authorize_account()
        self._create_my_bucket()

        # Check with no files
        self._run_command(
            ['ls', '--with-wildcard', *self.b2_uri_args('my-bucket')],
            expected_stderr='ERROR: with_wildcard requires recursive to be turned on as well\n',
            expected_status=1,
        )

    def test_restrictions(self):
        # Initial condition
        self.assertEqual(None, self.account_info.get_account_auth_token())

        # Authorize an account with the master key.
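        # The master key is unrestricted; the test then switches to a key that is
        # restricted to a single bucket and name prefix and verifies what is stored.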
        account_id = self.account_id
        self._run_command_ignore_output(['authorize-account', account_id, self.master_key])

        # Create a bucket to use
        bucket_name = 'restrictedBucket'
        bucket_id = 'bucket_0'
        self._run_command(['create-bucket', bucket_name, 'allPrivate'], bucket_id + '\n', '', 0)

        # Create another bucket
        other_bucket_name = 'otherBucket'
        self._run_command_ignore_output(['create-bucket', other_bucket_name, 'allPrivate'])

        # Create a key restricted to a bucket
        app_key_id = 'appKeyId0'
        app_key = 'appKey0'
        capabilities = "listBuckets,readFiles"
        file_prefix = 'some/file/prefix/'
        self._run_command(
            [
                'create-key', '--bucket', bucket_name, '--name-prefix', file_prefix,
                'my-key', capabilities
            ],
            app_key_id + ' ' + app_key + '\n',
            '',
            0,
        )

        self._run_command_ignore_output(['authorize-account', app_key_id, app_key])

        # Auth token should be in account info now
        self.assertEqual('auth_token_1', self.account_info.get_account_auth_token())

        # Assertions that the restrictions not only are saved but what they are supposed to be
        self.assertEqual(
            dict(
                bucketId=bucket_id,
                bucketName=bucket_name,
                capabilities=[
                    'listBuckets',
                    'readFiles',
                ],
                namePrefix=file_prefix,
            ),
            self.account_info.get_allowed(),
        )

        # Test that the application key info gets added to the unauthorized error message.
        expected_create_key_stderr = "ERROR: unauthorized for application key " \
                                     "with capabilities 'listBuckets,readFiles', " \
                                     "restricted to bucket 'restrictedBucket', " \
                                     "restricted to files that start with 'some/file/prefix/' (unauthorized)\n"
        self._run_command(
            ['create-key', 'goodKeyName-One', 'readFiles,listBuckets'],
            '',
            expected_create_key_stderr,
            1,
        )

    def test_list_buckets_not_allowed_for_app_key(self):
        # Create a bucket and a key restricted to that bucket.
        self._authorize_account()
        self._run_command(
            ['create-bucket', 'my-bucket', 'allPrivate'],
            'bucket_0\n',
            '',
            0,
        )

        # Authorizing with the key will fail because the ConsoleTool needs
        # to be able to look up the name of the bucket.
        self._run_command(
            ['create-key', 'my-key', 'listFiles'],
            'appKeyId0 appKey0\n',
            '',
            0,
        )

        # Authorize with the key, which should result in an error.
        self._run_command(
            ['authorize-account', 'appKeyId0', 'appKey0'],
            '',
            'ERROR: application key has no listBuckets capability, which is required for the b2 command-line tool\n',
            1,
        )

    def test_bucket_missing_for_bucket_key(self):
        # Create a bucket and a key restricted to that bucket.
        self._authorize_account()
        self._run_command(
            ['create-bucket', 'my-bucket', 'allPrivate'],
            'bucket_0\n',
            '',
            0,
        )
        self._run_command(
            ['create-key', '--bucket', 'my-bucket', 'my-key', 'listBuckets,listFiles'],
            'appKeyId0 appKey0\n',
            '',
            0,
        )

        # Get rid of the bucket, leaving the key with a dangling pointer to it.
        self._run_command_ignore_output(['delete-bucket', 'my-bucket'])

        # Authorizing with the key will fail because the ConsoleTool needs
        # to be able to look up the name of the bucket.
        self._run_command(
            ['authorize-account', 'appKeyId0', 'appKey0'],
            '',
            "ERROR: unable to authorize account: Application key is restricted to a bucket that doesn't exist\n",
            1,
        )

    def test_ls_for_restricted_bucket(self):
        # Create a bucket and a key restricted to that bucket.
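        # Unlike the key in the previous test, this one includes listBuckets, so
        # authorize-account can resolve the bucket name and 'ls' should succeed.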
        self._authorize_account()
        self._run_command(
            ['create-bucket', 'my-bucket', 'allPrivate'],
            'bucket_0\n',
            '',
            0,
        )
        self._run_command(
            ['create-key', '--bucket', 'my-bucket', 'my-key', 'listBuckets,listFiles'],
            'appKeyId0 appKey0\n',
            '',
            0,
        )

        # Authorize with the key and list the files
        self._run_command_ignore_output(['authorize-account', 'appKeyId0', 'appKey0'])
        self._run_command(
            ['ls', *self.b2_uri_args('my-bucket')],
            '',
            '',
            0,
        )

    def test_bad_terminal(self):
        stdout = mock.MagicMock()
        stdout.write = mock.MagicMock(
            side_effect=[
                UnicodeEncodeError('codec', 'foo', 100, 105, 'artificial UnicodeEncodeError')
            ] + list(range(25))
        )
        stderr = mock.MagicMock()
        console_tool = self.console_tool_class(stdout, stderr)
        console_tool.run_command(['b2', 'authorize-account', self.account_id, self.master_key])

    def test_passing_api_parameters(self):
        self._authorize_account()
        commands = [
            [
                'b2', 'download-file-by-name', '--profile', 'nonexistent', 'dummy-name',
                'dummy-file-name', 'dummy-local-file-name'
            ],
            [
                'b2', 'download-file-by-id', '--profile', 'nonexistent', 'dummy-id',
                'dummy-local-file-name'
            ],
            ['b2', 'sync', '--profile', 'nonexistent', 'b2:dummy-source', 'dummy-destination'],
        ]
        parameters = [
            {
                '--write-buffer-size': 123,
                '--skip-hash-verification': None,
                '--max-download-streams-per-file': 8,
            },
            {
                '--write-buffer-size': 321,
                '--max-download-streams-per-file': 7,
            },
        ]
        for command, params in product(commands, parameters):
            console_tool = self.console_tool_class(
                mock.MagicMock(),
                mock.MagicMock(),
            )

            args = [str(val) for val in chain.from_iterable(params.items()) if val]
            console_tool.run_command(command + args)

            download_manager = console_tool.api.services.download_manager
            assert download_manager.write_buffer_size == params['--write-buffer-size']
            assert download_manager.check_hash is ('--skip-hash-verification' not in params)

            parallel_strategy = one(
                strategy for strategy in download_manager.strategies
                if isinstance(strategy, download_manager.PARALLEL_DOWNLOADER_CLASS)
            )
            assert parallel_strategy.max_streams == params['--max-download-streams-per-file']

    def test_passing_api_parameters_with_auth_env_vars(self):
        os.environ[B2_APPLICATION_KEY_ID_ENV_VAR] = self.account_id
        os.environ[B2_APPLICATION_KEY_ENV_VAR] = self.master_key

        command = [
            'b2', 'download-file-by-id', 'dummy-id', 'dummy-local-file-name',
            '--write-buffer-size', '123', '--max-download-streams-per-file', '5',
            '--skip-hash-verification',
        ]

        console_tool = self.console_tool_class(
            mock.MagicMock(),
            mock.MagicMock(),
        )
        console_tool.run_command(command)

        download_manager = console_tool.api.services.download_manager
        assert download_manager.write_buffer_size == 123
        assert download_manager.check_hash is False

        parallel_strategy = one(
            strategy for strategy in download_manager.strategies
            if isinstance(strategy, download_manager.PARALLEL_DOWNLOADER_CLASS)
        )
        assert parallel_strategy.max_streams == 5

    @pytest.mark.apiver(from_ver=4)
    def test_ls_b2id(self):
        self._authorize_account()
        self._create_my_bucket()

        # Create a file
        bucket = self.b2_api.get_bucket_by_name('my-bucket')
        file_version = bucket.upload(UploadSourceBytes(b''), 'test.txt')

        # Condensed output
        expected_stdout = '''
        test.txt
        '''
        self._run_command(['ls', f'b2id://{file_version.id_}'], expected_stdout, '', 0)

    def test_ls_filters(self):
        self._authorize_account()
        self._create_my_bucket()

        # Create some files, including files in a folder
        bucket = self.b2_api.get_bucket_by_name('my-bucket')
        data = UploadSourceBytes(b'test-data')
        bucket.upload(data, 'a/test.csv')
        bucket.upload(data, 'a/test.tsv')
        bucket.upload(data, 'b/b/test.csv')
        bucket.upload(data, 'c/test.csv')
        bucket.upload(data, 'c/test.tsv')
        bucket.upload(data, 'test.csv')
        bucket.upload(data, 'test.tsv')

        expected_stdout = '''
        a/
        b/
        c/
        test.csv
        '''
        self._run_command(
            ['ls', *self.b2_uri_args('my-bucket'), '--include', '*.csv'],
            expected_stdout,
        )
        self._run_command(
            ['ls', *self.b2_uri_args('my-bucket'), '--exclude', '*.tsv'],
            expected_stdout,
        )

        expected_stdout = '''
        a/test.csv
        b/b/test.csv
        c/test.csv
        test.csv
        '''
        self._run_command(
            ['ls', *self.b2_uri_args('my-bucket'), '--recursive', '--include', '*.csv'],
            expected_stdout,
        )
        self._run_command(
            ['ls', *self.b2_uri_args('my-bucket'), '--recursive', '--exclude', '*.tsv'],
            expected_stdout,
        )

        expected_stdout = '''
        b/b/test.csv
        c/test.csv
        test.csv
        '''
        self._run_command(
            [
                'ls', *self.b2_uri_args('my-bucket'), '--recursive', '--exclude', '*',
                '--include', '*.csv', '--exclude', 'a/*'
            ],
            expected_stdout,
        )

    @pytest.mark.skip("temporarily disabled")
    @skip_on_windows
    def test_escape_c0_char_on_sync_stack_trace(self):
        self._authorize_account()
        self._run_command(['create-bucket', 'my-bucket-0', 'allPrivate'], 'bucket_0\n', '', 0)
        self._run_command(['create-bucket', 'my-bucket-1', 'allPrivate'], 'bucket_1\n', '', 0)
        with TempDir() as temp_dir:
            _ = self._make_local_file(temp_dir, "\x1b[32mC\x1b[33mC\x1b[34mI\x1b[0m")
            self._run_command(
                [
                    'sync', '--no-progress', '--no-escape-control-characters', temp_dir,
                    'b2://my-bucket-0'
                ],
                expected_part_of_stdout='\\x1b[32m',
                expected_status=0,
            )
            self._run_command(
                [
                    'sync', '--no-progress', '--escape-control-characters', temp_dir,
                    'b2://my-bucket-1'
                ],
                expected_part_of_stdout="upload '\\x1b[32mC\\x1b[33mC\\x1b[34mI\\x1b[0m'\n",
                expected_status=0,
                unexpected_part_of_stdout='\x1b[32m',
            )

    def test_escape_c0_char_on_key_restricted_path(self):
        self._authorize_account()
        self._run_command(['create-bucket', 'my-bucket-0', 'allPublic'], 'bucket_0\n', '', 0)
        cc_name = "$'\x1b[31mC\x1b[32mC\x1b[33mI\x1b[0m'"
        escaped_error = (
            "ERROR: unauthorized for application key with capabilities 'listBuckets,listKeys', "
            "restricted to bucket 'my-bucket-0', restricted to files that start with "
            "'$'\\x1b[31mC\\x1b[32mC\\x1b[33mI\\x1b[0m'' (unauthorized)\n"
        )

        # Create a key
        self._run_command(
            [
                'create-key', '--bucket', 'my-bucket-0', '--name-prefix', cc_name, 'key1',
                'listBuckets,listKeys'
            ],
            'appKeyId0 appKey0\n',
            expected_status=0,
        )

        # Authorize with the key
        self._run_command(['authorize-account', 'appKeyId0', 'appKey0'], expected_status=0)
        self._run_command(
            ['ls', *self.b2_uri_args('my-bucket-0'), '--no-escape-control-characters'],
            expected_status=1,
            expected_stderr=escaped_error
        )
        self._run_command(
            ['ls', *self.b2_uri_args('my-bucket-0'), '--escape-control-characters'],
            expected_status=1,
            expected_stderr=escaped_error
        )
        self._run_command(
            ['ls', *self.b2_uri_args('my-bucket-0')],
            expected_status=1,
            expected_stderr=escaped_error
        )

    def test_escape_c1_char_on_ls_long(self):
        self._authorize_account()
        self._run_command(['create-bucket', 'my-bucket-0', 'allPrivate'], 'bucket_0\n', '', 0)
        with TempDir() as temp_dir:
            local_file = self._make_local_file(temp_dir, 'file1.txt')
            cc_filename = '\u009bT\u009bE\u009bS\u009bTtest.txt'
            escaped_cc_filename = '\\x9bT\\x9bE\\x9bS\\x9bTtest.txt'
            self._run_command(
                ['upload-file', '--no-progress', 'my-bucket-0', local_file, cc_filename]
            )
            self._run_command(
                ['ls', '--long', '--no-escape-control-characters', *self.b2_uri_args('my-bucket-0')],
                expected_part_of_stdout=cc_filename,
            )
            self._run_command(
                ['ls', '--long', *self.b2_uri_args('my-bucket-0')],
                expected_part_of_stdout=cc_filename
            )
            self._run_command(
                ['ls', '--long', '--escape-control-characters', *self.b2_uri_args('my-bucket-0')],
                expected_part_of_stdout=escaped_cc_filename,
                unexpected_part_of_stdout=cc_filename
            )

    def test_escape_c1_char_ls(self):
        self._authorize_account()
        self._run_command(['create-bucket', 'my-bucket-cc', 'allPrivate'], 'bucket_0\n', '', 0)
        with TempDir() as temp_dir:
            local_file = self._make_local_file(temp_dir, "x")
            bad_str = "\u009b2K\u009b7Gb\u009b24Gx\u009b4GH"
            escaped_bad_str = "\\x9b2K\\x9b7Gb\\x9b24Gx\\x9b4GH"
            self._run_command(['upload-file', '--no-progress', 'my-bucket-cc', local_file, bad_str])
            self._run_command(
                ['upload-file', '--no-progress', 'my-bucket-cc', local_file, "some_normal_text"]
            )
            self._run_command(
                ['ls', *self.b2_uri_args('my-bucket-cc'), '--no-escape-control-characters'],
                expected_part_of_stdout=bad_str
            )
            self._run_command(
                ['ls', *self.b2_uri_args('my-bucket-cc')], expected_part_of_stdout=bad_str
            )
            self._run_command(
                ['ls', *self.b2_uri_args('my-bucket-cc'), '--escape-control-characters'],
                expected_part_of_stdout=escaped_bad_str
            )


class TestConsoleToolWithV1(BaseConsoleToolTest):
    """These tests use the v1 interface to perform various setups before running CLI commands"""

    def setUp(self):
        super().setUp()
        self.v1_account_info = v1.StubAccountInfo()
        self.v1_b2_api = v1.B2Api(self.v1_account_info, None)
        self.v1_b2_api.session.raw_api = self.raw_api
        self.v1_b2_api.authorize_account('production', self.account_id, self.master_key)
        self._authorize_account()
        self._create_my_bucket()
        self.v1_bucket = self.v1_b2_api.create_bucket('my-v1-bucket', 'allPrivate')

    def test_cancel_large_file(self):
        file = self.v1_bucket.start_large_file('file1', 'text/plain', {})
        self._run_command(['cancel-large-file', file.file_id], '9999 canceled\n', '', 0)

    def test_cancel_all_large_file(self):
        self.v1_bucket.start_large_file('file1', 'text/plain', {})
        self.v1_bucket.start_large_file('file2', 'text/plain', {})
        expected_stdout = '''
        9999 canceled
        9998 canceled
        '''
        self._run_command(
            ['cancel-all-unfinished-large-files', 'my-v1-bucket'], expected_stdout, '', 0
        )

    def test_list_parts_with_none(self):
        file = self.v1_bucket.start_large_file('file', 'text/plain', {})
        self._run_command(['list-parts', file.file_id], '', '', 0)

    def test_list_parts_with_parts(self):
        bucket = self.b2_api.get_bucket_by_name('my-bucket')
        file = self.v1_bucket.start_large_file('file', 'text/plain', {})
        content = b'hello world'
        large_file_upload_state = mock.MagicMock()
        large_file_upload_state.has_error.return_value = False
        bucket.api.services.upload_manager._upload_part(
            bucket.id_, file.file_id, UploadSourceBytes(content), 1, large_file_upload_state,
            None, None
        )
        bucket.api.services.upload_manager._upload_part(
            bucket.id_, file.file_id, UploadSourceBytes(content), 3, large_file_upload_state,
            None, None
        )
        expected_stdout = '''
            1         11  2aae6c35c94fcfb415dbe95f408b9ce91ee846ed
            3         11  2aae6c35c94fcfb415dbe95f408b9ce91ee846ed
        '''
        self._run_command(['list-parts', file.file_id], expected_stdout, '', 0)

    def test_list_unfinished_large_files_with_some(self):
        api_url = self.account_info.get_api_url()
        auth_token = self.account_info.get_account_auth_token()
        self.raw_api.start_large_file(api_url, auth_token, 'bucket_0', 'file1', 'text/plain', {})
        self.raw_api.start_large_file(
            api_url, auth_token, 'bucket_0', 'file2', 'text/plain', {'color': 'blue'}
        )
        self.raw_api.start_large_file(
            api_url, auth_token, 'bucket_0', 'file3', 'application/json', {}
        )
        expected_stdout = '''
        9999 file1 text/plain
        9998 file2 text/plain color=blue
        9997 file3 application/json
        '''
        self._run_command(['list-unfinished-large-files', 'my-bucket'], expected_stdout, '', 0)


class TestRmConsoleTool(BaseConsoleToolTest):
    """
    These tests replace the default progress reporter of the Rm class
    to ensure that it reports everything as fast as possible.
    """

    class InstantReporter(ProgressReport):
        UPDATE_INTERVAL = 0.0

    @classmethod
    def setUpClass(cls) -> None:
        cls.original_v3_progress_class = v3Rm.PROGRESS_REPORT_CLASS
        cls.original_v4_progress_class = v4Rm.PROGRESS_REPORT_CLASS
        v3Rm.PROGRESS_REPORT_CLASS = cls.InstantReporter
        v4Rm.PROGRESS_REPORT_CLASS = cls.InstantReporter

    def setUp(self):
        super().setUp()

        self._authorize_account()
        self._create_my_bucket()
        self.bucket = self.b2_api.get_bucket_by_name('my-bucket')
        self._upload_multiple_files(self.bucket)

    @classmethod
    def tearDownClass(cls) -> None:
        v3Rm.PROGRESS_REPORT_CLASS = cls.original_v3_progress_class
        v4Rm.PROGRESS_REPORT_CLASS = cls.original_v4_progress_class

    def test_rm_wildcard(self):
        self._run_command(
            [
                'rm', '--recursive', '--with-wildcard', '--no-progress',
                *self.b2_uri_args('my-bucket', '*.csv')
            ],
        )
        expected_stdout = '''
        a/test.tsv
        b/b2/test.tsv
        b/test.txt
        c/test.tsv
        '''
        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout)

    def test_rm_versions(self):
        # Uploading content of the bucket again to create a second version of each file.
        self._upload_multiple_files(self.bucket)
        self._run_command(
            [
                'rm', '--versions', '--recursive', '--with-wildcard',
                *self.b2_uri_args('my-bucket', '*.csv')
            ],
        )
        expected_stdout = '''
        a/test.tsv
        a/test.tsv
        b/b2/test.tsv
        b/b2/test.tsv
        b/test.txt
        b/test.txt
        c/test.tsv
        c/test.tsv
        '''
        self._run_command(
            ['ls', '--versions', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout
        )

    def test_rm_no_recursive(self):
        self._run_command(['rm', '--no-progress', *self.b2_uri_args('my-bucket', 'b/')])
        expected_stdout = '''
        a/test.csv
        a/test.tsv
        b/b/test.csv
        b/b1/test.csv
        b/b2/test.tsv
        c/test.csv
        c/test.tsv
        '''
        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout)

    def test_rm_dry_run(self):
        expected_stdout = '''
        a/test.csv
        b/b/test.csv
        b/b1/test.csv
        c/test.csv
        '''
        self._run_command(
            [
                'rm', '--recursive', '--with-wildcard', '--dry-run',
                *self.b2_uri_args('my-bucket', '*.csv')
            ],
            expected_stdout,
        )
        expected_stdout = '''
        a/test.csv
        a/test.tsv
        b/b/test.csv
        b/b1/test.csv
        b/b2/test.tsv
        b/test.txt
        c/test.csv
        c/test.tsv
        '''
        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout)

    def test_rm_exact_filename(self):
        self._run_command(
            [
                'rm', '--recursive', '--with-wildcard', '--no-progress',
                *self.b2_uri_args('my-bucket', 'b/b/test.csv')
            ],
        )
        expected_stdout = '''
        a/test.csv
        a/test.tsv
        b/b1/test.csv
        b/b2/test.tsv
        b/test.txt
        c/test.csv
        c/test.tsv
        '''
        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout)

    def test_rm_no_name_removes_everything(self):
        self._run_command(['rm', '--recursive', '--no-progress', *self.b2_uri_args('my-bucket')])
        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], '')

    def test_rm_with_wildcard_without_recursive(self):
        self._run_command(
            ['rm', '--with-wildcard', *self.b2_uri_args('my-bucket')],
            expected_stderr='ERROR: with_wildcard requires recursive to be turned on as well\n',
            expected_status=1,
        )

    def test_rm_queue_size_and_number_of_threads(self):
        self._run_command(
            [
                'rm', '--recursive', '--threads', '2', '--queue-size', '4',
                *self.b2_uri_args('my-bucket')
            ]
        )
        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], '')
    def test_rm_progress(self):
        expected_in_stdout = ' count: 4/4 '
        self._run_command(
            ['rm', '--recursive', '--with-wildcard', *self.b2_uri_args('my-bucket', '*.csv')],
            expected_part_of_stdout=expected_in_stdout,
        )
        expected_stdout = '''
        a/test.tsv
        b/b2/test.tsv
        b/test.txt
        c/test.tsv
        '''
        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout)

    def _run_problematic_removal(
        self,
        additional_parameters: Optional[List[str]] = None,
        expected_in_stdout: Optional[str] = None,
        unexpected_in_stdout: Optional[str] = None
    ):
        additional_parameters = additional_parameters or []

        original_delete_file_version = self.b2_api.raw_api.delete_file_version

        def mocked_delete_file_version(
            this, account_auth_token, file_id, file_name, bypass_governance=False, *args, **kwargs
        ):
            if file_name == 'b/b1/test.csv':
                raise Conflict()
            return original_delete_file_version(
                this, account_auth_token, file_id, file_name, bypass_governance, *args, **kwargs
            )

        with mock.patch.object(
            self.b2_api.raw_api,
            'delete_file_version',
            side_effect=mocked_delete_file_version,
        ):
            self._run_command(
                [
                    'rm', '--recursive', '--with-wildcard', '--threads', '1', '--queue-size', '1',
                    *additional_parameters, *self.b2_uri_args('my-bucket', '*'),
                ],
                expected_status=1,
                expected_part_of_stdout=expected_in_stdout,
                unexpected_part_of_stdout=unexpected_in_stdout,
            )

    def test_rm_fail_fast(self):
        # Since we already have all the jobs submitted to another thread,
        # we can only rely on the log to tell when it stopped.
        expected_in_stdout = '''
        Deletion of file "b/b1/test.csv" (9996) failed: Conflict:
         count: 3/4'''
        unexpected_in_stdout = ' count: 5/5 '
        self._run_problematic_removal(['--fail-fast'], expected_in_stdout, unexpected_in_stdout)

    def test_rm_skipping_over_errors(self):
        self._run_problematic_removal()
        expected_stdout = '''
        b/b1/test.csv
        '''
        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout)

    @pytest.mark.apiver(from_ver=4)
    def test_rm_b2id(self):
        # Create a file
        file_version = self.bucket.upload(UploadSourceBytes(b''), 'new-file.txt')

        # Before deleting
        expected_stdout = '''
        a/test.csv
        a/test.tsv
        b/b/test.csv
        b/b1/test.csv
        b/b2/test.tsv
        b/test.txt
        c/test.csv
        c/test.tsv
        new-file.txt
        '''
        self._run_command(['ls', '--recursive', 'b2://my-bucket'], expected_stdout)

        # Delete file
        self._run_command(['rm', '--no-progress', f'b2id://{file_version.id_}'], '', '', 0)

        # After deleting
        expected_stdout = '''
        a/test.csv
        a/test.tsv
        b/b/test.csv
        b/b1/test.csv
        b/b2/test.tsv
        b/test.txt
        c/test.csv
        c/test.tsv
        '''
        self._run_command(['ls', '--recursive', 'b2://my-bucket'], expected_stdout)

    def rm_filters_helper(self, rm_args: List[str], expected_ls_stdout: str):
        self._authorize_account()
        self._run_command(['create-bucket', 'my-rm-bucket', 'allPublic'], 'bucket_1\n', '', 0)
        bucket = self.b2_api.get_bucket_by_name('my-rm-bucket')

        # Create some files, including files in a folder
        data = UploadSourceBytes(b'test-data')
        bucket.upload(data, 'a/test.csv')
        bucket.upload(data, 'a/test.tsv')
        bucket.upload(data, 'b/b/test.csv')
        bucket.upload(data, 'c/test.csv')
        bucket.upload(data, 'c/test.tsv')
        bucket.upload(data, 'test.csv')
        bucket.upload(data, 'test.tsv')
        bucket.upload(data, 'test.txt')

        self._run_command(
            ['rm', '--no-progress', *self.b2_uri_args('my-rm-bucket'), *rm_args], '', '', 0
        )
        self._run_command(
            ['ls', *self.b2_uri_args('my-rm-bucket'), '--recursive'],
            expected_ls_stdout,
        )

    def test_rm_filters_include(self):
        expected_ls_stdout = '''
        a/test.csv
        a/test.tsv
        b/b/test.csv
        c/test.csv
        c/test.tsv
        test.tsv
        test.txt
        '''
        self.rm_filters_helper(['--include', '*.csv'], expected_ls_stdout)

    def test_rm_filters_exclude(self):
        expected_ls_stdout = '''
        a/test.csv
        a/test.tsv
        b/b/test.csv
        c/test.csv
        c/test.tsv
        test.csv
        '''
        self.rm_filters_helper(['--exclude', '*.csv'], expected_ls_stdout)

    def test_rm_filters_include_recursive(self):
        expected_ls_stdout = '''
        a/test.tsv
        c/test.tsv
        test.tsv
        test.txt
        '''
        self.rm_filters_helper(['--recursive', '--include', '*.csv'], expected_ls_stdout)

    def test_rm_filters_exclude_recursive(self):
        expected_ls_stdout = '''
        a/test.csv
        b/b/test.csv
        c/test.csv
        test.csv
        '''
        self.rm_filters_helper(['--recursive', '--exclude', '*.csv'], expected_ls_stdout)

    def test_rm_filters_mixed(self):
        expected_ls_stdout = '''
        a/test.csv
        a/test.tsv
        c/test.tsv
        test.tsv
        test.txt
        '''
        self.rm_filters_helper(
            ['--recursive', '--exclude', '*', '--include', '*.csv', '--exclude', 'a/*'],
            expected_ls_stdout
        )


class TestVersionConsoleTool(BaseConsoleToolTest):
    def test_version(self):
        self._run_command(['version', '--short'], expected_stdout=f'{VERSION}\n')
        self._run_command(['version'], expected_stdout=f'b2 command line tool, version {VERSION}\n')
######################################################################
#
# File: test/unit/test_copy.py
#
# Copyright 2021 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################

from unittest import mock

from b2sdk.v2 import (
    SSE_B2_AES,
    UNKNOWN_KEY_ID,
    EncryptionAlgorithm,
    EncryptionKey,
    EncryptionMode,
    EncryptionSetting,
)

from b2._internal.console_tool import CopyFileById

from .test_base import TestBase


class TestCopy(TestBase):
    def test_determine_source_metadata(self):
        mock_api = mock.MagicMock()
        mock_console_tool = mock.MagicMock()
        mock_console_tool.api = mock_api
        copy_file_command = CopyFileById(mock_console_tool)

        result = copy_file_command._determine_source_metadata(
            'id',
            destination_encryption=None,
            source_encryption=None,
            target_file_info=None,
            target_content_type=None,
            fetch_if_necessary=True,
        )
        assert result == (None, None)
        assert len(mock_api.method_calls) == 0

        result = copy_file_command._determine_source_metadata(
            'id',
            destination_encryption=SSE_B2_AES,
            source_encryption=SSE_B2_AES,
            target_file_info={},
            target_content_type='',
            fetch_if_necessary=True,
        )
        assert result == (None, None)
        assert len(mock_api.method_calls) == 0

        source_sse_c = EncryptionSetting(
            EncryptionMode.SSE_C, EncryptionAlgorithm.AES256,
            EncryptionKey(b'some_key', UNKNOWN_KEY_ID)
        )
        destination_sse_c = EncryptionSetting(
            EncryptionMode.SSE_C, EncryptionAlgorithm.AES256,
            EncryptionKey(b'some_other_key', 'key_id')
        )

        result = copy_file_command._determine_source_metadata(
            'id',
            destination_encryption=destination_sse_c,
            source_encryption=source_sse_c,
            target_file_info={},
            target_content_type='',
            fetch_if_necessary=True,
        )
        assert result == (None, None)
        assert len(mock_api.method_calls) == 0

        with self.assertRaises(
            ValueError,
            msg='Attempting to copy file with metadata while either source or '
            'destination uses SSE-C. Use --fetch-metadata to fetch source '
            'file metadata before copying.'
        ):
            copy_file_command._determine_source_metadata(
                'id',
                destination_encryption=destination_sse_c,
                source_encryption=source_sse_c,
                target_file_info=None,
                target_content_type=None,
                fetch_if_necessary=False,
            )
        assert len(mock_api.method_calls) == 0

        result = copy_file_command._determine_source_metadata(
            'id',
            destination_encryption=destination_sse_c,
            source_encryption=source_sse_c,
            target_file_info=None,
            target_content_type=None,
            fetch_if_necessary=True,
        )
        assert result != (None, None)
        assert len(mock_api.method_calls)
######################################################################
#
# File: test/unit/test_represent_file_metadata.py
#
# Copyright 2021 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################

from io import StringIO

import pytest

from b2sdk.v2 import (
    SSE_B2_AES,
    B2Api,
    B2HttpApiConfig,
    EncryptionAlgorithm,
    EncryptionKey,
    EncryptionMode,
    EncryptionSetting,
    FileRetentionSetting,
    LegalHold,
    RawSimulator,
    RetentionMode,
    StubAccountInfo,
)

from b2._internal.console_tool import ConsoleTool, DownloadCommand

from .test_base import TestBase


class TestRepresentFileMetadata(TestBase):
    def setUp(self):
        self.master_b2_api = B2Api(
            StubAccountInfo(), None, api_config=B2HttpApiConfig(_raw_api_class=RawSimulator)
        )
        self.raw_api = self.master_b2_api.session.raw_api
        (self.master_account_id, self.master_key) = self.raw_api.create_account()
        self.master_b2_api.authorize_account(
            'production', self.master_account_id, self.master_key
        )
        self.lock_enabled_bucket = self.master_b2_api.create_bucket(
            'lock-enabled-bucket', 'allPrivate', is_file_lock_enabled=True
        )
        self.lock_disabled_bucket = self.master_b2_api.create_bucket(
            'lock-disabled-bucket', 'allPrivate', is_file_lock_enabled=False
        )
        new_key = self.master_b2_api.create_key(
            [
                'listKeys',
                'listBuckets',
                'listFiles',
                'readFiles',
            ],
            'restricted',
        )
        self.restricted_key_id, self.restricted_key = new_key.id_, new_key.application_key
        self.restricted_b2_api = B2Api(StubAccountInfo(), None)
        self.restricted_b2_api.session.raw_api = self.raw_api
        self.restricted_b2_api.authorize_account(
            'production', self.restricted_key_id, self.restricted_key
        )

        def _get_b2api(**kwargs) -> B2Api:
            kwargs.pop('profile', None)
            return self.master_b2_api

        self.mp = pytest.MonkeyPatch()
        self.mp.setattr('b2._internal.console_tool._get_b2api_for_profile', _get_b2api)
        self.mp.setattr('b2._internal.console_tool._get_inmemory_b2api', _get_b2api)

        self.stdout = StringIO()
        self.stderr = StringIO()
        self.console_tool = ConsoleTool(self.stdout, self.stderr)

    def tearDown(self):
        self.mp.undo()
        super().tearDown()

    def assertRetentionRepr(self, file_id: str, api: B2Api, expected_repr: str):
        file_version = api.get_file_info(file_id)
        assert DownloadCommand._represent_retention(file_version.file_retention) == expected_repr

    def assertLegalHoldRepr(self, file_id: str, api: B2Api, expected_repr: str):
        file_version = api.get_file_info(file_id)
        assert DownloadCommand._represent_legal_hold(file_version.legal_hold) == expected_repr

    def assertEncryptionRepr(self, file_id: str, expected_repr: str):
        file_version = self.master_b2_api.get_file_info(file_id)
        assert DownloadCommand._represent_encryption(
            file_version.server_side_encryption
        ) == expected_repr

    def test_file_retention(self):
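        # The restricted key created in setUp has no file-lock capabilities, so the
        # retention representation is expected to be blank when read through it.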
        file = self.lock_disabled_bucket.upload_bytes(b'insignificant', 'file')
        self.assertRetentionRepr(file.id_, self.master_b2_api, 'none')
        self.assertRetentionRepr(file.id_, self.restricted_b2_api, '')

        file = self.lock_enabled_bucket.upload_bytes(b'insignificant', 'file')
        self.assertRetentionRepr(file.id_, self.master_b2_api, 'none')
        self.assertRetentionRepr(file.id_, self.restricted_b2_api, '')

        self.master_b2_api.update_file_retention(
            file.id_, file.file_name, FileRetentionSetting(RetentionMode.GOVERNANCE, 1500)
        )
        self.assertRetentionRepr(
            file.id_, self.master_b2_api,
            'mode=governance, retainUntil=1970-01-01 00:00:01.500000+00:00'
        )
        self.assertRetentionRepr(file.id_, self.restricted_b2_api, '')

        self.master_b2_api.update_file_retention(
            file.id_, file.file_name, FileRetentionSetting(RetentionMode.COMPLIANCE, 2000)
        )
        self.assertRetentionRepr(
            file.id_, self.master_b2_api,
            'mode=compliance, retainUntil=1970-01-01 00:00:02+00:00'
        )
        self.assertRetentionRepr(file.id_, self.restricted_b2_api, '')

    def test_legal_hold(self):
        file = self.lock_disabled_bucket.upload_bytes(b'insignificant', 'file')
        self.assertLegalHoldRepr(file.id_, self.master_b2_api, '')
        self.assertLegalHoldRepr(file.id_, self.restricted_b2_api, '')

        file = self.lock_enabled_bucket.upload_bytes(b'insignificant', 'file')
        self.assertLegalHoldRepr(file.id_, self.master_b2_api, '')
        self.assertLegalHoldRepr(file.id_, self.restricted_b2_api, '')

        self.master_b2_api.update_file_legal_hold(file.id_, file.file_name, LegalHold.ON)
        self.assertLegalHoldRepr(file.id_, self.master_b2_api, 'on')
        self.assertLegalHoldRepr(file.id_, self.restricted_b2_api, '')

        self.master_b2_api.update_file_legal_hold(file.id_, file.file_name, LegalHold.OFF)
        self.assertLegalHoldRepr(file.id_, self.master_b2_api, 'off')
        self.assertLegalHoldRepr(file.id_, self.restricted_b2_api, '')

    def test_encryption(self):
        file = self.lock_enabled_bucket.upload_bytes(b'insignificant', 'file')
        self.assertEncryptionRepr(file.id_, 'none')

        file = self.lock_enabled_bucket.upload_bytes(
            b'insignificant', 'file', encryption=SSE_B2_AES
        )
        self.assertEncryptionRepr(file.id_, 'mode=SSE-B2, algorithm=AES256')

        file = self.lock_enabled_bucket.upload_bytes(
            b'insignificant',
            'file',
            encryption=EncryptionSetting(
                EncryptionMode.SSE_C,
                algorithm=EncryptionAlgorithm.AES256,
                key=EncryptionKey(b'', key_id=None),
            )
        )
        self.assertEncryptionRepr(file.id_, 'mode=SSE-C, algorithm=AES256')

        file = self.lock_enabled_bucket.upload_bytes(
            b'insignificant',
            'file',
            encryption=EncryptionSetting(
                EncryptionMode.SSE_C,
                algorithm=EncryptionAlgorithm.AES256,
                key=EncryptionKey(b'', key_id='some_id'),
            )
        )
        self.assertEncryptionRepr(file.id_, 'mode=SSE-C, algorithm=AES256, key_id=some_id')